Recommended tutorial: https://blog.csdn.net/weixin_55305220/article/details/123588501
Resource downloads:
JDK: https://www.jianguoyun.com/p/DaJ9OJ0Q7uH1ChiJr9cEIAA
Hadoop: https://www.jianguoyun.com/p/DdSqSrkQ7uH1ChiHr9cEIAA
Recommended network configuration tutorial: https://blog.csdn.net/qq_41474121/article/details/108929640
# Set the hostname (e.g. hadoop100)
vim /etc/hostname
# Edit the IP-to-hostname mapping
vim /etc/hosts
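For a single-node setup, one mapping line is enough. A minimal sketch, assuming the VM's IP is 192.168.127.100 (the address used later in this guide; substitute your own):
192.168.127.100 hadoop100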
# Reboot the virtual machine
reboot
# Check the hostname
hostname
# Press Enter three times to accept the defaults
ssh-keygen -t rsa
# Copy the public key to the node
ssh-copy-id root@hadoop100
# Test passwordless SSH
ssh root@hadoop100
If you can log in without a password prompt, the configuration is complete.
# Create two directories
mkdir /opt/software  # for installation packages
mkdir /opt/module    # for unpacked components
The system ships with OpenJDK; uninstall it first.
# List the bundled JDK packages
rpm -qa | grep java
# Uninstall command; remove each package one by one
rpm -e --nodeps  # use this form (ignores dependencies)
rpm -e --nodeps java-1.7.0-openjdk-headless-1.7.0.261-2.6.22.2.el7_8.x86_64
......
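If several OpenJDK packages are installed, they can also be removed in one pass. A convenience one-liner, not part of the original steps; review the grep output first, since this removes everything matching "java":
rpm -qa | grep java | xargs rpm -e --nodeps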
# Re-run rpm -qa | grep java; when it prints nothing, the uninstall is complete
Upload the JDK tarball to /opt/software with Xftp,
or simply drag the file from Windows into the Xshell window.
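If you prefer the command line, scp also works (recent Windows versions ship an OpenSSH client in PowerShell). Assuming the tarball sits in your current Windows directory:
scp jdk-8u212-linux-x64.tar.gz root@hadoop100:/opt/software/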
cd /opt/software
tar -zxvf jdk-8u212-linux-x64.tar.gz -C /opt/module/
vim /etc/profile
# Append the following configuration
export JAVA_HOME=/opt/module/jdk1.8.0_212
export PATH=$PATH:$JAVA_HOME/bin
# Save, then reload the environment variables
source /etc/profile
# Verify the JDK installation
java -version
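A quick sanity check that the profile edits took effect:
echo $JAVA_HOME  # should print /opt/module/jdk1.8.0_212
which java       # should resolve to a path under $JAVA_HOME/bin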
# Unpack Hadoop and enter its configuration directory
tar -zxvf hadoop-3.1.3.tar.gz -C /opt/module/
cd /opt/module/hadoop-3.1.3/etc/hadoop/
# Edit hadoop-env.sh
vim hadoop-env.sh
export JAVA_HOME=/opt/module/jdk1.8.0_212
The JAVA_HOME path must match where your JDK is actually installed.
# Edit core-site.xml
vim core-site.xml
<configuration>
    <!-- default file system; fs.defaultFS replaces the deprecated fs.default.name -->
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://hadoop100:8020</value>
    </property>
    <!-- base directory for Hadoop's working data -->
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/opt/module/hadoop-3.1.3/data/tmp</value>
    </property>
</configuration>
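hadoop.tmp.dir points at a directory that does not exist yet. Hadoop creates it when the NameNode is formatted, but creating it up front is a harmless precaution:
mkdir -p /opt/module/hadoop-3.1.3/data/tmp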
# Edit hdfs-site.xml
vim hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>3</value>
    </property>
    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>
    <!-- NameNode web UI address -->
    <property>
        <name>dfs.namenode.http-address</name>
        <value>hadoop100:9870</value>
    </property>
</configuration>
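Note that a replication factor of 3 cannot actually be met on a single-DataNode setup like this one; HDFS will simply report blocks as under-replicated, and pseudo-distributed guides often use 1 instead. To check the value the daemons actually see once the config is in place:
hdfs getconf -confKey dfs.replication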
# Edit mapred-site.xml
vim mapred-site.xml
<configuration>
    <!-- run MapReduce jobs on YARN -->
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>
# Edit yarn-site.xml
vim yarn-site.xml
<configuration>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
# Add Hadoop environment variables to /etc/profile
vim /etc/profile
export HADOOP_HOME=/opt/module/hadoop-3.1.3
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# Hadoop 3 requires these when running the daemons as root
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
# Reload the environment variables
source /etc/profile
# Verify the Hadoop installation
hadoop version
# Format the NameNode (first-time setup only)
hdfs namenode -format
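Format only once. Reformatting after data has been written generates a new clusterID, and the DataNode will then refuse to start. If you do need a clean slate, stop the daemons and wipe the data directory first (this destroys everything in HDFS):
stop-all.sh
rm -rf /opt/module/hadoop-3.1.3/data/tmp/*
hdfs namenode -format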
start-all.sh  # start all daemons
stop-all.sh   # stop all daemons
# Run jps to check the daemon processes
jps
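With this single-node configuration, jps should show five Hadoop daemons alongside Jps itself (PIDs will differ):
# HDFS: NameNode, DataNode, SecondaryNameNode
# YARN: ResourceManager, NodeManager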
Replace the IP below with your own VM's IP address.
http://192.168.127.100:9870  # HDFS (NameNode) web UI
http://192.168.127.100:8088  # YARN (ResourceManager) web UI
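If you also add the 192.168.127.100 hadoop100 mapping to the Windows hosts file (C:\Windows\System32\drivers\etc\hosts), the UIs are reachable by hostname:
http://hadoop100:9870
http://hadoop100:8088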
iptables method (CentOS 6):
Check firewall status:
service iptables status
Stop the firewall:
service iptables stop
firewalld method (CentOS 7):
Start: systemctl start firewalld
Check status: systemctl status firewalld
Disable (prevent start on boot): systemctl disable firewalld
Stop: systemctl stop firewalld
Reload rules: firewall-cmd --reload
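Disabling the firewall entirely is fine for a throwaway lab VM. On anything shared, opening just the web UI ports is safer; standard firewall-cmd usage, not part of the original tutorial:
firewall-cmd --permanent --add-port=9870/tcp
firewall-cmd --permanent --add-port=8088/tcp
firewall-cmd --reload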