

| 节点 | 角色 |
|---|---|
| node1 主 | NN DN RM NM |
| node2 从 | SNN DN NM |
| node3 从 | DN NM |
# Map the three cluster hostnames on every node
vim /etc/hosts
# Append at the bottom (replace ip1/ip2/ip3 with the real node IP addresses)
ip1 node1
ip2 node2
ip3 node3
# Install JDK 8 — the tarball extracts to /usr/local/jdk1.8.0_241
tar zxvf jdk-8u241-linux-x64.tar.gz -C /usr/local/
vim /etc/profile
# Append at the bottom — JAVA_HOME must match the extracted directory name
export JAVA_HOME=/usr/local/jdk1.8.0_241
export PATH=$PATH:$JAVA_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
# Reload so JAVA_HOME takes effect in the current shell
source /etc/profile
# Install an NTP client and sync the clock (run on every node — HDFS needs consistent time)
yum install -y ntpdate
ntpdate ntp5.aliyun.com
# Check firewall state
firewall-cmd --state
# Stop the firewalld service
systemctl stop firewalld
# Disable firewalld at boot (Hadoop daemons need open ports between nodes)
systemctl disable firewalld
# On node1: generate an SSH key pair (press Enter at every prompt)
ssh-keygen
# On node1: enable passwordless login to node1, node2 and node3
# (start-dfs.sh / start-yarn.sh use SSH to launch daemons on the workers)
ssh-copy-id node1
ssh-copy-id node2
ssh-copy-id node3
# Unpack the Hadoop distribution (built for CentOS 7 with snappy support)
mkdir -p /export/server
tar -zxvf hadoop-3.3.0-Centos7-64-with-snappy.tar.gz -C /export/server
#hadoop-env.sh
# Append at the end of the file
# NOTE: the JDK was extracted to /usr/local (see the JDK install step), so point there
export JAVA_HOME=/usr/local/jdk1.8.0_241
# Hadoop 3.x refuses to start daemons as root unless these users are declared
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
#core-site.xml
<property>
  <!-- Default filesystem: HDFS, NameNode RPC endpoint -->
  <name>fs.defaultFS</name>
  <value>hdfs://node1:8020</value>
</property>
<property>
  <!-- Base directory for Hadoop's local working data -->
  <name>hadoop.tmp.dir</name>
  <value>/export/data/hadoop-3.3.0</value>
</property>
<property>
  <!-- User the HDFS web UI acts as when browsing files -->
  <name>hadoop.http.staticuser.user</name>
  <value>root</value>
</property>
<property>
  <!-- Allow the root proxy user from any host / any group -->
  <name>hadoop.proxyuser.root.hosts</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.root.groups</name>
  <value>*</value>
</property>
<property>
  <!-- Keep deleted files in the HDFS trash for 1440 minutes (1 day) -->
  <name>fs.trash.interval</name>
  <value>1440</value>
</property>
#hdfs-site.xml
<property>
  <!-- SecondaryNameNode HTTP endpoint runs on node2 (matches the role table) -->
  <name>dfs.namenode.secondary.http-address</name>
  <value>node2:9868</value>
</property>
#mapred-site.xml
<property>
  <!-- Run MapReduce jobs on YARN (not in local mode) -->
  <name>mapreduce.framework.name</name>
  <value>yarn</value>
</property>
<property>
  <!-- MapReduce JobHistory server RPC address -->
  <name>mapreduce.jobhistory.address</name>
  <value>node1:10020</value>
</property>
<property>
  <!-- JobHistory web UI address -->
  <name>mapreduce.jobhistory.webapp.address</name>
  <value>node1:19888</value>
</property>
<property>
  <!-- Let the MR ApplicationMaster and tasks locate the Hadoop installation -->
  <name>yarn.app.mapreduce.am.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
  <name>mapreduce.map.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
  <name>mapreduce.reduce.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
#yarn-site.xml
<property>
  <!-- ResourceManager runs on node1 -->
  <name>yarn.resourcemanager.hostname</name>
  <value>node1</value>
</property>
<property>
  <!-- Auxiliary shuffle service required by MapReduce -->
  <name>yarn.nodemanager.aux-services</name>
  <value>mapreduce_shuffle</value>
</property>
<property>
  <!-- Disable physical-memory limit checks on containers -->
  <name>yarn.nodemanager.pmem-check-enabled</name>
  <value>false</value>
</property>
<property>
  <!-- Disable virtual-memory limit checks on containers -->
  <name>yarn.nodemanager.vmem-check-enabled</name>
  <value>false</value>
</property>
<property>
  <!-- Aggregate finished container logs centrally -->
  <name>yarn.log-aggregation-enable</name>
  <value>true</value>
</property>
<property>
  <!-- Link from the YARN UI to the JobHistory log server -->
  <name>yarn.log.server.url</name>
  <value>http://node1:19888/jobhistory/logs</value>
</property>
<property>
  <!-- Retain aggregated logs for 604800 seconds (7 days) -->
  <name>yarn.log-aggregation.retain-seconds</name>
  <value>604800</value>
</property>
# workers file (named "slaves" before Hadoop 3): one worker hostname per line
node1
node2
node3
# Copy the configured Hadoop directory to the other nodes
# (run from its parent directory, e.g. /export/server — $PWD mirrors the path remotely)
scp -r hadoop-3.3.0 root@node2:$PWD
scp -r hadoop-3.3.0 root@node3:$PWD
添加Hadoop环境变量
vim /etc/profile
# Append at the bottom
export HADOOP_HOME=/export/server/hadoop-3.3.0
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# Reload the profile, then format HDFS — run ONCE, on node1 only
# (reformatting an existing cluster wipes NameNode metadata)
# NOTE(review): the /etc/profile edit above applies to node1 only — repeat it
# on node2/node3 (or scp the file) so hadoop commands work there too
source /etc/profile
hdfs namenode -format
格式化成功如下图

# Start HDFS daemons (NameNode / DataNodes / SecondaryNameNode) — run on node1
start-dfs.sh
# Start YARN daemons (ResourceManager / NodeManagers)
start-yarn.sh
# Alternatively, start everything at once — use this INSTEAD of the two scripts above
start-all.sh





# Create a directory in HDFS
hadoop fs -mkdir /test
# Upload a local file into it
hadoop fs -put test.txt /test
# List the HDFS root directory
hadoop fs -ls /



# Smoke-test MapReduce-on-YARN with the bundled pi estimator (2 maps, 4 samples each)
cd /export/server/hadoop-3.3.0/share/hadoop/mapreduce
hadoop jar hadoop-mapreduce-examples-3.3.0.jar pi 2 4


