
上传jdk和hadoop安装包

查看上传的安装包

ssh-keygen

ssh-copy-id root@ied
ssh ied
tar -zxvf jdk-8u231-linux-x64.tar.gz -C /usr/local
ll /usr/local/jdk1.8.0_231
vim /etc/profile
export JAVA_HOME=/usr/local/jdk1.8.0_231
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar

source /etc/profile
echo $JAVA_HOME
java -version
vim HelloWorld.java
javac HelloWorld.java
java HelloWorld
tar -zxvf hadoop-3.3.4.tar.gz -C /usr/local
ll /usr/local/hadoop-3.3.4





vim /etc/profile
export HADOOP_HOME=/usr/local/hadoop-3.3.4
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
source /etc/profile
hadoop version
cd $HADOOP_HOME/etc/hadoop,进入hadoop配置目录
vim hadoop-env.sh,添加三条环境变量配置
source hadoop-env.sh,让配置生效
vim core-site.xml
<configuration>
    <!-- Default filesystem URI: NameNode RPC endpoint -->
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://ied:9000</value>
    </property>
    <!-- Base directory for Hadoop's temporary/working files -->
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/usr/local/hadoop-3.3.4/tmp</value>
    </property>
</configuration>
注意:需要在/etc/hosts里配置主机名映射(如 192.168.1.100 ied),才能使用hdfs://ied:9000,否则必须用IP地址hdfs://192.168.1.100:9000
vim hdfs-site.xml
<configuration>
    <!-- Where the NameNode stores filesystem metadata (fsimage/edits) -->
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>/usr/local/hadoop-3.3.4/tmp/namenode</value>
    </property>
    <!-- Where DataNodes store HDFS block data -->
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>/usr/local/hadoop-3.3.4/tmp/datanode</value>
    </property>
    <!-- Secondary NameNode HTTP address -->
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>ied:50090</value>
    </property>
    <!-- NameNode web UI; 0.0.0.0 allows access from any interface -->
    <property>
        <name>dfs.namenode.http-address</name>
        <value>0.0.0.0:9870</value>
    </property>
    <!-- Single-node setup: one replica per block -->
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <!-- Disable HDFS permission checks (convenient for a teaching/dev setup) -->
    <property>
        <name>dfs.permissions.enabled</name>
        <value>false</value>
    </property>
</configuration>
vim mapred-site.xml
<configuration>
    <!-- Run MapReduce jobs on YARN -->
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <!-- The three HADOOP_MAPRED_HOME settings below are required in Hadoop 3.x
         so the MR ApplicationMaster and tasks can locate the MapReduce classes -->
    <property>
        <name>yarn.app.mapreduce.am.env</name>
        <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
    </property>
    <property>
        <name>mapreduce.map.env</name>
        <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
    </property>
    <property>
        <name>mapreduce.reduce.env</name>
        <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
    </property>
</configuration>
说明:若不配置上述三条HADOOP_MAPRED_HOME环境变量,运行MapReduce作业时会报错:Error: Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster

执行命令:vim yarn-site.xml
<configuration>
    <!-- Host running the ResourceManager -->
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>ied</value>
    </property>
    <!-- Enable the shuffle auxiliary service required by MapReduce -->
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <!-- Disable virtual-memory checks so containers aren't killed on small VMs -->
    <property>
        <name>yarn.nodemanager.vmem-check-enabled</name>
        <value>false</value>
    </property>
</configuration>
hadoop-2.x里配置slaves文件,hadoop-3.x里配置workers文件。执行命令:vim workers
hdfs namenode -format


看到提示信息 Storage directory /usr/local/hadoop-3.3.4/tmp/namenode has been successfully formatted. 表明名称节点格式化成功。

执行命令:start-dfs.sh
start-yarn.sh
执行命令:jps

说明:start-dfs.sh与start-yarn.sh可以用一条命令start-all.sh来替换

http://ied:9870


BigData
hadoop安装包



stop-dfs.sh
执行命令:stop-yarn.sh

说明:stop-dfs.sh与stop-yarn.sh可以用一条命令stop-all.sh来替换
