1,
1
2
3
hadoop@hadoop-virtual-machine:~$ cat /etc/hostname
yard02
hadoop@hadoop-virtual-machine:~$
2,
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
hadoop@hadoop-virtual-machine:~$ cat /etc/hosts
127.0.0.1 localhost
127.0.1.1 hadoop-virtual-machine
::1 ip6-localhost ip6-loopback
fe00::0 ip6-localnet
ff00::0 ip6-mcastprefix
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters
192.168.137.2 yard02
192.168.137.3 yard03
192.168.137.4 yard04
192.168.137.5 yard05
hadoop@hadoop-virtual-machine:~$
3
core-site.xml — conf/hadoop
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
hdfs-site.xml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
yarn-site.xml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,
$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,
$YARN_HOME/share/hadoop/yarn/*,$YARN_HOME/share/hadoop/yarn/lib/*,
$YARN_HOME/share/hadoop/mapreduce/*,$YARN_HOME/share/hadoop/mapreduce/lib/*
mapred-site.xml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
masters
1
2
yard02
yard03
slaves
1
2
3
4
yard02
yard03
yard04
yard05
.bashrc
1
2
3
4
5
6
7
8
9
10
export HADOOP_HOME=/home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0
export HADOOP_MAPRED_HOME=${HADOOP_HOME}
export HADOOP_COMMON_HOME=${HADOOP_HOME}
export HADOOP_HDFS_HOME=${HADOOP_HOME}
export YARN_HOME=${HADOOP_HOME}
export HADOOP_YARN_HOME=${HADOOP_HOME}
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export HDFS_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export YARN_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export PATH=$PATH:$HOME/bin:$JAVA_HOME/bin:$HADOOP_HOME/sbin
同步各配置文件
1
2
3
4
5
6
7
scp -r /home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0/* hadoop@yard03:/home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0/
scp -r /home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0/* hadoop@yard04:/home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0/
scp -r /home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0/* hadoop@yard05:/home/hadoop/bigdata/hadoop-2.0.0-cdh4.4.0/
scp /home/hadoop/.bashrc hadoop@yard03:/home/hadoop
scp /home/hadoop/.bashrc hadoop@yard04:/home/hadoop
scp /home/hadoop/.bashrc hadoop@yard05:/home/hadoop
关于ssh无密码还是这个屡试不爽
1
2
3
4
5
6
7
ssh-keygen -t rsa
cp id_rsa.pub authorized_keys   # 本地也要哦
ssh localhost                   # 确保本地可以无密码登陆
scp authorized_keys hadoop@yard03:/home/hadoop/.ssh
scp authorized_keys hadoop@yard04:/home/hadoop/.ssh
scp authorized_keys hadoop@yard05:/home/hadoop/.ssh
这个启动基本在sbin目录了,
格式化:
1
hadoop namenode -format
启动
1
start-all.sh
启动hdfs:
1
start-dfs.sh
启动mapreduce
1
start-yarn.sh
启动historyserver
1
mr-jobhistory-daemon.sh start historyserver
WordCount 测试
./hadoop fs -put /etc/profile /user/hadoop/input
./hadoop jar ../share/hadoop/mapreduce/hadoop-mapreduce-examples-2.0.0-cdh4.4.0.jar wordcount input output
输出结果
访问:
http://yard02:50070/
http://yard03:50090
http://yard02:8088/cluster
http://yard02:8088/cluster/apps
http://yard02:8088/cluster/nodes
http://yard02:8088/cluster/scheduler
http://blog.csdn.net/qiaochao911/article/details/9143303
http://blog.javachen.com/hadoop/2013/03/24/manual-install-Cloudera-Hadoop-CDH/