root@master:/home/duanwf# useradd --create-home hadoop
root@master:/home/duanwf# passwd hadoop
root@master:~# vi /etc/sudoers
# User privilege specification
root ALL=(ALL:ALL) ALL
duanwf ALL=(ALL:ALL) ALL
hadoop ALL=(ALL:ALL) ALL
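A safer way to edit the privileges file is visudo, which syntax-checks /etc/sudoers before saving (a broken sudoers file can lock you out of sudo entirely). This is an optional aside, not part of the original transcript:
root@master:~# visudo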
1.4 Set a static IP address for each machine
Refer to the blog post "Vmware下Ubuntu 14.04静态IP地址的设置方法" (setting a static IP address for Ubuntu 14.04 under VMware).
1.5 Set the hostname of each machine
Open the /etc/hostname file:
root@master:~# vi /etc/hostname
master
Change the machine name in /etc/hostname to whatever name you want for that machine; the change takes effect after a reboot.
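To rename the running system immediately as well, without waiting for the reboot, the hostname command can be used (an optional extra step, not in the original transcript):
root@master:~# hostname master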
1.6 Add the hostnames configured above to /etc/hosts on all three machines
root@master:~# vi /etc/hosts
127.0.0.1 localhost
127.0.1.1 ubuntu
# The following lines are desirable for IPv6 capable hosts
::1 ip6-localhost ip6-loopback
fe00::0 ip6-localnet
ff00::0 ip6-mcastprefix
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters
192.168.174.160 master
192.168.174.161 slave1
192.168.174.162 slave2
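To sanity-check that the names now resolve, a quick ping from master is enough, for example:
duanwf@master:~$ ping -c 1 slave1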
1.7 Set up passwordless SSH login
Install SSH:
duanwf@master:~$ sudo apt-get install ssh
Check that SSH installed successfully, and its version:
duanwf@master:~$ ssh -V
OpenSSH_6.6.1p1 Ubuntu-2ubuntu2, OpenSSL 1.0.1f 6 Jan 2014
After installation there should be a hidden .ssh directory under ~ (the current user's home directory, /home/duanwf in this session; ls -a lists hidden files). If it is not there, create it yourself (mkdir .ssh).
duanwf@master:~$ cd ~
duanwf@master:~$ ls -a
duanwf@master:~$ mkdir .ssh
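If you create .ssh by hand, it is worth tightening its permissions right away, since sshd refuses to use keys in a directory that other users can write to (a standard OpenSSH requirement, not shown in the original):
duanwf@master:~$ chmod 700 .ssh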
Enter the .ssh directory:
duanwf@master:~$ cd .ssh
Generate the key pair:
duanwf@master:~/.ssh$ ssh-keygen -t rsa
Generating public/private rsa key pair.
Enter file in which to save the key (/home/duanwf/.ssh/id_rsa):
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/duanwf/.ssh/id_rsa.
Your public key has been saved in /home/duanwf/.ssh/id_rsa.pub.
The key fingerprint is:
49:ad:12:42:36:15:c8:f6:42:08:c1:d9:a6:04:27:a1 duanwf@master
The key's randomart image is:
+--[ RSA 2048]----+
|O++o+oo. |
|.*.==. . |
|E oo... . . |
| . ...o o |
| .. S |
| . |
| |
| |
| |
+-----------------+
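Before the scp below, the freshly generated public key has to be appended to authorized_keys; the original transcript skips this step, but it amounts to:
duanwf@master:~/.ssh$ cat id_rsa.pub >> authorized_keys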
duanwf@master:~/.ssh$ scp authorized_keys duanwf@slave1:~/.ssh/authorized_keys_from_master
The authenticity of host 'slave1 (192.168.174.161)' can't be established.
ECDSA key fingerprint is 1f:c0:2a:ed:c1:7b:6e:26:46:e3:c3:b6:87:bb:99:42.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'slave1,192.168.174.161' (ECDSA) to the list of known hosts.
duanwf@slave1's password:
authorized_keys 100% 395 0.4KB/s 00:00
duanwf@master:~/.ssh$ scp authorized_keys duanwf@slave2:~/.ssh/authorized_keys_from_master
The authenticity of host 'slave2 (192.168.174.162)' can't be established.
ECDSA key fingerprint is 1f:c0:2a:ed:c1:7b:6e:26:46:e3:c3:b6:87:bb:99:42.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'slave2,192.168.174.162' (ECDSA) to the list of known hosts.
duanwf@slave2's password:
authorized_keys 100% 395 0.4KB/s 00:00
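As an aside, OpenSSH also ships ssh-copy-id, which appends your public key to the remote authorized_keys in a single step instead of the scp-then-cat sequence used here:
duanwf@master:~$ ssh-copy-id duanwf@slave1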
Enter the .ssh directory on slave1 and slave2 and append the key copied from master:
duanwf@slave1:~$ cd .ssh
duanwf@slave1:~/.ssh$ ssh -V
OpenSSH_6.6.1p1 Ubuntu-2ubuntu2, OpenSSL 1.0.1f 6 Jan 2014
duanwf@slave1:~/.ssh$ cat authorized_keys_from_master >> authorized_keys
duanwf@slave1:~/.ssh$ ls
authorized_keys authorized_keys_from_master
duanwf@slave2:~/.ssh$ ssh -V
OpenSSH_6.6.1p1 Ubuntu-2ubuntu2, OpenSSL 1.0.1f 6 Jan 2014
duanwf@slave2:~/.ssh$ cat authorized_keys_from_master >> authorized_keys
duanwf@slave2:~/.ssh$ ls
authorized_keys authorized_keys_from_master
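If ssh still asks for a password after this, the usual culprit is file permissions: sshd ignores an authorized_keys file that is group- or world-writable. Tightening it on each slave rules that out:
duanwf@slave1:~/.ssh$ chmod 600 authorized_keys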
Verify passwordless SSH login:
duanwf@master:~/.ssh$ ssh slave1
Welcome to Ubuntu 14.04.1 LTS (GNU/Linux 3.13.0-32-generic i686)
* Documentation: https://help.ubuntu.com/
208 packages can be updated.
110 updates are security updates.
Last login: Tue Oct 7 18:25:31 2014 from 192.168.174.1
hadoop@master:~$ hadoop
Usage: hadoop [--config confdir] COMMAND
       where COMMAND is one of:
  fs                   run a generic filesystem user client
  version              print the version
  jar <jar>            run a jar file
  checknative [-a|-h]  check native hadoop and compression libraries availability
  distcp <srcurl> <desturl> copy file or directories recursively
  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive
  classpath            prints the class path needed to get the
                       Hadoop jar and the required libraries
  daemonlog            get/set the log level for each daemon
 or
  CLASSNAME            run the class named CLASSNAME

Most commands print help when invoked w/o parameters.
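Before touching HDFS, the version subcommand listed above gives a quick confirmation that the installation is intact:
hadoop@master:~$ hadoop version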
Format HDFS:
hadoop@master:~/hadoop-2.4.1$ ./bin/hdfs namenode -format
14/10/08 18:43:05 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG: host = master/192.168.174.160
STARTUP_MSG: args = [-format]
STARTUP_MSG: version = 2.4.1
STARTUP_MSG: classpath = /home/hadoop/hadoop-2.4.1//etc/hadoop:/home/hadoop/hadoop-2.4.1//share/hadoop/common/lib/... (several hundred jar paths elided)
STARTUP_MSG: build = http://svn.apache.org/repos/asf/hadoop/common -r 1604318; compiled by 'jenkins' on 2014-06-21T05:43Z
STARTUP_MSG: java = 1.7.0_51
************************************************************/
14/10/08 18:43:05 INFO namenode.NameNode: registered UNIX signal handlers for [TERM, HUP, INT]
14/10/08 18:43:05 INFO namenode.NameNode: createNameNode [-format]
14/10/08 18:43:06 WARN common.Util: Path /home/hadoop/dfs/name/ should be specified as a URI in configuration files. Please update hdfs configuration.
14/10/08 18:43:06 WARN common.Util: Path /home/hadoop/dfs/name/ should be specified as a URI in configuration files. Please update hdfs configuration.
Formatting using clusterid: CID-f1441872-89ef-4733-98df-454c18da5043
14/10/08 18:43:06 INFO namenode.FSNamesystem: fsLock is fair:true
14/10/08 18:43:06 INFO namenode.HostFileManager: read includes:
HostSet(
)
14/10/08 18:43:06 INFO namenode.HostFileManager: read excludes:
HostSet(
)
14/10/08 18:43:06 INFO blockmanagement.DatanodeManager: dfs.block.invalidate.limit=1000
14/10/08 18:43:06 INFO blockmanagement.DatanodeManager: dfs.namenode.datanode.registration.ip-hostname-check=true
14/10/08 18:43:06 INFO util.GSet: Computing capacity for map BlocksMap
14/10/08 18:43:06 INFO util.GSet: VM type = 32-bit
14/10/08 18:43:06 INFO util.GSet: 2.0% max memory 966.7 MB = 19.3 MB
14/10/08 18:43:06 INFO util.GSet: capacity = 2^22 = 4194304 entries
14/10/08 18:43:06 INFO blockmanagement.BlockManager: dfs.block.access.token.enable=false
14/10/08 18:43:06 INFO blockmanagement.BlockManager: defaultReplication = 3
14/10/08 18:43:06 INFO blockmanagement.BlockManager: maxReplication = 512
14/10/08 18:43:06 INFO blockmanagement.BlockManager: minReplication = 1
14/10/08 18:43:06 INFO blockmanagement.BlockManager: maxReplicationStreams = 2
14/10/08 18:43:06 INFO blockmanagement.BlockManager: shouldCheckForEnoughRacks = false
14/10/08 18:43:06 INFO blockmanagement.BlockManager: replicationRecheckInterval = 3000
14/10/08 18:43:06 INFO blockmanagement.BlockManager: encryptDataTransfer = false
14/10/08 18:43:06 INFO blockmanagement.BlockManager: maxNumBlocksToLog = 1000
14/10/08 18:43:06 INFO namenode.FSNamesystem: fsOwner = hadoop (auth:SIMPLE)
14/10/08 18:43:06 INFO namenode.FSNamesystem: supergroup = supergroup
14/10/08 18:43:06 INFO namenode.FSNamesystem: isPermissionEnabled = true
14/10/08 18:43:06 INFO namenode.FSNamesystem: HA Enabled: false
14/10/08 18:43:06 INFO namenode.FSNamesystem: Append Enabled: true
14/10/08 18:43:06 INFO util.GSet: Computing capacity for map INodeMap
14/10/08 18:43:06 INFO util.GSet: VM type = 32-bit
14/10/08 18:43:06 INFO util.GSet: 1.0% max memory 966.7 MB = 9.7 MB
14/10/08 18:43:06 INFO util.GSet: capacity = 2^21 = 2097152 entries
14/10/08 18:43:06 INFO namenode.NameNode: Caching file names occuring more than 10 times
14/10/08 18:43:06 INFO util.GSet: Computing capacity for map cachedBlocks
14/10/08 18:43:06 INFO util.GSet: VM type = 32-bit
14/10/08 18:43:06 INFO util.GSet: 0.25% max memory 966.7 MB = 2.4 MB
14/10/08 18:43:06 INFO util.GSet: capacity = 2^19 = 524288 entries
14/10/08 18:43:06 INFO namenode.FSNamesystem: dfs.namenode.safemode.threshold-pct = 0.9990000128746033
14/10/08 18:43:06 INFO namenode.FSNamesystem: dfs.namenode.safemode.min.datanodes = 0
14/10/08 18:43:06 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension = 30000
14/10/08 18:43:06 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
14/10/08 18:43:06 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
14/10/08 18:43:06 INFO util.GSet: Computing capacity for map NameNodeRetryCache
14/10/08 18:43:06 INFO util.GSet: VM type = 32-bit
14/10/08 18:43:06 INFO util.GSet: 0.029999999329447746% max memory 966.7 MB = 297.0 KB
14/10/08 18:43:06 INFO util.GSet: capacity = 2^16 = 65536 entries
14/10/08 18:43:06 INFO namenode.AclConfigFlag: ACLs enabled? false
Re-format filesystem in Storage Directory /home/hadoop/dfs/name ? (Y or N) Y
14/10/08 18:43:10 INFO namenode.FSImage: Allocated new BlockPoolId: BP-215877782-192.168.174.160-1412764990823
14/10/08 18:43:10 INFO common.Storage: Storage directory /home/hadoop/dfs/name has been successfully formatted.
14/10/08 18:43:11 INFO namenode.NNStorageRetentionManager: Going to retain 1 images with txid >= 0
14/10/08 18:43:11 INFO util.ExitUtil: Exiting with status 0
14/10/08 18:43:11 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at master/192.168.174.160
************************************************************/
hadoop@master:~/hadoop-2.4.1$ ./sbin/start-dfs.sh
Starting namenodes on [master]
The authenticity of host 'master (192.168.174.160)' can't be established.
ECDSA key fingerprint is 1f:c0:2a:ed:c1:7b:6e:26:46:e3:c3:b6:87:bb:99:42.
Are you sure you want to continue connecting (yes/no)? yes
master: Warning: Permanently added 'master,192.168.174.160' (ECDSA) to the list of known hosts.
hadoop@master's password:
master: mkdir: cannot create directory '/opt/hadoop-2.4.1/logs': Permission denied
master: chown: cannot access '/opt/hadoop-2.4.1/logs': No such file or directory
master: starting namenode, logging to /opt/hadoop-2.4.1/logs/hadoop-hadoop-namenode-master.out
master: /opt/hadoop-2.4.1/sbin/hadoop-daemon.sh: line 151: /opt/hadoop-2.4.1/logs/hadoop-hadoop-namenode-master.out: No such file or directory
master: head: cannot open '/opt/hadoop-2.4.1/logs/hadoop-hadoop-namenode-master.out' for reading: No such file or directory
master: /opt/hadoop-2.4.1/sbin/hadoop-daemon.sh: line 166: /opt/hadoop-2.4.1/logs/hadoop-hadoop-namenode-master.out: No such file or directory
master: /opt/hadoop-2.4.1/sbin/hadoop-daemon.sh: line 167: /opt/hadoop-2.4.1/logs/hadoop-hadoop-namenode-master.out: No such file or directory
The authenticity of host 'slave2 (192.168.174.162)' can't be established.
ECDSA key fingerprint is 1f:c0:2a:ed:c1:7b:6e:26:46:e3:c3:b6:87:bb:99:42.
Are you sure you want to continue connecting (yes/no)? The authenticity of host 'slave1 (192.168.174.161)' can't be established.
ECDSA key fingerprint is 1f:c0:2a:ed:c1:7b:6e:26:46:e3:c3:b6:87:bb:99:42.
Are you sure you want to continue connecting (yes/no)? yes
slave2: Warning: Permanently added 'slave2,192.168.174.162' (ECDSA) to the list of known hosts.
hadoop@slave2's password: Please type 'yes' or 'no':
slave1: Warning: Permanently added 'slave1,192.168.174.161' (ECDSA) to the list of known hosts.
hadoop@slave1's password:
slave2: mkdir: cannot create directory '/opt/hadoop-2.4.1/logs': Permission denied
slave2: chown: cannot access '/opt/hadoop-2.4.1/logs': No such file or directory
slave2: starting datanode, logging to /opt/hadoop-2.4.1/logs/hadoop-hadoop-datanode-slave2.out
slave2: /opt/hadoop-2.4.1/sbin/hadoop-daemon.sh: line 151: /opt/hadoop-2.4.1/logs/hadoop-hadoop-datanode-slave2.out: No such file or directory
slave2: head: cannot open '/opt/hadoop-2.4.1/logs/hadoop-hadoop-datanode-slave2.out' for reading: No such file or directory
slave2: /opt/hadoop-2.4.1/sbin/hadoop-daemon.sh: line 166: /opt/hadoop-2.4.1/logs/hadoop-hadoop-datanode-slave2.out: No such file or directory
slave2: /opt/hadoop-2.4.1/sbin/hadoop-daemon.sh: line 167: /opt/hadoop-2.4.1/logs/hadoop-hadoop-datanode-slave2.out: No such file or directory
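These failures are a permissions problem, not a Hadoop one: the hadoop user cannot create the logs directory inside /opt/hadoop-2.4.1, which is owned by root. One fix, assuming the hadoop user should own the whole installation, is to change ownership on master and on both slaves:
hadoop@master:~$ sudo chown -R hadoop:hadoop /opt/hadoop-2.4.1
Note also that the password prompts appear because the passwordless-SSH keys above were set up for the duanwf user, while the daemons are started as hadoop; repeating the key setup as the hadoop user removes them. With ownership fixed on all three machines, start-dfs.sh can be run again: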
hadoop@master:~/hadoop-2.4.1$ ./sbin/start-dfs.sh