Virtualization software: VMware Server 2.0
Operating system: Red Hat Enterprise Linux Server 5.3 (32-bit)
Hadoop version: 0.20.2
JDK version: 1.7 (7u45)
Note: watch the prompt in front of each command to see which user it is run as.
1. First, check the virtual machine's network configuration
[root@hadoop ~]# cat /etc/hosts
# Do not remove the following line, or various programs
# that require network functionality will fail.
127.0.0.1 localhost
10.10.10.200 hadoop
[root@hadoop ~]# cat /etc/sysconfig/network
NETWORKING=yes
NETWORKING_IPV6=no
HOSTNAME=hadoop
[root@hadoop ~]# cat /etc/sysconfig/network-scripts/ifcfg-eth0
# Advanced Micro Devices [AMD] 79c970 [PCnet32 LANCE]
DEVICE=eth0
BOOTPROTO=none
ONBOOT=yes
HWADDR=00:0c:29:2c:c8:df
NETMASK=255.255.255.0
IPADDR=10.10.10.200
TYPE=Ethernet
USERCTL=no
IPV6INIT=no
PEERDNS=yes
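To confirm that these settings are in effect, it may help to check that the hostname resolves to the static IP, for example:
hostname          # should print: hadoop
ping -c 2 hadoop  # should resolve to 10.10.10.200 via /etc/hosts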
2. Create the hadoop group and user
[root@hadoop ~]# groupadd hadoop
[root@hadoop ~]# useradd hadoop
[root@hadoop ~]# passwd hadoop
Changing password for user hadoop.
New UNIX password:
BAD PASSWORD: it is based on a dictionary word
Retype new UNIX password:
passwd: all authentication tokens updated successfully.
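On RHEL, useradd creates a private group with the user's name by default, so the preceding groupadd may cause useradd to complain that the group already exists. If that happens, assigning the existing group explicitly should work (a sketch; the exact error text may vary):
useradd -g hadoop hadoop   # put the new user into the already-created hadoop group
id hadoop                  # verify: uid=...(hadoop) gid=...(hadoop) groups=hadoop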
3. Upload the software that will be needed. Here it is uploaded over SMB (Samba) into the software directory under the hadoop user's home directory.
[hadoop@hadoop software]$ pwd
/home/hadoop/software
[hadoop@hadoop software]$ ll
total 162096
-rwxr--r-- 1 hadoop hadoop 44575568 Feb 3 15:24 hadoop-0.20.2.tar.gz
-rwxr--r-- 1 hadoop hadoop 121236291 Jan 3 11:15 jdk-7u45-linux-i586.rpm
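If no Samba share is available, copying the two installers with scp from the machine that holds them works just as well (a sketch; the source file locations are assumed):
scp hadoop-0.20.2.tar.gz jdk-7u45-linux-i586.rpm hadoop@10.10.10.200:/home/hadoop/software/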
4. Install the JDK
[root@hadoop ~]# cd /home/hadoop/software/
[root@hadoop software]# ll
total 162096
-rwxr--r-- 1 hadoop hadoop 44575568 Feb 3 15:24 hadoop-0.20.2.tar.gz
-rwxr--r-- 1 hadoop hadoop 121236291 Jan 3 11:15 jdk-7u45-linux-i586.rpm
[root@hadoop software]# rpm -ivh jdk-7u45-linux-i586.rpm
Preparing... ########################################### [100%]
1:jdk ########################################### [100%]
Unpacking JAR files...
rt.jar...
jsse.jar...
charsets.jar...
tools.jar...
localedata.jar...
jfxrt.jar...
plugin.jar...
javaws.jar...
deploy.jar...
[root@hadoop software]# vi /etc/profile
export JAVA_HOME=/usr/java/jdk1.7.0_45
export PATH=$JAVA_HOME/bin:$PATH
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
[root@hadoop software]# source /etc/profile
[root@hadoop software]# java -version
java version "1.7.0_45"
Java(TM) SE Runtime Environment (build 1.7.0_45-b18)
Java HotSpot(TM) Client VM (build 24.45-b08, mixed mode, sharing)
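Since /etc/profile is only read by login shells, it is worth re-checking the variables in a fresh session as well; a quick verification could look like this:
echo $JAVA_HOME   # expected: /usr/java/jdk1.7.0_45
which java        # should point into /usr/java/jdk1.7.0_45/bin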
5. Configure passwordless SSH login to the local machine (localhost) for the hadoop user
[hadoop@hadoop home]$ chmod -R 755 /home/hadoop
[hadoop@hadoop ~]$ mkdir ~/.ssh
[hadoop@hadoop ~]$ ssh-keygen -t rsa
Generating public/private rsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_rsa):
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/hadoop/.ssh/id_rsa.
Your public key has been saved in /home/hadoop/.ssh/id_rsa.pub.
The key fingerprint is:
4d:78:42:48:9c:4d:ef:eb:68:a1:09:83:b1:53:65:bb hadoop@hadoop
[hadoop@hadoop ~]$ ssh-keygen -t dsa
Generating public/private dsa key pair.
Enter file in which to save the key (/home/hadoop/.ssh/id_dsa):
Enter passphrase (empty for no passphrase):
Enter same passphrase again:
Your identification has been saved in /home/hadoop/.ssh/id_dsa.
Your public key has been saved in /home/hadoop/.ssh/id_dsa.pub.
The key fingerprint is:
37:4b:d9:0f:4c:fa:28:4b:c3:2a:bd:d1:24:85:11:d5 hadoop@hadoop
[hadoop@hadoop ~]$ cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
[hadoop@hadoop ~]$ cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
[hadoop@hadoop ~]$ chmod 700 ~/.ssh
[hadoop@hadoop ~]$ ssh localhost date
The authenticity of host 'localhost (127.0.0.1)' can't be established.
RSA key fingerprint is 99:ef:c2:9e:28:e3:b6:83:e2:00:eb:a3:ee:ad:29:d8.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'localhost' (RSA) to the list of known hosts.
Mon Feb 3 23:55:31 CST 2014
[hadoop@hadoop ~]$ ssh localhost date
Mon Feb 3 23:55:33 CST 2014
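If ssh localhost still prompts for a password at this point, the usual culprit is file permissions: sshd ignores an authorized_keys file that is group- or world-writable. Tightening the permissions (a common fix, not part of the original transcript) generally resolves it:
chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys
ls -l ~/.ssh   # the private keys and authorized_keys should show -rw-------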
6. Install Hadoop
[hadoop@hadoop software]$ pwd
/home/hadoop/software
[hadoop@hadoop software]$ ll
total 162096
-rwxr-xr-x 1 hadoop hadoop 44575568 Feb 3 15:24 hadoop-0.20.2.tar.gz
-rwxr-xr-x 1 hadoop hadoop 121236291 Jan 3 11:15 jdk-7u45-linux-i586.rpm
[hadoop@hadoop software]$ tar -zxvf hadoop-0.20.2.tar.gz
[hadoop@hadoop software]$ ll
total 162100
drwxr-xr-x 12 hadoop hadoop 4096 Feb 19 2010 hadoop-0.20.2
-rwxr-xr-x 1 hadoop hadoop 44575568 Feb 3 15:24 hadoop-0.20.2.tar.gz
-rwxr-xr-x 1 hadoop hadoop 121236291 Jan 3 11:15 jdk-7u45-linux-i586.rpm
[hadoop@hadoop software]$ mv hadoop-0.20.2 /home/hadoop
[hadoop@hadoop software]$ cd
[hadoop@hadoop ~]$ ll
total 8
drwxr-xr-x 12 hadoop hadoop 4096 Feb 19 2010 hadoop-0.20.2
drwxr-xr-x 2 hadoop hadoop 4096 Feb 3 23:59 software
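Optionally, adding the Hadoop bin directory to the hadoop user's PATH avoids having to prefix every script with ./ later on. A sketch for ~/.bash_profile (the HADOOP_HOME variable name is the usual convention for this release and is not set anywhere else in this walkthrough):
export HADOOP_HOME=/home/hadoop/hadoop-0.20.2
export PATH=$HADOOP_HOME/bin:$PATH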
7. Configure Hadoop
[hadoop@hadoop conf]$ pwd
/home/hadoop/hadoop-0.20.2/conf
[hadoop@hadoop conf]$ ll
total 56
-rw-rw-r-- 1 hadoop hadoop 3936 Feb 19 2010 capacity-scheduler.xml
-rw-rw-r-- 1 hadoop hadoop 535 Feb 19 2010 configuration.xsl
-rw-rw-r-- 1 hadoop hadoop 178 Feb 19 2010 core-site.xml
-rw-rw-r-- 1 hadoop hadoop 2237 Feb 19 2010 hadoop-env.sh
-rw-rw-r-- 1 hadoop hadoop 1245 Feb 19 2010 hadoop-metrics.properties
-rw-rw-r-- 1 hadoop hadoop 4190 Feb 19 2010 hadoop-policy.xml
-rw-rw-r-- 1 hadoop hadoop 178 Feb 19 2010 hdfs-site.xml
-rw-rw-r-- 1 hadoop hadoop 2815 Feb 19 2010 log4j.properties
-rw-rw-r-- 1 hadoop hadoop 178 Feb 19 2010 mapred-site.xml
-rw-rw-r-- 1 hadoop hadoop 10 Feb 19 2010 masters
-rw-rw-r-- 1 hadoop hadoop 10 Feb 19 2010 slaves
-rw-rw-r-- 1 hadoop hadoop 1243 Feb 19 2010 ssl-client.xml.example
-rw-rw-r-- 1 hadoop hadoop 1195 Feb 19 2010 ssl-server.xml.example
[hadoop@hadoop conf]$ vi hadoop-env.sh
# The java implementation to use. Required.
# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
export JAVA_HOME=/usr/java/jdk1.7.0_45
[hadoop@hadoop conf]$ vi core-site.xml
<configuration>
    <property>
        <name>fs.default.name</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
[hadoop@hadoop conf]$ vi hdfs-site.xml
<configuration>
    <property>
        <name>dfs.data.dir</name>
        <value>/home/hadoop/hadoop-data</value>
    </property>
    <property>
        <name>dfs.name.dir</name>
        <value>/home/hadoop/hadoop-name</value>
    </property>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
[hadoop@hadoop conf]$ vi mapred-site.xml
<configuration>
    <property>
        <name>mapred.job.tracker</name>
        <value>localhost:9001</value>
    </property>
</configuration>
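In these three files, fs.default.name tells clients where the NameNode listens, dfs.name.dir and dfs.data.dir place the NameNode and DataNode storage under the hadoop user's home, dfs.replication is 1 because there is only a single DataNode, and mapred.job.tracker gives the JobTracker address. For this pseudo-distributed setup, conf/masters and conf/slaves can stay at their defaults, which should both contain only localhost; a quick check:
cat masters   # expected: localhost
cat slaves    # expected: localhost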
8. Format HDFS
[hadoop@hadoop bin]$ pwd
/home/hadoop/hadoop-0.20.2/bin
[hadoop@hadoop bin]$ ll
total 64
-rwxr-xr-x 1 hadoop hadoop 9998 Feb 19 2010 hadoop
-rwxr-xr-x 1 hadoop hadoop 1966 Feb 19 2010 hadoop-config.sh
-rwxr-xr-x 1 hadoop hadoop 3690 Feb 19 2010 hadoop-daemon.sh
-rwxr-xr-x 1 hadoop hadoop 1227 Feb 19 2010 hadoop-daemons.sh
-rwxr-xr-x 1 hadoop hadoop 2710 Feb 19 2010 rcc
-rwxr-xr-x 1 hadoop hadoop 2043 Feb 19 2010 slaves.sh
-rwxr-xr-x 1 hadoop hadoop 1066 Feb 19 2010 start-all.sh
-rwxr-xr-x 1 hadoop hadoop 965 Feb 19 2010 start-balancer.sh
-rwxr-xr-x 1 hadoop hadoop 1645 Feb 19 2010 start-dfs.sh
-rwxr-xr-x 1 hadoop hadoop 1159 Feb 19 2010 start-mapred.sh
-rwxr-xr-x 1 hadoop hadoop 1019 Feb 19 2010 stop-all.sh
-rwxr-xr-x 1 hadoop hadoop 1016 Feb 19 2010 stop-balancer.sh
-rwxr-xr-x 1 hadoop hadoop 1146 Feb 19 2010 stop-dfs.sh
-rwxr-xr-x 1 hadoop hadoop 1068 Feb 19 2010 stop-mapred.sh
[hadoop@hadoop bin]$ ./hadoop namenode -format
14/02/04 00:20:44 INFO namenode.NameNode: STARTUP_MSG:
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG: host = hadoop/10.10.10.200
STARTUP_MSG: args = [-format]
STARTUP_MSG: version = 0.20.2
STARTUP_MSG: build = https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20 -r 911707; compiled by 'chrisdo' on Fri Feb 19 08:07:34 UTC 2010
************************************************************/
14/02/04 00:20:44 INFO namenode.FSNamesystem: fsOwner=hadoop,hadoop
14/02/04 00:20:44 INFO namenode.FSNamesystem: supergroup=supergroup
14/02/04 00:20:44 INFO namenode.FSNamesystem: isPermissionEnabled=true
14/02/04 00:20:44 INFO common.Storage: Image file of size 96 saved in 0 seconds.
14/02/04 00:20:45 INFO common.Storage: Storage directory /home/hadoop/hadoop-name has been successfully formatted.
14/02/04 00:20:45 INFO namenode.NameNode: SHUTDOWN_MSG:
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at hadoop/10.10.10.200
************************************************************/
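After a successful format, the directory configured as dfs.name.dir should now exist; a quick sanity check (the file names in the comment are what this release typically writes, listed only as a guide):
ls /home/hadoop/hadoop-name/current   # typically: VERSION  edits  fsimage  fstime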
9. Start Hadoop
[hadoop@hadoop bin]$ ./start-all.sh
starting namenode, logging to /home/hadoop/hadoop-0.20.2/bin/../logs/hadoop-hadoop-namenode-hadoop.out
localhost: starting datanode, logging to /home/hadoop/hadoop-0.20.2/bin/../logs/hadoop-hadoop-datanode-hadoop.out
localhost: starting secondarynamenode, logging to /home/hadoop/hadoop-0.20.2/bin/../logs/hadoop-hadoop-secondarynamenode-hadoop.out
starting jobtracker, logging to /home/hadoop/hadoop-0.20.2/bin/../logs/hadoop-hadoop-jobtracker-hadoop.out
localhost: starting tasktracker, logging to /home/hadoop/hadoop-0.20.2/bin/../logs/hadoop-hadoop-tasktracker-hadoop.out
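Besides jps (next step), the web interfaces are a convenient health check; with the default ports of this release they should be reachable at:
http://10.10.10.200:50070/   # NameNode / HDFS status
http://10.10.10.200:50030/   # JobTracker / MapReduce status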
10. Check the Hadoop processes
[hadoop@hadoop bin]$ jps
9176 NameNode
9501 JobTracker
9322 DataNode
9771 Jps
9435 SecondaryNameNode
9615 TaskTracker
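All five daemons are running alongside Jps, so the single-node cluster looks healthy. As a final smoke test (a sketch; the HDFS paths /wc-in and /wc-out are arbitrary examples), the bundled wordcount job can be run from the same bin directory, and stop-all.sh shuts everything down when finished:
./hadoop fs -mkdir /wc-in
./hadoop fs -put ../conf/*.xml /wc-in
./hadoop jar ../hadoop-0.20.2-examples.jar wordcount /wc-in /wc-out
./hadoop fs -cat '/wc-out/part*'
./stop-all.sh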