1. wget http://apache.freelamp.com/hadoop/core/hadoop-0.19.1/hadoop-0.19.1.tar.gz
2. tar xvzf hadoop-0.19.1.tar.gz
3. vi conf/hadoop-env.sh
###add JAVA_HOME parameter
export JAVA_HOME=/usr/java/jdk1.6.0_13
4. bin/hadoop
###running bin/hadoop with no arguments just prints the usage message, which confirms JAVA_HOME is picked up
5. mkdir input
6. cp conf/*.xml input/
7. bin/hadoop jar hadoop-*-examples.jar grep input output 'dfs[a-z.]+'
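###the matches found by the grep example are written to the output/ directory created by the job; to view them:
cat output/*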
8. vi conf/core-site.xml
###add the following lines
<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>
9. vi conf/hdfs-site.xml
###add the following lines
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
10. vi conf/mapred-site.xml
###add the following lines
<configuration>
  <property>
    <name>mapred.job.tracker</name>
    <value>localhost:9001</value>
  </property>
</configuration>
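###before starting HDFS for the first time, the namenode has to be formatted; this is a one-time step that creates an empty filesystem (re-running it wipes any existing HDFS data):
bin/hadoop namenode -format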
11. set up passwordless ssh to localhost
ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
###alternatively, create a dedicated hadoop user and generate its RSA key (run the last two commands as that user so the key lands in its $HOME/.ssh)
groupadd hadoop
adduser -g hadoop hadoop
passwd hadoop
ssh-keygen -t rsa -P ""
cat $HOME/.ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys
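###verify that passwordless login works, then start (and later stop) all the daemons; a sketch using the standard scripts shipped in bin/:
ssh localhost
exit
bin/start-all.sh
###when finished
bin/stop-all.sh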
####Hadoop Web Interfaces
http://localhost:50030/ - web UI for MapReduce job tracker(s)
http://localhost:50060/ - web UI for task tracker(s)
http://localhost:50070/ - web UI for HDFS name node(s)
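###to confirm which daemons are running on the node, jps (shipped with the JDK) lists the Java processes; in pseudo-distributed mode expect NameNode, DataNode, SecondaryNameNode, JobTracker and TaskTracker:
jps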
####Hadoop kill job
hadoop job -kill <job_id>
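###the job id can be looked up with the job list command (it is also shown in the JobTracker web UI on port 50030):
hadoop job -list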