hadoop 安装配置学习

tar -xzvf hadoop-2.6.0.tar.gz 
vi /etc/profile
export HADOOP_HOME=/data/hadoop-2.6.0
export JAVA_HOME=/usr/java/jdk1.6.0_45
export JRE_HOME=/usr/java/jdk1.6.0_45/jre
export JAVA_BIN=/usr/java/jdk1.6.0_45/bin
export MAVEN_HOME=/data/apache-maven-3.0.5
export PATH=$PATH:$JAVA_HOME/bin:$MAVEN_HOME/bin:$HADOOP_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$HADOOP_HOME/lib
export JAVA_HOME JAVA_BIN PATH CLASSPATH

查看状态

[root@localhost ~]# hadoop version
Hadoop 2.6.0
Subversion https://git-wip-us.apache.org/repos/asf/hadoop.git -r e3496499ecb8d220fba99dc5ed4c99c8f9e33bb1
Compiled by jenkins on 2014-11-13T21:10Z
Compiled with protoc 2.5.0
From source with checksum 18e43357c8f927c0695f1e9522859d6a
This command was run using /data/hadoop-2.6.0/share/hadoop/common/hadoop-common-2.6.0.jar

[root@localhost ~]# ssh-keygen -t rsa  # 一直按回车就可以,生成的密钥保存为.ssh/id_rsa
[root@localhost ~]# cd /root/.ssh/
[root@localhost ~]# cp id_rsa.pub authorized_keys
[root@localhost ~]# ssh localhost
Last login: Tue Jan 27 13:48:06 2015 from 192.168.2.56

vi hadoop-2.6.0/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/java/jdk1.6.0_45
vi hadoop-2.6.0/etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/data/hadoop-2.6.0/tmp</value>
        <description>Abase for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
vi hadoop-2.6.0/etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/data/hadoop-2.6.0/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/data/hadoop-2.6.0/dfs/data</value>
    </property>
</configuration>
vi hadoop-2.6.0/etc/hadoop/mapred-site.xml
<configuration>
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
</property>
 <property>
  <name>mapreduce.jobhistory.address</name>
  <value>master:10020</value>
 </property>
 <property>
  <name>mapreduce.jobhistory.webapp.address</name>
  <value>master:19888</value>
 </property>
</configuration>
vi hadoop-2.6.0/etc/hadoop/yarn-site.xml
<configuration>
<property>
   <name>yarn.nodemanager.aux-services</name>
   <value>mapreduce_shuffle</value>
 </property>
 <property>
   <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
   <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
  <property>
   <name>yarn.resourcemanager.address</name>
   <value>master:8032</value>
  </property>
  <property>
   <name>yarn.resourcemanager.scheduler.address</name>
   <value>master:8030</value>
  </property>
  <property>
   <name>yarn.resourcemanager.resource-tracker.address</name>
   <value>master:8035</value>
  </property>
  <property>
   <name>yarn.resourcemanager.admin.address</name>
   <value>master:8033</value>
  </property>
  <property>
   <name>yarn.resourcemanager.webapp.address</name>
   <value>master:8088</value>
  </property>
</configuration>
cd /data/hadoop-2.6.0
mkdir tmp dfs dfs/name dfs/data
bin/hdfs namenode -format
sbin/start-dfs.sh #输入jps,以检查各个节点是否就绪
 

[root@master bin]# ./hadoop fs -ls hdfs://master:9000/
[root@master bin]# ./hadoop fs -lsr hdfs://master:9000/

[root@master bin]#  mkdir input
[root@master bin]# vi input/f1 
Hello world  bye jj
[root@master bin]# vi input/f2
Hello Hadoop  bye Hadoop
[root@master bin]# ./hadoop fs  -mkdir /tmp
[root@master bin]# ./hadoop fs  -mkdir /tmp/input
[root@master bin]# ./hadoop fs  -put input/ /tmp
[root@master bin]# ./hadoop fs -ls /tmp/input/
15/10/27 23:09:02 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Found 2 items
-rw-r--r--   1 root supergroup         20 2015-10-27 23:08 /tmp/input/f1
-rw-r--r--   1 root supergroup         19 2015-10-27 23:08 /tmp/input/f2
[root@master bin]# ./hadoop  jar /root/hadoop-2.6.0/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.6.0.jar wordcount /tmp/input /output
15/10/28 01:51:20 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/10/28 01:51:21 INFO client.RMProxy: Connecting to ResourceManager at master/192.168.1.228:8032
15/10/28 01:51:22 INFO input.FileInputFormat: Total input paths to process : 2
15/10/28 01:51:23 INFO mapreduce.JobSubmitter: number of splits:2
15/10/28 01:51:24 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1446022129296_0001
15/10/28 01:51:24 INFO impl.YarnClientImpl: Submitted application application_1446022129296_0001
15/10/28 01:51:24 INFO mapreduce.Job: The url to track the job: http://master:8088/proxy/application_1446022129296_0001/
15/10/28 01:51:24 INFO mapreduce.Job: Running job: job_1446022129296_0001
15/10/28 01:51:43 INFO mapreduce.Job: Job job_1446022129296_0001 running in uber mode : false
15/10/28 01:51:43 INFO mapreduce.Job:  map 0% reduce 0%
15/10/28 01:52:20 INFO mapreduce.Job:  map 100% reduce 0%
15/10/28 01:52:35 INFO mapreduce.Job:  map 100% reduce 100%
15/10/28 01:52:36 INFO mapreduce.Job: Job job_1446022129296_0001 completed successfully
15/10/28 01:52:36 INFO mapreduce.Job: Counters: 49
        File System Counters
                FILE: Number of bytes read=72
                FILE: Number of bytes written=316895
                FILE: Number of read operations=0
                FILE: Number of large read operations=0
                FILE: Number of write operations=0
                HDFS: Number of bytes read=231
                HDFS: Number of bytes written=36
                HDFS: Number of read operations=9
                HDFS: Number of large read operations=0
                HDFS: Number of write operations=2
        Job Counters 
                Launched map tasks=2
                Launched reduce tasks=1
                Data-local map tasks=2
                Total time spent by all maps in occupied slots (ms)=70434
                Total time spent by all reduces in occupied slots (ms)=8868
                Total time spent by all map tasks (ms)=70434
                Total time spent by all reduce tasks (ms)=8868
                Total vcore-seconds taken by all map tasks=70434
                Total vcore-seconds taken by all reduce tasks=8868
                Total megabyte-seconds taken by all map tasks=72124416
                Total megabyte-seconds taken by all reduce tasks=9080832
        Map-Reduce Framework
                Map input records=2
                Map output records=7
                Map output bytes=65
                Map output materialized bytes=78
                Input split bytes=192
                Combine input records=7
                Combine output records=6
                Reduce input groups=5
                Reduce shuffle bytes=78
                Reduce input records=6
                Reduce output records=5
                Spilled Records=12
                Shuffled Maps =2
                Failed Shuffles=0
                Merged Map outputs=2
                GC time elapsed (ms)=299
                CPU time spent (ms)=15590
                Physical memory (bytes) snapshot=494714880
                Virtual memory (bytes) snapshot=2565672960
                Total committed heap usage (bytes)=257171456
        Shuffle Errors
                BAD_ID=0
                CONNECTION=0
                IO_ERROR=0
                WRONG_LENGTH=0
                WRONG_MAP=0
                WRONG_REDUCE=0
        File Input Format Counters 
                Bytes Read=39
        File Output Format Counters 
                Bytes Written=36
[root@master bin]# ./hadoop fs -ls /output/
15/10/28 01:55:03 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Found 2 items
-rw-r--r--   1 root supergroup          0 2015-10-28 01:52 /output/_SUCCESS
-rw-r--r--   1 root supergroup         36 2015-10-28 01:52 /output/part-r-00000
[root@master bin]# ./hadoop fs -cat /output/part-r-0000
15/10/28 01:55:35 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
cat: `/output/part-r-0000': No such file or directory
[root@master bin]# ./hadoop fs -cat /output/part-r-00000
15/10/28 01:56:48 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Hadoop  2
Hello   1
bye     2
jj      1
world   1
[root@master bin]# ./hdfs dfsadmin -report
15/10/28 02:08:25 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Configured Capacity: 50486104064 (47.02 GB)
Present Capacity: 43185094656 (40.22 GB)
DFS Remaining: 43183263744 (40.22 GB)
DFS Used: 1830912 (1.75 MB)
DFS Used%: 0.00%
Under replicated blocks: 49
Blocks with corrupt replicas: 0
Missing blocks: 0

-------------------------------------------------
Live datanodes (1):

Name: 192.168.1.238:50010 (slave)
Hostname: slave
Decommission Status : Normal
Configured Capacity: 50486104064 (47.02 GB)
DFS Used: 1830912 (1.75 MB)
Non DFS Used: 7301009408 (6.80 GB)
DFS Remaining: 43183263744 (40.22 GB)
DFS Used%: 0.00%
DFS Remaining%: 85.53%
Configured Cache Capacity: 0 (0 B)
Cache Used: 0 (0 B)
Cache Remaining: 0 (0 B)
Cache Used%: 100.00%
Cache Remaining%: 0.00%
Xceivers: 1
Last contact: Wed Oct 28 02:08:23 PDT 2015


linux修改主机名

第一步:
#hostname oratest 
第二步:
修改/etc/sysconfig/network中的hostname
第三步:
修改/etc/hosts文件

http://wiki.eclipse.org/EGit/FAQ

http://www.cnblogs.com/yjmyzz/p/4280069.html

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值