--安全模式状态命令
hadoop dfsadmin -safemode get 查看安全模式状态
hadoop dfsadmin -safemode enter 进入安全模式状态
hadoop dfsadmin -safemode leave 离开安全模式状态
---删除
[hadoop@Master bin]$ hadoop fs -rmr in
Deleted hdfs://192.168.150.2:9000/user/hadoop/in
--创建目录
[hadoop@Master ~]$ hadoop fs -mkdir in
--查看文件属性
[hadoop@Master ~]$ hadoop fs -ls
Found 2 items
drwxr-xr-x - hadoop supergroup 0 2014-10-28 15:26 /user/hadoop/in
drwxr-xr-x - hadoop supergroup 0 2014-10-28 15:26 /user/hadoop/out
----拷贝本地文件到hdfs文件系统中
[hadoop@Master ~]$ hadoop fs -put /home/hadoop/input/* ./in
[hadoop@Master ~]$ hadoop fs -ls ./in
Found 2 items
-rw-r--r-- 1 hadoop supergroup 12 2014-10-28 15:28 /user/hadoop/in/test1.txt
-rw-r--r-- 1 hadoop supergroup 13 2014-10-28 15:28 /user/hadoop/in/test2.txt
---查看hdfs文件系统中的内容
[hadoop@Master ~]$ hadoop fs -cat ./in/*
hello world
hello hadoop
-- wordcount
[hadoop@Master hadoop]$ pwd
/usr/hadoop
[hadoop@Master hadoop]$ hadoop jar hadoop-examples-1.2.1.jar wordcount in out
Warning: $HADOOP_HOME is deprecated.
14/10/28 15:45:59 INFO input.FileInputFormat: Total input paths to process : 2
14/10/28 15:45:59 INFO util.NativeCodeLoader: Loaded the native-hadoop library
14/10/28 15:45:59 WARN snappy.LoadSnappy: Snappy native library not loaded
14/10/28 15:46:01 INFO mapred.JobClient: Running job: job_201410281519_0010
14/10/28 15:46:02 INFO mapred.JobClient: map 0% reduce 0%
14/10/28 15:46:27 INFO mapred.JobClient: map 50% reduce 0%
14/10/28 15:46:43 INFO mapred.JobClient: map 50% reduce 16%
14/10/28 15:46:47 INFO mapred.JobClient: map 100% reduce 16%
14/10/28 15:46:51 INFO mapred.JobClient: map 100% reduce 100%
14/10/28 15:46:52 INFO mapred.JobClient: Job complete: job_201410281519_0010
14/10/28 15:46:52 INFO mapred.JobClient: Counters: 29
14/10/28 15:46:52 INFO mapred.JobClient: Job Counters
14/10/28 15:46:52 INFO mapred.JobClient: Launched reduce tasks=1
14/10/28 15:46:52 INFO mapred.JobClient: SLOTS_MILLIS_MAPS=54973
14/10/28 15:46:52 INFO mapred.JobClient: Total time spent by all reduces waiting after reserving slots (ms)=0
14/10/28 15:46:52 INFO mapred.JobClient: Total time spent by all maps waiting after reserving slots (ms)=0
14/10/28 15:46:52 INFO mapred.JobClient: Launched map tasks=2
14/10/28 15:46:52 INFO mapred.JobClient: Data-local map tasks=2
14/10/28 15:46:52 INFO mapred.JobClient: SLOTS_MILLIS_REDUCES=23420
14/10/28 15:46:52 INFO mapred.JobClient: File Output Format Counters
14/10/28 15:46:52 INFO mapred.JobClient: Bytes Written=25
14/10/28 15:46:52 INFO mapred.JobClient: FileSystemCounters
14/10/28 15:46:52 INFO mapred.JobClient: FILE_BYTES_READ=55
14/10/28 15:46:52 INFO mapred.JobClient: HDFS_BYTES_READ=255
14/10/28 15:46:52 INFO mapred.JobClient: FILE_BYTES_WRITTEN=164937
14/10/28 15:46:52 INFO mapred.JobClient: HDFS_BYTES_WRITTEN=25
14/10/28 15:46:52 INFO mapred.JobClient: File Input Format Counters
14/10/28 15:46:52 INFO mapred.JobClient: Bytes Read=25
14/10/28 15:46:52 INFO mapred.JobClient: Map-Reduce Framework
14/10/28 15:46:52 INFO mapred.JobClient: Map output materialized bytes=61
14/10/28 15:46:52 INFO mapred.JobClient: Map input records=2
14/10/28 15:46:52 INFO mapred.JobClient: Reduce shuffle bytes=61
14/10/28 15:46:52 INFO mapred.JobClient: Spilled Records=8
14/10/28 15:46:52 INFO mapred.JobClient: Map output bytes=41
14/10/28 15:46:52 INFO mapred.JobClient: Total committed heap usage (bytes)=413605888
14/10/28 15:46:52 INFO mapred.JobClient: CPU time spent (ms)=35300
14/10/28 15:46:52 INFO mapred.JobClient: Combine input records=4
14/10/28 15:46:52 INFO mapred.JobClient: SPLIT_RAW_BYTES=230
14/10/28 15:46:52 INFO mapred.JobClient: Reduce input records=4
14/10/28 15:46:52 INFO mapred.JobClient: Reduce input groups=3
14/10/28 15:46:52 INFO mapred.JobClient: Combine output records=4
14/10/28 15:46:52 INFO mapred.JobClient: Physical memory (bytes) snapshot=318337024
14/10/28 15:46:52 INFO mapred.JobClient: Reduce output records=3
14/10/28 15:46:52 INFO mapred.JobClient: Virtual memory (bytes) snapshot=1040879616
14/10/28 15:46:52 INFO mapred.JobClient: Map output records=4
--查看输出
[hadoop@Master hadoop]$ hadoop fs -ls out
Found 3 items
-rw-r--r-- 1 hadoop supergroup 0 2014-10-28 15:46 /user/hadoop/out/_SUCCESS
drwxr-xr-x - hadoop supergroup 0 2014-10-28 15:46 /user/hadoop/out/_logs
-rw-r--r-- 1 hadoop supergroup 25 2014-10-28 15:46 /user/hadoop/out/part-r-00000
[hadoop@Master hadoop]$ hadoop fs -cat /user/hadoop/out/part-r-00000
hadoop 1
hello 2
world 1