# List the contents of the HDFS root directory.
hadoop fs -ls /
# Create the /sanguo directory in HDFS.
hadoop fs -mkdir /sanguo
# Upload local ./shuguo.txt into HDFS /sanguo (copy; the local file is kept).
hadoop fs -put ./shuguo.txt /sanguo
# Print the file's contents to stdout.
hadoop fs -cat /sanguo/shuguo.txt
# Change the file's owner and group to user "ice", group "ice".
hadoop fs -chown ice:ice /sanguo/shuguo.txt
# Copy the file within HDFS from /sanguo to /jinguo.
hadoop fs -cp /sanguo/shuguo.txt /jinguo
# Move (rename) weiguo.txt from /sanguo to /jinguo within HDFS.
hadoop fs -mv /sanguo/weiguo.txt /jinguo
# Show the last 1 KB of the file (handy for checking recent appends).
hadoop fs -tail /jinguo/shuguo.txt
# Delete the file from HDFS (files only; directories need -r, see below).
hadoop fs -rm /sanguo/shuguo.txt
# -r: recursive — also delete directories and their contents
hadoop fs -du /jinguo
# -s: show only the aggregate (total) size of the directory
# -h: human-readable sizes for each individual file
hadoop fs -setrep 10 /目录/文件
# (setrep above sets the replica count of that file)
# Move (cut) a file from the local filesystem into HDFS:
hdfs dfs -moveFromLocal ./shuguo.txt /sanguo
# Copy a file from the local filesystem into HDFS:
# copyFromLocal: copy local ./weiguo.txt into HDFS /sanguo (local file kept).
hadoop fs -copyFromLocal ./weiguo.txt /sanguo
# -put is equivalent to -copyFromLocal.
hadoop fs -put ./weiguo.txt /sanguo
# -put behaves the same as copyFromLocal; in production, put is the usual choice.
# Append a local file to a file that already exists in HDFS:
#   hadoop fs -appendToFile <localsrc> <dst>
# Download from HDFS to the local filesystem:
# copyToLocal: download /sanguo/shuguo.txt from HDFS to the current local directory.
hadoop fs -copyToLocal /sanguo/shuguo.txt ./
# -get is equivalent to -copyToLocal.
hadoop fs -get /sanguo/shuguo.txt ./