Chapter 4 The Java API for HDFS
4.6 Java API Applications
4.6.1 Uploading a File
First create a file locally (on the client side), for example a new word2.txt file on drive D; the content can be anything, such as:
test
node node
Hadoop Hadoop
Then write the following Java program in Eclipse:
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class PutFile {
    public static void main(String[] args) throws Exception {
        // Local (client-side) file path
        String local = "D:\\word2.txt";
        // Destination path on HDFS
        String dest = "hdfs://192.168.80.131:9000/user/root/input/word2.txt";
        Configuration cfg = new Configuration();
        // Connect to HDFS as user root
        FileSystem fs = FileSystem.get(URI.create(dest), cfg, "root");
        fs.copyFromLocalFile(new Path(local), new Path(dest));
        fs.close();
    }
}
To repeat: the destination string dest="hdfs://192.168.80.131:9000/user/root/input/word2.txt" must correspond to the fs.defaultFS setting in core-site.xml, whose value is hdfs://node1:9000. Because the hosts file of the local Windows system has no entry for node1, the IP address has to be used here instead of the hostname.
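As an alternative, assuming the cluster really does listen at 192.168.80.131, an entry like the following could be added to the Windows hosts file (C:\Windows\System32\drivers\etc\hosts) so that hdfs://node1:9000 also resolves from the client:
192.168.80.131 node1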
Execution result:
[root@node1 ~]# hdfs dfs -ls /user/root/input
Found 2 items
-rw-r--r-- 3 root supergroup 30 2017-05-21 03:48 /user/root/input/word2.txt
-rw-r--r-- 3 root supergroup 55 2017-05-17 10:32 /user/root/input/words.txt
Note: the statement FileSystem fs = FileSystem.get(URI.create(dest), cfg, "root"); specifies the root user explicitly. This is necessary because the default user on Windows is Administrator; if the program does not specify root, HDFS may reject the write with an exception such as:
Exception in thread "main" org.apache.hadoop.security.AccessControlException: Permission denied: user=Administrator
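An alternative to passing the user name into FileSystem.get() is the HADOOP_USER_NAME property, which Hadoop consults when simple (non-Kerberos) authentication is in use. A minimal sketch; note that the property must be set before the FileSystem is obtained, and the two-argument get() is then sufficient:
// Sketch: select the HDFS user via a system property (simple auth only);
// this must run before FileSystem.get() so the login user resolves to root
System.setProperty("HADOOP_USER_NAME", "root");
FileSystem fs = FileSystem.get(URI.create(dest), cfg);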
4.6.2 Downloading a File
package cn.hadron.hdfsDemo;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class GetFile {
    public static void main(String[] args) throws IOException {
        // Source file on HDFS
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input/words.txt";
        // Local destination path
        String localPath = "D:/copy_words.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        fs.copyToLocalFile(hdfs_path, local_path);
        fs.close();
    }
}
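A file can also be read from HDFS as a stream, without copying it to the local disk, using FSDataInputStream together with the IOUtils helper class. A minimal sketch (the class name CatFile is ours, not from the original project) that prints words.txt to standard output:
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class CatFile {
    public static void main(String[] args) throws Exception {
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input/words.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf, "root");
        // Open the HDFS file as an input stream
        FSDataInputStream in = fs.open(new Path(hdfsPath));
        try {
            // Copy the stream to stdout; false means the streams stay open here
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
            fs.close();
        }
    }
}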
4.6.3 Creating an HDFS Directory
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CreateDir {
    public static void main(String[] args) throws Exception {
        String url = "hdfs://192.168.80.131:9000/tmp/";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), conf, "root");
        // mkdirs() returns true if the directory was created successfully
        boolean b = fs.mkdirs(new Path(url));
        System.out.println(b);
        fs.close();
    }
}
Check the result from the command line:
[root@node1 ~]# hdfs dfs -ls /
Found 3 items
drwxr-xr-x - root supergroup 0 2017-05-14 09:40 /abc
drwxr-xr-x - root supergroup 0 2017-05-21 04:38 /tmp
drwxr-xr-x - root supergroup 0 2017-05-14 09:37 /user
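mkdirs() also has an overload that takes an FsPermission, and fs.exists() can be used to avoid touching a directory that is already there. A sketch under the same cluster address (the class name and the /tmp/data path are ours, for illustration only; the effective permission is still subject to the configured umask):
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
public class CreateDirWithPermission {
    public static void main(String[] args) throws Exception {
        String url = "hdfs://192.168.80.131:9000/tmp/data";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), conf, "root");
        Path dir = new Path(url);
        if (!fs.exists(dir)) {
            // Create the directory with rwxr-xr-x (octal 0755) permissions
            fs.mkdirs(dir, new FsPermission((short) 0755));
        }
        fs.close();
    }
}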
4.6.4 Deleting an HDFS File or Directory
First upload a file to the HDFS /tmp directory:
[root@node1 ~]# hdfs dfs -put /root/words.txt /tmp
[root@node1 ~]# hdfs dfs -ls /tmp
Found 1 items
-rw-r--r-- 3 root supergroup 55 2017-05-21 04:57 /tmp/words.txt
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class DeleteFile {
    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.80.131:9000/tmp";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf, "root");
        // The second argument true deletes the directory recursively,
        // including all files and subdirectories it contains
        boolean b = fs.delete(new Path(uri), true);
        System.out.println(b);
        fs.close();
    }
}
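When deleting a single file rather than a directory, the recursive flag should be false, and guarding with fs.exists() makes the intent explicit, since delete() simply returns false for a missing path. A minimal sketch, reusing fs from the program above:
Path p = new Path("/tmp/words.txt");
if (fs.exists(p)) {
    // false: non-recursive; deleting a non-empty directory this way fails
    boolean deleted = fs.delete(p, false);
    System.out.println(deleted);
}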
Listing the HDFS root directory from the command line confirms that /tmp has been deleted:
[root@node1 ~]# hdfs dfs -ls /
Found 2 items
drwxr-xr-x - root supergroup 0 2017-05-14 09:40 /abc
drwxr-xr-x - root supergroup 0 2017-05-14 09:37 /user
4.6.5 Downloading an HDFS Directory
package cn.hadron.hdfsDemo;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CopyToLocalFile {
    public static void main(String[] args) throws IOException {
        // HDFS directory to download
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input";
        // Local destination directory
        String localPath = "D:\\input";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        fs.copyToLocalFile(hdfs_path, local_path);
        fs.close();
    }
}
The input directory and its files can now be seen on drive D. The .crc files are checksum files: to guarantee data integrity, Hadoop generates a checksum file for each file and verifies it during reads and writes.
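If the .crc files are not wanted, copyToLocalFile() has a four-argument overload that writes through the raw local file system and therefore skips checksum generation. A one-line sketch, substituting for the call in the program above:
// delSrc=false keeps the HDFS source; useRawLocalFileSystem=true skips .crc files
fs.copyToLocalFile(false, hdfs_path, local_path, true);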
4.6.6 Uploading a Local Directory (Folder)
First prepare a local directory to upload. Here the input directory downloaded in the previous section is renamed to words, and the .crc checksum files are deleted.
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CopyFromLocalFile {
    public static void main(String[] args) throws Exception {
        // HDFS destination directory
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/";
        // Local directory to upload
        String localPath = "D:\\words";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf, "root");
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        fs.copyFromLocalFile(local_path, hdfs_path);
        fs.close();
    }
}
Verify the upload from the command line:
[root@node1 ~]# hdfs dfs -ls /user/root
Found 2 items
drwxr-xr-x - root supergroup 0 2017-05-21 03:48 /user/root/input
drwxr-xr-x - root supergroup 0 2017-05-21 05:21 /user/root/words
[root@node1 ~]# hdfs dfs -ls /user/root/words
Found 2 items
-rw-r--r-- 3 root supergroup 30 2017-05-21 05:21 /user/root/words/word2.txt
-rw-r--r-- 3 root supergroup 55 2017-05-21 05:21 /user/root/words/words.txt
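The same verification can be done from the Java API with listStatus(), which returns a FileStatus array describing each entry. A sketch (the class name ListDir is ours):
package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class ListDir {
    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.80.131:9000/user/root/words";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf, "root");
        // Print the size in bytes and the full path of each directory entry
        for (FileStatus status : fs.listStatus(new Path(uri))) {
            System.out.println(status.getLen() + "\t" + status.getPath());
        }
        fs.close();
    }
}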