HDFS文件上传(测试参数优先级)
1.编写源代码
//1.文件上传
@Test
public void testCopyFromLocalFile() throws URISyntaxException, IOException, InterruptedException {
    // 1. Obtain the FileSystem handle, connecting as user "hadoop".
    Configuration conf = new Configuration();
    // try-with-resources guarantees fs is closed even if the upload throws,
    // fixing the leak in the original (close() was unreachable on exception).
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop")) {
        // 2. Upload the local file to HDFS.
        // No replication set here or on the classpath, so the server default applies.
        fs.copyFromLocalFile(new Path("D:/xin.txt"), new Path("/0811/xin.txt"));
    }
    System.out.println("完毕");
}
2.将hdfs-site.xml拷贝到项目的根目录下
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
@Test
public void testCopyFromLocalFile() throws URISyntaxException, IOException, InterruptedException {
    // 1. Obtain the FileSystem handle, connecting as user "hadoop".
    Configuration conf = new Configuration();
    // try-with-resources guarantees fs is closed even if the upload throws,
    // fixing the leak in the original (close() was unreachable on exception).
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop")) {
        // 2. Upload the local file. Replication now comes from the
        // hdfs-site.xml placed on the classpath (dfs.replication=1),
        // which overrides the server default.
        fs.copyFromLocalFile(new Path("D:/xin.txt"), new Path("/0811/xinnew.txt"));
    }
    System.out.println("完毕");
}
@Test
public void testCopyFromLocalFile() throws URISyntaxException, IOException, InterruptedException {
//1.获取fs对象
Configuration conf = new Configuration();
conf.set("dfs.replication","2");
FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"),conf,"hadoop");
//2. 执行上传API
fs.copyFromLocalFile(new Path("D:/xin.txt"),new Path("/0811/chenxin.txt"));
//3.关闭资源
fs.close();
System.out.println("完毕");
3.参数优先级
可以看出:
参数优先级排序:(1)客户端代码中设置的值 >(2)ClassPath下的用户自定义配置文件 >(3)然后是服务器的默认配置
2 HDFS文件下载
//2.文件下载
@Test
public void testCopyToLocalFile() throws URISyntaxException, IOException, InterruptedException {
//1.获取fs对象
System.setProperty("hadoop.home.dir","D:\\hadoop\\hadoop-2.7.2" );
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop");
//2.执行下载操作
fs.copyToLocalFile(false,new Path("/xinyue.txt"),new Path("D:/xiaoyue.txt"),true);
//关闭资源
fs.close();
3 HDFS文件夹删除
@Test
public void testDelete() throws URISyntaxException, IOException, InterruptedException {
    // 1. Obtain the FileSystem handle, connecting as user "hadoop".
    System.setProperty("hadoop.home.dir", "D:\\hadoop\\hadoop-2.7.2");
    Configuration conf = new Configuration();
    // try-with-resources guarantees fs is closed even if the delete throws.
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop")) {
        // 2. Delete the directory. recursive=true is required for directories:
        // with false (as in the original), deleting a non-empty directory
        // throws an IOException instead of removing it.
        fs.delete(new Path("/0811"), true);
    }
    System.out.println("成功");
}
4 HDFS文件名更改
@Test
public void testRename() throws URISyntaxException, IOException, InterruptedException {
    // 1. Obtain the FileSystem handle, connecting as user "hadoop".
    System.setProperty("hadoop.home.dir", "D:\\hadoop\\hadoop-2.7.2");
    Configuration conf = new Configuration();
    // try-with-resources guarantees fs is closed even if the rename throws,
    // fixing the leak in the original (close() was unreachable on exception).
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop")) {
        // 2. Rename /chen.txt to /yuanyuan.txt.
        fs.rename(new Path("/chen.txt"), new Path("/yuanyuan.txt"));
    }
    System.out.println("成功");
}
5 HDFS文件详情查看
@Test
public void testListFiles() throws URISyntaxException, IOException, InterruptedException {
    // 1. Obtain the FileSystem handle, connecting as user "hadoop".
    System.setProperty("hadoop.home.dir", "D:\\hadoop\\hadoop-2.7.2");
    Configuration conf = new Configuration();
    // try-with-resources guarantees fs is closed even if iteration throws,
    // fixing the leak in the original (close() was unreachable on exception).
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop")) {
        // 2. Recursively list every file under the root directory.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            // Print name, permission, length, and block locations for each file.
            System.out.println(fileStatus.getPath().getName()); // file name
            System.out.println(fileStatus.getPermission());     // permission
            System.out.println(fileStatus.getLen());            // length in bytes
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                // Each block may be replicated on several datanodes.
                for (String host : blockLocation.getHosts()) {
                    System.out.println(host);
                }
            }
            System.out.println("-------分割线---------");
        }
    }
}
输出:
wc.input
rw-r--r--
37
117.59.224.141
-------分割线---------
xinyue.txt
rw-r--r--
14
117.59.224.141
-------分割线---------
yuanyuan.txt
rw-r--r--
0
-------分割线---------
6 HDFS文件和文件夹判断
@Test
public void testListStatus() throws URISyntaxException, IOException, InterruptedException {
    // 1. Obtain the FileSystem handle, connecting as user "hadoop".
    System.setProperty("hadoop.home.dir", "D:\\hadoop\\hadoop-2.7.2");
    Configuration conf = new Configuration();
    // try-with-resources guarantees fs is closed even if listing throws,
    // fixing the leak in the original (close() was unreachable on exception).
    try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.88.102:9000"), conf, "hadoop")) {
        // 2. List the immediate children of "/" and tag each as file or directory.
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            if (fileStatus.isFile()) {
                System.out.println("f:" + fileStatus.getPath().getName());
            } else {
                System.out.println("d:" + fileStatus.getPath().getName());
            }
        }
    }
}
输出:
f:wc.input
f:xinyue.txt
f:yuanyuan.txt
几百本常用电子书免费领取:https://github.com/XiangLinPro/IT_book