1. Create a directory on HDFS from Eclipse
```java
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
    Configuration conf = new Configuration();
    //conf.set("fs.defaultFS", "hdfs://hadoop132:9000");
    // 1. Get the HDFS client object
    //FileSystem fs = FileSystem.get(conf);
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root"); // the URI host is your Hadoop NameNode's hostname
    // 2. Create the path on HDFS
    fs.mkdirs(new Path("/0529/dashen/banzhang2")); // creates the directory (and any missing parents) on HDFS
    fs.close();
    System.out.println("over");
}
```
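Since FileSystem implements Closeable, the same mkdir can also be written with try-with-resources, so the client is closed even if mkdirs throws. A minimal sketch, assuming the same hadoop132 NameNode and root user:

```java
// Same mkdir as above, but fs.close() runs automatically, even on failure.
try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"),
                                    new Configuration(), "root")) {
    fs.mkdirs(new Path("/0529/dashen/banzhang2"));
    System.out.println("over");
}
```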
2. Upload a file
```java
@Test
public void testCopyFromLocalFile() throws IOException, InterruptedException, URISyntaxException {
    // 1. Get the fs object
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root");
    // 2. Call the upload API
    fs.copyFromLocalFile(new Path("f:/TestData/dabanzhang.txt"), new Path("/banzhang.txt")); // first argument: a file on your local machine; second: the name it will have on HDFS
    // 3. Release resources
    fs.close();
}
```
Run this by right-clicking the test method and choosing Run As > JUnit Test.
3. Changing configuration parameters: in Eclipse you can create an hdfs-site.xml under the project's resources folder and set the replication factor there; those settings take priority over the default parameters on the Linux cluster.
You can also write conf.set("dfs.replication", "2") directly in the code; this has the highest priority of all. A minimal sketch of both follows.
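First, an hdfs-site.xml placed on the classpath (the project's resources folder); the value 2 here is just an illustrative replication factor:

```xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>
</configuration>
```

And the in-code override, which beats both the classpath file and the cluster defaults:

```java
Configuration conf = new Configuration();
conf.set("dfs.replication", "2"); // highest priority: overrides hdfs-site.xml and cluster defaults
```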
4. Download an HDFS file to the local machine
```java
@Test
public void testCopyToLocalFile() throws IOException, InterruptedException, URISyntaxException {
    // 1. Get the fs object
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root");
    // 2. Perform the download
    fs.copyToLocalFile(new Path("/banzhang.txt"), new Path("f:/TestData/banhua.txt")); // the local name is up to you
    // 3. Release resources
    fs.close();
}
```
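One caveat worth knowing: on a Windows machine without the Hadoop native libraries, the two-argument copyToLocalFile can fail while writing the local checksum file. The four-argument overload lets you bypass it; a sketch with the same paths as above:

```java
// delSrc = false: keep the source file on HDFS;
// useRawLocalFileSystem = true: write through the raw local filesystem,
// skipping the .crc checksum file (avoids native-IO errors on Windows).
fs.copyToLocalFile(false, new Path("/banzhang.txt"),
        new Path("f:/TestData/banhua.txt"), true);
```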
5. Delete a directory on HDFS
```java
@Test
public void testDelete() throws IOException, InterruptedException, URISyntaxException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root");
    // 2. Delete the path
    fs.delete(new Path("/0529"), true); // recursive flag: must be true to delete a directory; irrelevant for a single file
    fs.close();
}
```
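Note that delete reports failure through its return value rather than an exception; a minimal sketch that checks it:

```java
// Returns true if something was deleted, false otherwise
// (for example, when the path does not exist).
boolean deleted = fs.delete(new Path("/0529"), true);
System.out.println(deleted ? "deleted" : "nothing deleted");
```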
6. Rename an HDFS file
```java
@Test
public void testChangeName() throws IOException, InterruptedException, URISyntaxException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root");
    fs.rename(new Path("/banzhang.txt"), new Path("/yanjing.txt"));
    fs.close();
}
```
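Like delete, rename signals failure with a boolean, e.g. when the source is missing or the destination already exists, so it is worth checking:

```java
boolean renamed = fs.rename(new Path("/banzhang.txt"), new Path("/yanjing.txt"));
System.out.println("renamed: " + renamed); // false if the source is missing or the target exists
```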
7. View file details on HDFS
```java
@Test
public void testListFiles() throws IOException, InterruptedException, URISyntaxException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root");
    // listFiles returns an iterator; true means recurse into subdirectories
    RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
    // walk the iterator
    while (listFiles.hasNext()) {
        LocatedFileStatus fileStatus = listFiles.next();
        System.out.println(fileStatus.getPath().getName()); // file name
        System.out.println(fileStatus.getPermission());     // permissions
        System.out.println(fileStatus.getLen());            // length in bytes
        // block locations: which hosts hold each block's replicas
        BlockLocation[] blockLocations = fileStatus.getBlockLocations();
        for (BlockLocation blockLocation : blockLocations) {
            String[] hosts = blockLocation.getHosts();
            for (String host : hosts) {
                System.out.println(host);
            }
        }
        System.out.println("--------------------");
    }
    fs.close();
}
```
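LocatedFileStatus carries more than the three fields printed above; inside the same loop you could also print, for example:

```java
System.out.println(fileStatus.getOwner());     // owning user
System.out.println(fileStatus.getGroup());     // owning group
System.out.println(fileStatus.getBlockSize()); // HDFS block size in bytes
```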
8. Check whether a path is a file or a directory
```java
@Test
public void testIsFile() throws IOException, InterruptedException, URISyntaxException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://hadoop132:9000"), conf, "root");
    FileStatus[] listStatus = fs.listStatus(new Path("/"));
    for (FileStatus fileStatus : listStatus) {
        if (fileStatus.isFile()) {
            // it's a file
            System.out.println("f:" + fileStatus.getPath().getName());
        } else {
            // it's a directory
            System.out.println("d:" + fileStatus.getPath().getName());
        }
    }
    fs.close();
}
```
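Unlike listFiles, listStatus is not recursive, so the test above only inspects the root directory. A sketch of a hypothetical recursive variant that walks the whole tree with the same f:/d: prefixes:

```java
// Hypothetical helper: depth-first walk printing every entry under dir.
private void walk(FileSystem fs, Path dir) throws IOException {
    for (FileStatus status : fs.listStatus(dir)) {
        System.out.println((status.isFile() ? "f:" : "d:") + status.getPath());
        if (status.isDirectory()) {
            walk(fs, status.getPath()); // recurse into subdirectories
        }
    }
}
```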