创建一个 Maven 项目
修改pom文件导入hadoop的jar包
版本根据自身hadoop版本修改
<!-- Hadoop client libraries (HDFS FileSystem API, Configuration, Path, ...). -->
<!-- Version must match the Hadoop version of your cluster. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.6.0</version>
<!-- NOTE(review): scope=test works here because the hfs class lives under
     src/test; change to compile scope if main-source code needs HDFS. -->
<scope>test</scope>
</dependency>
完整pom文件
<!-- Minimal Maven project for experimenting with the HDFS Java API. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.panzi.cn</groupId>
<artifactId>hdfs</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>hdfs</name>
<url>http://maven.apache.org</url>
<properties>
<!-- Force UTF-8 so Chinese comments/sources compile identically on any OS. -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<!-- JUnit 4: provides the @Test annotation used by the hfs class. -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- Hadoop client API; version must match the target cluster's Hadoop version. -->
<!-- NOTE(review): test scope is fine because hfs lives under src/test. -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.6.0</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
在 src/test/java 下创建一个 hfs 类(hfs.java)
完整文件
package com.panzi.cn.hdfs;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;
/**
 * Smoke tests for basic HDFS directory operations (mkdir / delete) against a
 * remote cluster, run as the "root" HDFS user.
 *
 * <p>Both tests connect to the NameNode at {@value #HDFS_URI}; they require a
 * reachable, running HDFS cluster and therefore print their result rather than
 * asserting it.
 */
public class hfs {

    // NameNode RPC endpoint of the target cluster; adjust to your environment.
    private static final String HDFS_URI = "hdfs://192.168.43.201:9000";
    // HDFS user the operations are performed as.
    private static final String HDFS_USER = "root";

    /**
     * Creates all directories on the path /dt/tmp/ in one call (including any
     * missing parent directories), then prints whether the call succeeded.
     */
    @Test
    public void mkdir() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // try-with-resources: FileSystem is Closeable; the original code leaked
        // the connection by never closing it.
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER)) {
            Path dir = new Path("/dt/tmp/");
            boolean created = fs.mkdirs(dir);
            System.out.println(created);
        }
    }

    /**
     * Permanently deletes the /dt/tmp/ directory (the tmp folder under /dt),
     * then prints whether the call succeeded.
     */
    @Test
    public void delete() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER)) {
            Path dir = new Path("/dt/tmp/");
            // delete(Path) is deprecated; pass recursive=true explicitly so
            // non-empty directories are removed, matching the old behavior.
            boolean deleted = fs.delete(dir, true);
            System.out.println(deleted);
        }
    }
}
运行成功结果
在linux上运行hdfs dfs -ls /
可以看见创建或删除了指定文件。
完成!