1、首先导入 Maven 依赖,可以到 Maven 中央仓库网站查找,或者直接在 pom.xml 中加入以下配置:
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.7.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.7.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.7.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
</dependency>
</dependencies>
2、然后配置好 Hadoop 相关的环境变量。
接着编写如下 Java 代码:
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Basic HDFS client test: connects to the NameNode, then creates a directory.
 * Requires a running HDFS cluster reachable at hdfs://master:9000.
 */
public class Test1 {
    // HDFS client handle shared by the test methods; opened in conn(), released in close().
    FileSystem fs;

    @Before
    public void conn() throws URISyntaxException, IOException {
        // Hadoop configuration; automatically picks up hdfs-site.xml/core-site.xml
        // from the classpath when present.
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "1"); // replication factor 1 (single-node setup)
        // NameNode URI: 9000 is the RPC port from the cluster config; "master" is the
        // host name — use the IP address instead if no host mapping is configured.
        URI uri = new URI("hdfs://master:9000");
        // Equivalent to an HDFS client handle.
        fs = FileSystem.get(uri, conf);
    }

    @After
    public void close() throws IOException {
        // Close the client after each test so the JVM does not leak connections.
        if (fs != null) {
            fs.close();
        }
    }

    @Test
    public void mkdir() throws IOException {
        // Target HDFS path to create.
        Path path = new Path("/data1");
        // If the directory already exists, remove it first to avoid errors.
        if (fs.exists(path)) {
            // delete(Path) is deprecated in Hadoop 2.x; pass recursive=true explicitly.
            fs.delete(path, true);
        }
        // Create the directory (mkdirs also creates any missing parents).
        fs.mkdirs(path);
    }
}
3、最后可以通过以下测试方法列出 HDFS 根目录下的文件,检验目录创建是否成功:
@Test
public void filestatus() throws IOException {
//获取根目录下的文件列表
FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
//遍历 fileStatuses
for (FileStatus fileStatus : fileStatuses) {
System.out.println(fileStatus);
}