import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
// Hands-on JUnit 4 test class exercising basic HDFS operations against a live cluster.
// NOTE(review): Java convention is UpperCamelCase for class names (e.g. HdfsTest);
// renaming is out of scope here since callers/build config may reference this name.
public class hdfs {
// URI of the HDFS NameNode (host bigdata01, RPC port 9000).
private final String hdfs_path="hdfs://bigdata01:9000";
// FileSystem handle; initialized in setUp() before each test.
// NOTE(review): no @After teardown is visible in this chunk — fileSystem may never be closed. TODO confirm.
private FileSystem fileSystem=null;
// Hadoop configuration object; built fresh in setUp().
private Configuration configuration=null;
// Runs before each test: builds a Configuration and opens a FileSystem
// connection to the cluster as user "AzF".
@Before
public void setUp() throws URISyntaxException, IOException, InterruptedException {
    // Fresh Hadoop configuration (cluster defaults only).
    configuration = new Configuration();
    // Optional replication-factor override, intentionally left disabled:
    //configuration.set("dfs.replication","2");
    // Connect to HDFS. Passing the user name here overrides any
    // HADOOP_USER_NAME environment variable on the client machine.
    fileSystem = FileSystem.get(new URI(hdfs_path), configuration, "AzF");
    System.out.println("----------------------已经连接上集群-------------------------");
    System.out.println("\n");
}
// Smoke test: dumps the Configuration and FileSystem handles to stdout
// to confirm setUp() produced live objects.
@Test
public void test() throws IOException {
    System.out.println(configuration);
    System.out.println(fileSystem);
}
// Creates a nested directory tree on HDFS.
// FileSystem#mkdirs behaves like `mkdir -p`: all missing parent directories
// (/input, /input/aa, ...) are created in one call.
// Fix: the original ignored the boolean returned by mkdirs() and printed the
// success message unconditionally; now failure is reported instead of masked.
@Test
public void mkdirs() throws IOException {
    Path dir = new Path("/input/aa/bb/cc");
    boolean created = fileSystem.mkdirs(dir);
    if (created) {
        System.out.println("成功创建HDFS 文件目录");
    } else {
        System.out.println("创建HDFS 文件目录失败: " + dir);
    }
}
//创建文件
// NOTE(review): the source is truncated here — the comment above ("create file")
// introduces a method whose body is missing from this chunk.
// ---- Non-code residue scraped from the hosting web page, preserved as comments: ----
// java代码操作hadoop基本命令(基本版)
// 最新推荐文章于 2022-08-04 17:07:31 发布
// ![](https://img-home.csdnimg.cn/images/20240711042549.png)