Hands-On HDFS API Examples

package com.atguigu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

public class HdfsClientDemo {

    private Configuration conf;
    private FileSystem fs;

    // Initialization: create the client object before each test.
    @Before
    public void init() throws IOException, InterruptedException {
        // 1. Create the client object. HDFS is a file system, so the client is a FileSystem object.
        //    First argument: the URI of the NameNode
        //    Second argument: the Configuration object
        //    Third argument: the user to connect as
        //URI uri = new URI("hdfs://hadoop102:8020"); // new URI(...) would force us to handle URISyntaxException
        URI uri = URI.create("hdfs://hadoop102:8020");
        // Create the Configuration object.
        conf = new Configuration();
        //conf.set("dfs.replication", "2"); // change the replication factor to test parameter priority
        fs = FileSystem.get(uri, conf, "atguigu");
    }

    // Close the client object after each test.
    @After
    public void close() throws IOException {
        fs.close();
    }

    // Create a directory.
    @Test
    public void mkdirs() throws IOException {
        fs.mkdirs(new Path("/java1"));
    }

    // Upload a file.
    @Test
    public void put() throws IOException {
        // Parameters of copyFromLocalFile:
        // 1. boolean delSrc    -- delete the local source file after the upload
        // 2. boolean overwrite -- overwrite the destination file if it already exists
        // 3. Path src          -- the local source path
        // 4. Path dst          -- the HDFS destination path
        // The local path below is only an example; adjust it to your environment.
        fs.copyFromLocalFile(false, true, new Path("D:\\sunwukong.txt"), new Path("/java1"));
    }
}
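The class also imports FileInputStream, FileOutputStream, and IOUtils, which point at stream-based I/O. As a minimal sketch, assuming the same test class above and a hypothetical local file, an upload through raw streams could look like this:

    // Stream-based upload: open a local input stream and an HDFS output stream,
    // then let IOUtils copy between them. Both paths here are hypothetical examples.
    @Test
    public void putByStream() throws IOException {
        FileInputStream fis = new FileInputStream("D:\\sunwukong.txt");       // local source
        FSDataOutputStream fos = fs.create(new Path("/java1/sunwukong.txt")); // HDFS destination
        IOUtils.copyBytes(fis, fos, conf); // copies the bytes and closes both streams
    }

Note that copyBytes(in, out, conf) closes both streams when it finishes, which is why no explicit closeStream calls appear here.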
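About the commented-out conf.set("dfs.replication", "2") line in init(): in Hadoop, a value set on the Configuration object in code takes precedence over a user-supplied hdfs-site.xml on the client's classpath, which in turn overrides the cluster-side hdfs-site.xml and the built-in hdfs-default.xml. One way to observe which value wins, sketched with hypothetical file names, is to upload a file and read its replication factor back:

    // Sketch: check the effective replication factor of an uploaded file.
    // Uncomment conf.set("dfs.replication", "2") in init() first; paths are hypothetical.
    @Test
    public void checkReplication() throws IOException {
        Path dst = new Path("/java1/sunwukong.txt");
        fs.copyFromLocalFile(false, true, new Path("D:\\sunwukong.txt"), dst);
        short replication = fs.getFileStatus(dst).getReplication();
        System.out.println("effective replication = " + replication);
    }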