package cn.itcast.hadoop.hdfs;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import javax.imageio.stream.FileImageInputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
/**
 * Demonstrates basic HDFS operations (upload, download, mkdir, delete) through
 * the Hadoop {@link FileSystem} Java API, driven by JUnit tests.
 *
 * <p>NOTE(review): host name {@code fuyuhao01}, port 9000 and the local paths
 * ({@code /root/...}) are environment-specific — adjust before running.
 */
public class HDFSDemo {
    /** HDFS client handle, initialized before each test in {@link #init()}. */
    FileSystem fs = null;

    /**
     * Creates the {@link FileSystem} client before each test, connecting to the
     * NameNode at {@code hdfs://fuyuhao01:9000} as user {@code root}.
     *
     * @throws IOException          if the connection cannot be established
     * @throws URISyntaxException   if the NameNode URI is malformed
     * @throws InterruptedException if the login is interrupted
     */
    @Before
    public void init() throws IOException, URISyntaxException, InterruptedException {
        // Obtain the FileSystem implementation (the HDFS client facade).
        fs = FileSystem.get(new URI("hdfs://fuyuhao01:9000"), new Configuration(), "root");
    }

    /**
     * Uploads a local jar to HDFS via a raw stream copy.
     *
     * @throws IOException if either the local read or the HDFS write fails
     */
    @Test
    public void testUpload() throws IOException {
        // try-with-resources guarantees both streams close even if fs.create()
        // throws after the FileInputStream was opened (the original leaked here).
        try (InputStream in = new FileInputStream("/root/hadoop-hdfs-2.2.0.jar");
             OutputStream out = fs.create(new Path("/test.jar"))) {
            // Streams are closed by try-with-resources, so pass close=false.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }

    /**
     * Downloads {@code /hadoop} from HDFS to the local path {@code /root/hadoop111}.
     *
     * @throws IOException if the copy fails
     */
    @Test
    public void testDownload() throws IllegalArgumentException, IOException {
        fs.copyToLocalFile(new Path("/hadoop"), new Path("/root/hadoop111"));
    }

    /**
     * Creates the directory {@code /fuyuhao0106} on HDFS and prints the result.
     *
     * @throws Exception if the RPC fails
     */
    @Test
    public void testMkdir() throws Exception {
        boolean mkdir = fs.mkdirs(new Path("/fuyuhao0106"));
        System.out.println(mkdir);
    }

    /**
     * Recursively deletes {@code /fuyuhao0106} from HDFS and prints the result.
     *
     * @throws Exception if the RPC fails
     */
    @Test
    public void testDel() throws Exception {
        // second argument 'true' = recursive delete
        boolean flag = fs.delete(new Path("/fuyuhao0106"), true);
        System.out.println(flag);
    }

    /**
     * Standalone example: downloads {@code /hadoop} from HDFS into the local
     * file {@code /root/hd}.
     *
     * @param args unused
     * @throws IOException        on any read/write failure
     * @throws URISyntaxException if the NameNode URI is malformed
     */
    public static void main(String[] args) throws IOException, URISyntaxException {
        // FileSystem is Closeable; try-with-resources releases the client
        // (the original never closed it).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://fuyuhao01:9000"), new Configuration())) {
            InputStream in = fs.open(new Path("/hadoop"));
            OutputStream out = new FileOutputStream("/root/hd");
            // close=true: IOUtils closes both streams after (or on failure of) the copy.
            IOUtils.copyBytes(in, out, 4096, true);
        }
    }
}
// HDFS Java API (article originally published 2023-05-20 00:47:11)