package com.myhadoop.hdfs;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
/**
 * Demonstrates basic HDFS file operations (mkdir, upload, download, delete)
 * against the cluster at hdfs://master:9000, authenticating as user "root".
 *
 * <p>NOTE(review): {@code fs} is static but re-assigned per test by the
 * {@code @Before} hook; the shared connection is never closed, which is
 * acceptable for a short-lived demo but not for production code.
 */
public class HDFSDemo1 {
    private static FileSystem fs;

    /** Opens a fresh HDFS connection before each test. */
    @Before
    public void init() throws IOException, URISyntaxException, InterruptedException {
        fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration(), "root");
    }

    /** Creates the directory /wh on HDFS and prints whether it succeeded. */
    @Test
    public void testMkdir() throws IOException {
        boolean bRes = fs.mkdirs(new Path("/wh"));
        System.out.println("mkdir result: " + bRes);
    }

    /** Uploads a local file to /wh/studentUpload.txt on HDFS. */
    @Test
    public void testUpload() throws IOException {
        // try-with-resources closes both streams even if fs.create() throws;
        // the original leaked the FileInputStream in that failure path.
        try (InputStream in = new FileInputStream("C:/Users/Think/Desktop/student1.txt");
             OutputStream out = fs.create(new Path("/wh/studentUpload.txt"))) {
            // closeStreams=false: try-with-resources now owns closing.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }

    /** Downloads an HDFS file to the local desktop. */
    @Test
    public void testDownload() throws IOException {
        // Same fix as testUpload: close `in` even if FileOutputStream fails.
        try (InputStream in = fs.open(new Path("/user/hive/warehouse/db_hive_edu.db/student/student.txt"));
             OutputStream out = new FileOutputStream("C:/Users/Think/Desktop/student1.txt")) {
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }

    /** Deletes /wh/studentUpload.txt (recursive=true) and prints the result. */
    @Test
    public void testDelete() throws IOException {
        boolean bRes = fs.delete(new Path("/wh/studentUpload.txt"), true);
        System.out.println("file delete result: " + bRes);
    }
}
// Hadoop HDFS file operations
// (latest recommended article published 2021-04-26 09:24:47)