import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.URI;
/**
* Created by hadoop on 17-2-16.
*/
/**
 * Manual integration tests for basic HDFS file transfer: downloading a file
 * from HDFS to the local filesystem and uploading a local file into HDFS.
 *
 * <p>Requires a reachable HDFS NameNode at {@code hdfs://hadoop-virtual-machine:9000}
 * and a local user "hadoop"; these are integration tests, not unit tests.
 */
public class HdfsIO {
    // Hadoop configuration shared by all tests (defaults; no site overrides here).
    private final Configuration conf = new Configuration();
    // HDFS client handle; created by init() at the start of each test.
    private FileSystem fs = null;

    /**
     * Connects to the HDFS cluster as user "hadoop".
     *
     * <p>Propagates failures instead of swallowing them: the original version
     * caught every exception and left {@code fs} null, causing a confusing
     * NullPointerException later in the test instead of the real connection error.
     *
     * @throws Exception if the filesystem cannot be reached or the URI is invalid
     */
    private void init() throws Exception {
        fs = FileSystem.get(new URI("hdfs://hadoop-virtual-machine:9000"), conf, "hadoop");
    }

    /**
     * Copies {@code /test.txt} from HDFS to {@code /home/hadoop/test.txt} locally.
     * Streams and the filesystem handle are closed even when the copy fails.
     *
     * @throws Exception on connection or I/O failure
     */
    @Test
    public void testDownload() throws Exception {
        init();
        // try-with-resources guarantees both streams close even if copyBytes throws;
        // the finally block ensures the FileSystem handle is always released.
        try (FSDataInputStream in = fs.open(new Path("/test.txt"));
             FileOutputStream out = new FileOutputStream("/home/hadoop/test.txt")) {
            IOUtils.copyBytes(in, out, 4096);
        } finally {
            fs.close();
        }
    }

    /**
     * Copies the local file {@code /home/hadoop/test.txt} into HDFS as
     * {@code /test2.txt}, overwriting it if present (FileSystem.create default).
     *
     * @throws Exception on connection or I/O failure
     */
    @Test
    public void testUpload() throws Exception {
        init();
        try (FileInputStream in = new FileInputStream("/home/hadoop/test.txt");
             FSDataOutputStream out = fs.create(new Path("/test2.txt"))) {
            IOUtils.copyBytes(in, out, 4096);
        } finally {
            fs.close();
        }
    }
}
// <hadoop> file streams — trailing blog-export text, commented out so the file compiles.
// Original residue: "<hadoop>文件流 / 最新推荐文章于 2021-10-14 10:51:56 发布"