package com.demo.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Hadoop HDFS Java API examples, exercised as JUnit 4 tests.
 *
 * <p>Each test runs against a live cluster reachable at {@link #HDFS_PATH};
 * {@link #setUp()} opens the {@link FileSystem} handle and {@link #tearDown()}
 * closes it after every test.
 */
public class HDFSApp {

    /** NameNode URI of the target cluster. */
    public static final String HDFS_PATH = "hdfs://pumbaapc:8020";

    FileSystem fileSystem = null;
    Configuration configuration = null;

    /**
     * Creates a directory tree on HDFS.
     */
    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }

    /**
     * Creates a file and writes a short string to it.
     * (Method name "creat" kept as-is so existing test filters still match.)
     */
    @Test
    public void creat() throws Exception {
        // try-with-resources: the original leaked the stream if write() threw.
        try (FSDataOutputStream output = fileSystem.create(new Path("/hdfsapi/test/a.txt"))) {
            // Explicit charset — bare getBytes() depends on the platform default.
            output.write("hello hadoop".getBytes(StandardCharsets.UTF_8));
            output.flush();
        }
    }

    /**
     * Prints the contents of an HDFS file to stdout.
     */
    @Test
    public void cat() throws Exception {
        try (FSDataInputStream in = fileSystem.open(new Path("/hdfsapi/test/a.txt"))) {
            IOUtils.copyBytes(in, System.out, 1024);
        }
    }

    /**
     * Renames a file on HDFS.
     */
    @Test
    public void rename() throws Exception {
        Path oldPath = new Path("/hdfsapi/test/a.txt");
        Path newPath = new Path("/hdfsapi/test/hello.txt");
        fileSystem.rename(oldPath, newPath);
    }

    /**
     * Uploads a local file to HDFS using the high-level helper.
     */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path localPath = new Path("/home/pumbaa/Downloads/Anaconda3-5.0.1-Linux-x86_64.sh");
        Path destPath = new Path("/hdfsapi/test");
        fileSystem.copyFromLocalFile(localPath, destPath);
    }

    /**
     * Uploads a large local file via raw streams, printing "*" as upload
     * progress. (A duplicated copyFromLocalFile() call of the Anaconda
     * installer — copy-paste residue from the test above — was removed.)
     */
    @Test
    public void copyFromLocalBigFile() throws Exception {
        // try-with-resources: the original closed neither stream.
        try (InputStream in = new BufferedInputStream(
                 new FileInputStream(new File("/home/pumbaa/Downloads/ideaIU-2017.3.4.tar.gz")));
             FSDataOutputStream output = fileSystem.create(
                 new Path("/hdfsapi/test/ideaIU-2017.3.4.tar.gz"),
                 new Progressable() {
                     public void progress() {
                         System.out.print("*"); // one star per written packet
                     }
                 })) {
            IOUtils.copyBytes(in, output, 4096);
        }
    }

    @Before
    public void setUp() throws Exception {
        configuration = new Configuration();
        // Connect as user "pumbaa"; this may throw if the cluster is unreachable.
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "pumbaa");
    }

    @After
    public void tearDown() throws Exception {
        // Close the client connection before dropping the reference —
        // the original only nulled the field, leaking the FileSystem handle.
        if (fileSystem != null) {
            fileSystem.close();
        }
        configuration = null;
        fileSystem = null;
        System.out.println("资源释放");
    }
}
// HDFS利用FileSystem API文件的读写 (HDFS file read/write via the FileSystem API — original article title)
// 最新推荐文章于 2024-06-28 10:58:41 发布 (blog publication footer; kept as a comment so the file compiles)