1. Build a Java project with Maven
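If no project skeleton exists yet, Maven can generate one. A minimal sketch (the artifactId here is a placeholder; the groupId matches the package used below):
mvn archetype:generate -DgroupId=com.imooc.hadoop -DartifactId=hdfs-api-demo \
    -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false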
2. Add the HDFS-related dependencies
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <hadoop.version>2.6.0-cdh5.7.0</hadoop.version>
</properties>

<dependencies>
    <!-- Hadoop client dependency -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
        <scope>provided</scope>
    </dependency>
    <!-- JUnit dependency for the unit tests -->
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.10</version>
        <scope>test</scope>
    </dependency>
</dependencies>
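Note: the CDH-flavored artifacts above (2.6.0-cdh5.7.0) are not published to Maven Central, so the pom also needs Cloudera's repository; a typical entry looks like this:
<repositories>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>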
3. Develop Java API operations on HDFS files
package com.imooc.hadoop.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
/**
 * Hadoop HDFS Java API operations
 */
public class HDFSApp {

    FileSystem fileSystem = null;
    Configuration configuration = null;

    // HDFS address (NameNode URI)
    public static final String HDFS_PATH = "hdfs://hadoop000:8020";

    /**
     * Create an HDFS directory
     * @throws Exception
     */
    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/hdfsapi/test"));
    }

    /**
     * Create a file and write to it
     * @throws Exception
     */
    @Test
    public void create() throws Exception {
        FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/a.txt"));
        // Alternative overload: create(path, overwrite, bufferSize, replication, blockSize)
        // FSDataOutputStream outputStream = fileSystem.create(new Path("/hdfsapi/test/b.txt"), true, 1024, (short) 1, 1048576L);
        outputStream.write("hello hadoop".getBytes());
        outputStream.flush();
        outputStream.close();
    }

    /**
     * Print the contents of an HDFS file
     * @throws Exception
     */
    @Test
    public void cat() throws Exception {
        FSDataInputStream in = fileSystem.open(new Path("/hdfsapi/test/a.txt"));
        IOUtils.copyBytes(in, System.out, 1024);
        in.close();
    }

    /**
     * Rename a file
     */
    @Test
    public void rename() throws Exception {
        Path oldPath = new Path("/hdfsapi/test/a.txt");
        Path newPath = new Path("/hdfsapi/test/b.txt");
        fileSystem.rename(oldPath, newPath);
    }

    /**
     * Upload a local file to HDFS
     *
     * @throws Exception
     */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path localPath = new Path("C:/soft/apache/apache-maven-3.5.0/conf/settings.xml");
        Path hdfsPath = new Path("/hdfsapi/test");
        fileSystem.copyFromLocalFile(localPath, hdfsPath);
    }

    /**
     * Upload a (large) local file to HDFS, reporting progress
     */
    @Test
    public void copyFromLocalFileWithProgress() throws Exception {
        InputStream in = new BufferedInputStream(
                new FileInputStream(
                        new File("D:\\001编程学习\\慕课Hadoop\\software\\spark-2.1.0-bin-2.6.0-cdh5.7.0.tgz")));
        FSDataOutputStream output = fileSystem.create(new Path("/hdfsapi/test/spark-1.6.1.tgz"),
                new Progressable() {
                    public void progress() {
                        System.out.println(".");  // print a dot on every progress callback
                    }
                });
        IOUtils.copyBytes(in, output, 4096, true);  // close = true: close both streams when done
    }

    /**
     * Download a file from HDFS to the local file system
     */
    @Test
    public void copyToLocalFile() throws Exception {
        Path localPath = new Path("D:\\h.xml");
        Path hdfsPath = new Path("/hdfsapi/test/a.xml");
        // delSrc = false keeps the source; useRawLocalFileSystem = true skips the
        // client-side .crc checksum file, which avoids problems on Windows
        fileSystem.copyToLocalFile(false, hdfsPath, localPath, true);
    }

    /**
     * List all entries under a directory
     * (listStatus is not recursive; use FileSystem#listFiles(path, true) to recurse)
     */
    @Test
    public void listFiles() throws Exception {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/hdfsapi"));
        for (FileStatus fileStatus : fileStatuses) {
            String isDir = fileStatus.isDirectory() ? "directory" : "file";
            short replication = fileStatus.getReplication();
            long len = fileStatus.getLen();
            String path = fileStatus.getPath().toString();
            System.out.println(isDir + "\t" + replication + "\t" + len + "\t" + path);
        }
    }

    /**
     * Delete a file or directory
     * (the second argument, recursive, must be true to delete a non-empty directory)
     * @throws Exception
     */
    @Test
    public void delete() throws Exception {
        fileSystem.delete(new Path("/hdfsapi/test/b.txt"), true);
    }

    @Before
    public void setUp() throws Exception {
        System.out.println("HDFSApp.setUp");
        configuration = new Configuration();
        // connect to HDFS as user "hadoop"
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");
    }

    @After
    public void tearDown() throws Exception {
        configuration = null;
        fileSystem = null;
        System.out.println("HDFSApp.tearDown");
    }
}
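With setUp() connecting before each test and tearDown() releasing the references afterwards, every @Test method can be run on its own from the IDE, or from the command line; assuming the standard Surefire plugin, a single test can be invoked like this:
mvn test -Dtest=HDFSApp#mkdir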