Create a new Maven project - quickstart
1. Add the dependency to the pom file
<!-- HDFS dependency -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>${hadoop.version}</version>
</dependency>
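The ${hadoop.version} placeholder is a Maven property and must be declared in the same POM, or the build will fail to resolve the dependency. A minimal sketch; the 2.7.3 value is an assumption, use the version your cluster actually runs:

<properties>
    <!-- Assumed version; replace with the Hadoop version running on the cluster -->
    <hadoop.version>2.7.3</hadoop.version>
</properties>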
2. Write the code directly under the test folder (note: with the package declaration below, that means src/test/java/youngPeng/HDFS/)
package youngPeng.HDFS;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
public class HDFSApp {
    public static final String HDFS_PATH = "hdfs://47.94.206.59:9000";
    FileSystem fileSystem = null;
    Configuration configuration = null;
    @Before
    public void setUp() throws Exception {
        System.out.println("setUp: start running");
        // Initialization: no local Hadoop installation is needed; the cluster
        // is reached remotely through the given URI. The Configuration needs
        // no explicit settings here, the defaults are sufficient.
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration);
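        // A hedged alternative: FileSystem.get also has a three-argument
        // overload that takes the remote user name, so HDFS permission checks
        // run as that user rather than as the local OS account. The user name
        // "hadoop" below is an assumption; substitute your cluster's user.
        // fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");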
    }
    /**
     * Create a directory
     */
    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/youngPeng/HDFSDemo"));
    }
    /**
     * Create a file
     */
    @Test
    public void createFile() throws Exception {
        FSDataOutputStream outputStream = fileSystem.create(new Path("/youngPeng/HDFSDemo/01helloHDFS.txt"));
        outputStream.write("youngpeng, welcome to remote HDFS!".getBytes());
        outputStream.flush();
        outputStream.close();
    }
    /**
     * View the contents of an HDFS file
     */
    @Test
    public void cat() throws Exception {
        FSDataInputStream inputStream = fileSystem.open(new Path("/youngPeng/HDFSDemo/01helloHDFS.txt"));
        IOUtils.copyBytes(inputStream, System.out, 1024);
        inputStream.close();
    }
    /**
     * Rename a file
     */
    @Test
    public void rename() throws Exception {
        Path oldPath = new Path("/youngPeng/HDFSDemo/01helloHDFS.txt");
        Path newPath = new Path("/youngPeng/HDFSDemo/00helloHDFS.txt");
        boolean status = fileSystem.rename(oldPath, newPath);
        System.out.println("renamed? " + status);
    }
    /**
     * Upload a local file to the remote cluster
     */
    @Test
    public void copyFromLocal() throws Exception {
        Path localPath = new Path("C:\\work\\BigData\\01WordCount\\target\\wordcount-1.0.jar");
        Path hdfsPath = new Path("/youngPeng/HDFSDemo/");
        fileSystem.copyFromLocalFile(localPath, hdfsPath);
    }
    /**
     * Upload to the remote cluster with a progress indicator
     */
    @Test
    public void copyFromLocalWithProgress() throws Exception {
        InputStream in = new BufferedInputStream(
                new FileInputStream(
                        new File("C:\\work\\BigData\\01WordCount\\target\\wordcount-1.0.jar")
                ));
        FSDataOutputStream out = fileSystem.create(new Path("/youngPeng/HDFSDemo/wordcount.jar"),
                new Progressable() {
                    @Override
                    public void progress() {
                        // Called periodically as data is written; print one
                        // "=" per callback as a crude progress bar
                        System.out.print("=");
                    }
                });
        // The trailing "true" closes both streams once the copy finishes
        IOUtils.copyBytes(in, out, 4096, true);
    }
    /**
     * Download a file from HDFS
     */
    @Test
    public void copyToLocalFile() throws Exception {
        Path hdfsPath = new Path("");   // fill in: source path on HDFS
        Path localPath = new Path("");  // fill in: destination path on the local disk
        // copyToLocalFile(src, dst): the HDFS source path comes first
        fileSystem.copyToLocalFile(hdfsPath, localPath);
    }
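    // Note (an assumption based on the C:\ paths above): on Windows without
    // winutils installed, the two-argument copyToLocalFile may fail because it
    // goes through Hadoop's checksummed local file system. The four-argument
    // overload can bypass that layer:
    //     fileSystem.copyToLocalFile(false, hdfsPath, localPath, true);
    // (delSrc = false keeps the HDFS copy; useRawLocalFileSystem = true writes
    // through RawLocalFileSystem and skips the .crc checksum files.)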
    /**
     * List the files in a directory
     * @throws Exception
     */
    @Test
    public void fileList() throws Exception {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/youngPeng/HDFSDemo/"));
        for (FileStatus fileStatus : fileStatuses) {
            // isDirectory() == true means the entry is a directory
            String isDir = fileStatus.isDirectory() ? "directory" : "file";
            short replication = fileStatus.getReplication();
            long len = fileStatus.getLen();
            Path path = fileStatus.getPath();
            System.out.println(isDir + "\t" + replication + "\t" + len + "\t" + path);
        }
    }
    @After
    public void tearDown() throws Exception {
        // Release references
        configuration = null;
        fileSystem = null;
        System.out.println("tearDown: finished");
    }
}
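To re-run mkdir and createFile from scratch it helps to remove the demo directory first. A minimal sketch of such a test, to be added inside HDFSApp alongside the methods above (the delete method itself is standard FileSystem API; this test is an addition, not part of the original walkthrough):

    /**
     * Delete the demo directory (a sketch)
     */
    @Test
    public void delete() throws Exception {
        // "true" = recursive delete; required when the directory is not empty
        boolean deleted = fileSystem.delete(new Path("/youngPeng/HDFSDemo"), true);
        System.out.println("deleted? " + deleted);
    }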