简单介绍使用java控制hdfs文件系统
一、注意namenode端访问权限,修改hdfs-site.xml文件或修改文件目录权限
本次采用修改hdfs-site.xml用于测试,在configuration节点中添加如下内容
<property>
    <name>dfs.permissions.enabled</name>
    <value>false</value>
</property>
二、本次使用eclipse环境新建项目完成测试
使用手动添加jar包完成环境准备,jar包位于hadoop解压目录
如下:
hadoop-2.7.3\share\hadoop\common\hadoop-common-2.7.3.jar
hadoop-2.7.3\share\hadoop\common\lib\*.jar
hadoop-2.7.3\share\hadoop\hdfs\hadoop-hdfs-2.7.3.jar
添加完成jar包就可以编写代码,连接hdfs文件系统
连接hdfs需完成如下步骤
1.创建 org.apache.hadoop.conf.Configuration 用于指定客户端的配置(服务器的地址,上传下载文件的一些配置),本次采用如下方式配置
packagecom.huaqin.hdfs.conf;importorg.apache.hadoop.conf.Configuration;public class DeFaultDfsClientConfigration extendsConfiguration{publicDeFaultDfsClientConfigration() {this.set("fs.defaultFS","hdfs://*.*.*.*:9000");this.set("dfs.replication", "2");
}
}
2.编写Utils封装常见操作文件方法
需使用org.apache.hadoop.fs.FileSystem
通过上面的配置文件创建
FileSystem fileSystem = FileSystem.get(new DeFaultDfsClientConfigration());
创建完成之后便可以操作hdfs了,代码封装如下
packagecom.huaqin.hdfs.utils;importjava.io.File;importjava.io.FileInputStream;importjava.io.FileNotFoundException;importjava.io.IOException;importjava.util.Map;importorg.apache.hadoop.fs.FSDataInputStream;importorg.apache.hadoop.fs.FSDataOutputStream;importorg.apache.hadoop.fs.FileStatus;importorg.apache.hadoop.fs.FileSystem;importorg.apache.hadoop.fs.Path;importorg.apache.hadoop.io.IOUtils;importcom.huaqin.hdfs.conf.DeFaultDfsClientConfigration;public classHDFSFileUtils {public doubleprogressBar;public HDFSFileUtils() throwsIOException {//使用默认类加载
fileSystem = FileSystem.get(newDeFaultDfsClientConfigration());
}public HDFSFileUtils(DeFaultDfsClientConfigration clientConfration) throwsIOException {//使用指定类加载
fileSystem =FileSystem.get(clientConfration);
}//默认客户端配置类
privateFileSystem fileSystem;public voidreloadClientConfigration(DeFaultDfsClientConfigration clientConfration) {
fileSystem.setConf(clientConfration);
}public FileStatus[] list(String fileName) throwsFileNotFoundException, IllegalArgumentException, IOException {//TODO Auto-generated method stub
FileStatus[] statusList = this.fileSystem.listStatus(newPath(fileName));returnstatusList;
}public void text(String fileName) throwsIllegalArgumentException, IOException {//TODO Auto-generated method stub
FSDataInputStream inputStream = this.fileSystem.open(newPath(fileName));
IOUtils.copyBytes(inputStream, System.out, fileSystem.getConf());
}//上传文件
public void upload(String src, String dest) throwsIOException {//TODO Auto-generated method stub
FileInputStream in = newFileInputStream(src);
FSDataOutputStream os= this.fileSystem.create(new Path(dest), true);
IOUtils.copyBytes(in, os,4096, true);
}//删除文件
public boolean deleteFile(String dest) throwsIllegalArgumentException, IOException {//TODO Auto-generated method stub
boolean success = this.fileSystem.delete(new Path(dest), true);returnsuccess;
}//创建文件夹
public boolean makeDir(String dest) throwsIllegalArgumentException, IOException {return this.fileSystem.mkdirs(newPath(dest));
}//下载显示进度
public void download2(String dest, Map descript) throwsIllegalArgumentException, IOException {
FSDataInputStream in= fileSystem.open(newPath(dest));
descript.put("byteSize", in.available());
descript.put("current", 0);byte[] bs = new byte[1024];while (-1 !=(in.read(bs))) {
descript.put("current", descript.get("current") + 1024);
}
in.close();
}//上传显示进度
public void upload2(String src, String dest, Mapdescript)throwsIllegalArgumentException, IOException {
File file= newFile(src);
FileInputStream in= newFileInputStream(file);
FSDataOutputStream out= this.fileSystem.create(new Path(dest), true);
descript.put("byteSize", file.length());
descript.put("current", 0l);//0.5mb
byte[] bs = new byte[1024 * 1024 / 2];while (-1 !=(in.read(bs))) {
out.write(bs);
descript.put("current", descript.get("current") + 1024);
}
out.close();
in.close();
}
}
三、以下是JUnitTest测试环境
importjava.io.IOException;importjava.text.DecimalFormat;importjava.util.HashMap;importjava.util.Map;importorg.junit.Before;importorg.junit.Test;importcom.huaqin.hdfs.utils.HDFSFileUtils;public classHDFSFileUtilsJUT {
@Beforepublic void before() throwsIOException {
fileUtils= newHDFSFileUtils();
}
HDFSFileUtils fileUtils;
@Testpublic void testCreateNEWFile() throwsIOException {//fileUtils.upload("D:\\temp\\helloworld.txt", "/tmp/helloworld.txt");
fileUtils.upload("E:\\devtool\\hadoop-2.7.3.tar.gz", "/hadoop-2.7.3.tar.gz");
}
@Testpublic void testText() throwsIllegalArgumentException, IOException {
fileUtils.text("/hello.txt");
}
@Testpublic void testDeleteFile() throwsIllegalArgumentException, IOException {boolean success = fileUtils.deleteFile("/CentOS-7-x86_64-DVD-1511.iso");
System.out.println(success);
}
@Testpublic void testZMikdirs() throwsIllegalArgumentException, IOException {boolean success = fileUtils.makeDir("/tmp");
System.out.println(success);
}
@Testpublic void testdownload2() throwsIllegalArgumentException, IOException {
Map desc = new HashMap<>();
desc.put("current", 0);
desc.put("byteSize", 0);new Thread(newRunnable() {
@Overridepublic voidrun() {//TODO Auto-generated method stub
while (true) {try{
Thread.sleep(500);
System.out.printf("maxL:%d\tcurrent:%d\tsurplus:%d\n", desc.get("byteSize"),desc.get("current"),desc.get("byteSize")-desc.get("current"));
}catch(InterruptedException e) {//TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}).start();
fileUtils.download2("/hadoop-2.7.3.tar.gz",desc);
}
@Testpublic void testupload2() throwsIllegalArgumentException, IOException {
DecimalFormat df= new DecimalFormat("0.00%");
Map desc = new HashMap();
desc.put("current", 0l);
desc.put("byteSize", 0l);new Thread(newRunnable() {
@Overridepublic voidrun() {//TODO Auto-generated method stub
while (true) {try{
Thread.sleep(500);
System.out.printf("maxL:%d\tcurrent:%d\tsurplus:%d\tprogressBar:%s\n", desc.get("byteSize"),desc.get("current"),desc.get("byteSize")-desc.get("current"),df.format((desc.get("current")+0.0)/desc.get("byteSize")));
}catch(InterruptedException e) {//TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}).start();
fileUtils.upload2("D:\\hadoop\\CentOS-7-x86_64-DVD-1511.iso", "/CentOS-7-x86_64-DVD-1511.iso",desc);
}
}