import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class FileHandle {
/**
* 设置hadoop HDFS 初始化配置方法
* @throws IOException
*/
public static FileSystem init(){
Configuration config=new Configuration();
config.set("fs.defaultFS", "hdfs://192.168.100.200:9000/");
FileSystem fs=null;
try{
fs=FileSystem.get(config);
}catch(Exception e){
throw new RuntimeException("初始化异常");
}
return fs;
}
/**
* 上传文件的方法
* @param uploadPath 本地路径
* @param hdfsPath hadoop的路径
*/
public static void uploadFileHDFS(String uploadPath,String hdfsPath){
FileSystem fs=init();
Path src =new Path(uploadPath);
Path dst =new Path(hdfsPath);
try {
fs.copyFromLocalFile(src, dst);
fs.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* 创建新文件
* @param fileName 文件名字
* @param content 文件内容
*/
public static void createNewHdfsFile(String fileName,String content){
FileSystem fs=init();
Path path=new Path(fileName);
try {
FSDataOutputStream output=fs.create(path);
output.write(content.getBytes());
fs.close();
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* 将本地文件上传到hdfs上并命名
* @param HDFSFileName hdfs上的名字
* @param localName 本地上的文件名
*/
public static void upload(String HDFSFileName,String localName){
File file=new File(localName);
BufferedInputStream input=null;
try {
input = new BufferedInputStream(new FileInputStream(file));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
FileSystem fs=init();
byte[] b=new byte[1024];
int hasRead=0;
try {
FSDataOutputStream output=fs.create(new Path(HDFSFileName));
while((hasRead=input.read(b))>0){
output.write(b, 0, hasRead);
}
output.close();
input.close();
fs.close();
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* 将HDFS上文件复制到HDFS上
* @param src 原目标
* @param dsc 复制到的目标
* @throws Exception
* @throws IllegalArgumentException
*/
public static void copy(String src,String dsc) throws IllegalArgumentException, Exception{
/**
* 1:建立输入流
* 2:建立输出流
* 3:两个流的对接
* 4:资源的关闭
*/
FileSystem fs=init();
FSDataInputStream input=fs.open(new Path(src));
FSDataOutputStream output=fs.create(new Path(dsc));
byte[] b= new byte[1024];
int hasRead=0;
while((hasRead=input.read(b))>0){
output.write(b, 0, hasRead);
}
input.close();
output.close();
fs.close();
}
/**
* 创建目录
* @param DirName 目录的路径
*/
public static void createFileDir(String DirName){
FileSystem fs=init();
try {
fs.mkdirs(new Path(DirName));
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
/**
* 列出一个文件夹下面的信息
* @param rootPath 需要查找的目录
* @throws Exception 异常处理
*/
public static void queryDirOnHdfs(String rootPath) throws Exception{
FileSystem fs=init();
Path path=new Path(rootPath);
FileStatus[] status=fs.listStatus(path);
for(FileStatus fileStatus:status){
if(fileStatus.isDirectory()){
System.out.println("这是一个文件夹"+fileStatus.getPath()+"\n");
queryDirOnHdfs(fileStatus.getPath().toString());
}else{
System.out.println("文件名字是:"+fileStatus.getPath()+"\n");
System.out.println("文件的大小为:"+fileStatus.getLen()+"\n");
System.out.println("文件的大小为:"+fileStatus.toString()+"\n");
}
}
fs.close();
}
}