Code snippets for performing file operations on HDFS

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Optation {
    /**
     * HDFS file access API
     */
    public static String hdfsUrl = "hdfs://localhost:9000";
    /**
     * create an HDFS folder
     * @param dirPath
     */
    public static void createDir(String dirPath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(dirPath);
        hdfs.mkdirs(path);
        hdfs.close();
    }
    /**
     * delete an HDFS folder (recursively)
     * @param dirPath
     */
    public static void deleteDir(String dirPath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(dirPath);
        // delete(Path) is deprecated; pass true to remove the folder and its contents
        hdfs.delete(path, true);
        hdfs.close();
    }
    /**
     * create a file
     * @param filePath
     */
    public static void createFile(String filePath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(filePath);
        FSDataOutputStream out = hdfs.create(path);
        out.close();
        hdfs.close();
    }
    /**
     * rename a file
     * @param oldPath
     * @param newPath
     */
    public static void renameFile(String oldPath, String newPath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(oldPath);
        Path nPath = new Path(newPath);
        hdfs.rename(path, nPath);
        hdfs.close();
    }
    /**
     * delete a file
     * @param hadoopFile
     * @return isDeleted
     */
    public static boolean deleteFile(String hadoopFile) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(hadoopFile);
        // recursive = false, since this targets a single file
        boolean isDeleted = hdfs.delete(path, false);
        hdfs.close();
        return isDeleted;
    }
    /**
     * upload a local file
     * @param localPath
     * @param hadoopPath
     */
    public static void uploadLocalFile(String localPath, String hadoopPath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path src = new Path(localPath);
        Path dst = new Path(hadoopPath);
        hdfs.copyFromLocalFile(src, dst);
        hdfs.close();
    }
    /**
     * read a file into a byte buffer
     * @param hadoopFile
     * @return buffer
     */
    public static byte[] readFile(String hadoopFile) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(hadoopFile);
        if(hdfs.exists(path)){
            FSDataInputStream in = hdfs.open(path);
            FileStatus stat = hdfs.getFileStatus(path);
            // size the buffer by the file length, not the block size
            byte[] buffer = new byte[(int) stat.getLen()];
            in.readFully(0, buffer);
            in.close();
            hdfs.close();
            return buffer;
        }
        else{
            hdfs.close();
            throw new Exception("the file is not found.");
        }
    }
    /**
     * list files under folder
     * @param hadoopPath
     * @return fileString
     */
    public static String listFiles(String hadoopPath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path dst = new Path(hadoopPath);
        FileStatus[] files = hdfs.listStatus(dst);
        String fileString = "";
        for(FileStatus file : files){
            System.out.println(file.getPath().toString());
            // separate entries so the result is readable
            fileString += file.getPath().toString() + "\n";
        }
        hdfs.close();
        return fileString;
    }
    /**
     * list block info of a file (hosts holding each block)
     * @param hadoopPath
     * @return blockString
     */
    public static String getBlockInfo(String hadoopPath) throws Exception{
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path dst = new Path(hadoopPath);
        FileStatus fileStatus = hdfs.getFileStatus(dst);
        BlockLocation[] blkloc = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        String blockString = "";
        for(BlockLocation loc : blkloc){
            for(int i = 0; i < loc.getHosts().length; i++){
                System.out.println(loc.getHosts()[i]);
                blockString += loc.getHosts()[i] + " ";
            }
        }
        // close and return outside the loop so every block location is visited
        hdfs.close();
        return blockString;
    }
}
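
For completeness, here is a minimal driver sketching how the methods above might be called end to end. The paths used below (/test, /test/demo.txt, /tmp/local.txt) and the class name OptationDemo are placeholder assumptions for illustration only; adjust hdfsUrl and the paths to match your own cluster before running.

public class OptationDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder paths; replace with locations that exist in your environment.
        Optation.createDir("/test");
        Optation.createFile("/test/demo.txt");
        Optation.uploadLocalFile("/tmp/local.txt", "/test/local.txt");
        System.out.println(Optation.listFiles("/test"));
        System.out.println(new String(Optation.readFile("/test/local.txt"), "UTF-8"));
        System.out.println(Optation.getBlockInfo("/test/local.txt"));
        Optation.renameFile("/test/demo.txt", "/test/renamed.txt");
        Optation.deleteFile("/test/renamed.txt");
        Optation.deleteDir("/test");
    }
}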