Java 操作 Hadoop —— Java API 使用分享

文件系统对象FileSystem

Configuration conf = new Configuration();
// Create the configuration object describing the HDFS connection
conf.set("fs.defaultFS", "hdfs://localhost:9000");
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000"), conf, user);
// Obtain the file system handle, performing operations as `user`
// 获取文件系统对象

列出文件

public void listfiles(String where) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        // Configuration describing the HDFS connection
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes the FileSystem handle even on error;
        // the previous version leaked it on every call.
        try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000"), conf, user)) {
            // Second argument: true = recurse into subdirectories,
            // false = list only files directly inside `where`.
            RemoteIterator<LocatedFileStatus> iter = fsSource.listFiles(new Path(where), true);

            while (iter.hasNext()) {
                LocatedFileStatus file = iter.next();
                // Print the path without the "hdfs://host:port" prefix.
                // toUri().getPath() is robust, unlike the previous
                // substring(21) which breaks if the URI ever changes.
                System.out.println(user + "$:" + file.getPath().toUri().getPath());

                //System.out.println(file.getPath().getName());
                // (prints only the file name)
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

这里注意到 RemoteIterator&lt;LocatedFileStatus&gt; iter = fsSource.listFiles(new Path(where), true); 中第二个参数为 true 表示递归遍历子目录。
为 true 时我的运行结果如下:
(此处为运行结果截图)
为 false 时什么都没有遍历到——这通常是因为 false 只列出该目录下的直接文件(不会进入子目录,且 listFiles 本身不返回目录项),而根目录下往往只有目录、没有直接文件。
(此处为运行结果截图)

创建目录

/*
 * Creates a directory (and any missing parents) on HDFS.
 */
public void mkdir(String name) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes the FileSystem handle even on error;
        // the previous version leaked it on every call.
        try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
            fsSource.mkdirs(new Path(name));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

删除文件

/*
 * Deletes a file or directory on HDFS.
 * recursive = true is required to remove a non-empty directory.
 */
public void delete(String name, boolean recursive) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes the FileSystem handle even on error;
        // the previous version leaked it on every call.
        try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
            fsSource.delete(new Path(name), recursive);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

下载文件

/*
 * Downloads the HDFS file `src` to the local path `dst`.
 */
    public void download(String src, String dst) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes the FileSystem handle even on error;
        // the previous version leaked it on every call.
        try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
            fsSource.copyToLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

上传文件

/*
 * Uploads the local file `src` to the HDFS path `dst`.
 */
    public void upload(String src, String dst) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes the FileSystem handle even on error;
        // the previous version leaked it on every call.
        try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
            fsSource.copyFromLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

重命名文件

/*
 * Renames (moves) `source` to `dst` within HDFS.
 */
public void rename(String source, String dst) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes the FileSystem handle even on error;
        // the previous version leaked it on every call.
        try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
            fsSource.rename(new Path(source), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
}

完整程序

注意开头package要改成你自己的包名

package oprate_hadoop;

import java.io.IOException;
import java.net.URI;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

//import java.util.concurrent.TransferQueue;
//import javax.naming.NamingException;
//import org.apache.hadoop.fs.FileStatus;
//import org.apache.commons.collections.bag.SynchronizedSortedBag;


public class Oprate_hadoop {

    // HDFS NameNode URI shared by every operation.
    private static final String HDFS_URI = "hdfs://localhost:9000";

    // User that performs the HDFS operations — here the current Linux login
    // user; it is passed to FileSystem.get(uri, conf, user).
    String user = "hadoop";

    /**
     * Builds a FileSystem handle for the target cluster.
     * The caller must close it (use try-with-resources): the original code
     * duplicated this setup in every method and never closed the handle.
     */
    private FileSystem getFs() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        return FileSystem.get(URI.create(HDFS_URI), conf, user);
    }

    /**
     * Recursively lists all files under {@code where} and prints each path
     * without its scheme/authority prefix.
     */
    public void listfiles(String where) throws IOException, InterruptedException {
        try (FileSystem fs = getFs()) {
            // true => recurse into subdirectories; false would list only the
            // files directly inside `where` (listFiles never returns dirs).
            RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path(where), true);

            while (iter.hasNext()) {
                LocatedFileStatus file = iter.next();
                // toUri().getPath() strips "hdfs://localhost:9000" robustly,
                // unlike the previous hard-coded substring(21).
                System.out.println(user + "$:" + file.getPath().toUri().getPath());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a directory (and any missing parents) on HDFS.
     */
    public void mkdir(String name) throws IOException, InterruptedException {
        try (FileSystem fs = getFs()) {
            fs.mkdirs(new Path(name));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Deletes a file or directory on HDFS.
     * recursive = true is required to remove a non-empty directory.
     */
    public void delete(String name, boolean recursive) throws IOException, InterruptedException {
        try (FileSystem fs = getFs()) {
            fs.delete(new Path(name), recursive);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Downloads the HDFS file {@code src} to the local path {@code dst}.
     */
    public void download(String src, String dst) throws IOException, InterruptedException {
        try (FileSystem fs = getFs()) {
            fs.copyToLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Uploads the local file {@code src} to the HDFS path {@code dst}.
     */
    public void upload(String src, String dst) throws IOException, InterruptedException {
        try (FileSystem fs = getFs()) {
            fs.copyFromLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Renames (moves) {@code source} to {@code dst} within HDFS.
     */
    public void rename(String source, String dst) throws IOException, InterruptedException {
        try (FileSystem fs = getFs()) {
            fs.rename(new Path(source), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Interactive console menu. One Scanner is reused for all input: the
     * original created a new Scanner(System.in) per prompt, which can lose
     * buffered input. Reading whole lines also avoids the classic
     * nextInt()/nextLine() newline-mixing problem.
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        Oprate_hadoop hadoop = new Oprate_hadoop();
        Scanner input = new Scanner(System.in);
        while (true) {
            System.out.println("==============================================");
            System.out.println("\n " +
                    "||\t\t1.查看文件\t\t|| \n " +
                    "||\t\t2.创建目录\t\t|| \n " +
                    "||\t\t3.删除文件\t\t|| \n " +
                    "||\t\t4.下载文件\t\t|| \n " +
                    "||\t\t5.上传文件\t\t|| \n " +
                    "||\t\t6.重命名文件\t\t|| \n");
            System.out.println("==============================================");
            int number = Integer.parseInt(input.nextLine().trim());
            if (number == 1) {
                hadoop.listfiles("/");
            }
            else if (number == 2) {
                System.out.println("请输入目录地址");
                System.out.print("hdfs$:");
                String file = input.nextLine();
                hadoop.mkdir(file);
            }
            else if (number == 3) {
                System.out.println("请输入要删除的文件名称");
                String file = input.nextLine();
                hadoop.delete(file, true);
            }
            else if (number == 4) {
                System.out.println("请输入要下载的文件目录");
                String file = input.nextLine();
                System.out.println("请输入要存放在本地的目录");
                String dst = input.nextLine();
                hadoop.download(file, dst);
            }
            else if (number == 5) {
                System.out.println("请输入上传文件的位置");
                String file = input.nextLine();
                System.out.println("请输入上传的目标位置");
                String dst = input.nextLine();
                hadoop.upload(file, dst);
            }
            else if (number == 6) {
                System.out.println("请输入需要改名的文件");
                String file = input.nextLine();
                System.out.println("请输入新名字");
                String dst = input.nextLine();
                hadoop.rename(file, dst);
            }
        }
    }
}
  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 3
    评论
评论 3
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值