Working with HDFS from Java

Add the dependencies

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>2.9.2</version>
    </dependency>

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-common</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-client</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
      <version>2.9.2</version>
    </dependency>

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
      <version>2.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-common</artifactId>
      <version>2.9.2</version>
    </dependency>

    <dependency>
      <groupId>org.anarres.lzo</groupId>
      <artifactId>lzo-hadoop</artifactId>
      <version>1.0.6</version>
    </dependency>

Test methods

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;

public class HDFSTEST {
    private final String HDFS_PATH = "hdfs://192.168.233.150:9000";

    private FileSystem fs = null;
    private Configuration cfg = null;

    @Before
    public void setUp() throws Exception {

        cfg = new Configuration();

        fs = FileSystem.get(new URI(HDFS_PATH), cfg); // requires the HADOOP_USER_NAME environment variable to be set on Windows
        System.out.println("HDFS connection opened");
    }
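
    // A minimal alternative sketch, not from the original article: instead of setting the
    // HADOOP_USER_NAME environment variable on Windows, the HDFS user can be passed directly
    // to FileSystem.get(URI, Configuration, String). The user name passed in is an assumption;
    // use whichever account owns the target HDFS paths.
    private FileSystem openAsUser(String user) throws Exception {
        return FileSystem.get(new URI(HDFS_PATH), new Configuration(), user);
    }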

    @Test
    public void frameTest() throws IOException {
        System.out.println(cfg);
        System.out.println(fs);
    }


    // Upload a local file to HDFS
    @Test
    public void uploadFile() throws IOException {

        fs.copyFromLocalFile(new Path("F:/hadoop/hdfs/down/t1.txt"), new Path("/input/"));
        System.out.println("Local file uploaded to HDFS");
    }
    // Create a directory
    @Test
    public void mkdir() throws IOException {
        fs.mkdirs(new Path("/user/tt"));
        // mkdirs creates multi-level directories in one call, e.g. /aaa/bbb/ccc
        System.out.println("HDFS directory created");
    }


    @Test
    public void deleteDir() throws IllegalArgumentException, IOException {
        // The second argument is "recursive": true deletes the directory and its contents; false only succeeds on an empty directory
        fs.delete(new Path("/output"), true);

        System.out.println("Directory deleted");
    }


    @Test
    public void downFile() throws Exception {
        // The first argument (delSrc): true means "move" (delete the source after copying); false, or the overload without it, means copy
        fs.copyToLocalFile(false, new Path("/output/part-r-00000"), new Path("F:/down/t1.txt"));
        System.out.println("File downloaded successfully!");
    }
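
    // Assumed variant, not in the original post: on Windows, copyToLocalFile may require the
    // native winutils libraries because the checksummed local file system writes a .crc file
    // next to the download. The four-argument overload with useRawLocalFileSystem = true
    // writes through the raw local file system and skips the checksum file.
    @Test
    public void downFileRaw() throws Exception {
        // args: delSrc, src, dst, useRawLocalFileSystem
        fs.copyToLocalFile(false, new Path("/output/part-r-00000"), new Path("F:/down/t1_raw.txt"), true);
        System.out.println("File downloaded without a local .crc checksum file");
    }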

    // List file details (name, permission, length, block locations, etc.)
    @Test
    public void listFiles() throws IOException {
        System.out.println("File details:");
        RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path("/myhah.txt"), true);
        while (iterator.hasNext()){
            LocatedFileStatus fileStatus = iterator.next();
            System.out.println(fileStatus.getPath().getName());
            System.out.println(fileStatus.getPermission());
            System.out.println(fileStatus.getLen());
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                String[] hosts = blockLocation.getHosts();
                for (String host : hosts) {
                    // the host on which this block replica is stored
                    System.out.println(host);
                }
            }
            System.out.println("-----------------------------");
        }
    }

    // Determine whether each entry is a file or a directory
    @Test
    public void fileStatus() throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            if(fileStatus.isDirectory()){
                System.out.println(fileStatus.getPath().getName() + " is a directory");
            }else if(fileStatus.isFile()){
                System.out.println(fileStatus.getPath().getName() + " is a file");
            }
        }
    }

    // Check whether a file exists
    @Test
    public void exists() throws IOException {
        boolean exists = fs.exists(new Path("/12342.xml"));
        if(exists){
            System.out.println("文件已经存在");
        }else {
            System.out.println("文件不存在");
        }
    }


    // Create an empty file
    @Test
    public void createNewFile() throws IOException {
        boolean newFile = fs.createNewFile(new Path("/aa.txt"));
        System.out.println("创建文件:"+newFile);
    }


    // Delete a file
    @Test
    public void deleteFile() throws IOException {
        boolean res = fs.delete(new Path("/aa.txt"),true);
        System.out.println("删除文件:"+res);
    }

    // Rename a file
    @Test
    public void rename() throws IOException {
        Path src = new Path("/aa.txt");
        Path aim = new Path("/aim.xml");
        fs.rename(src, aim);
        System.out.println("文件重命名完成");
    }

    // Write content to a file (note: fs.create overwrites any existing file)
    @Test
    public void write() throws IOException {
        FSDataOutputStream fos = fs.create(new Path("/aim.xml"));
        fos.writeUTF("test");
        fos.flush();
        fos.close();
        System.out.println("Content written");
    }
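
    // Assumed sketch, not part of the original article: fs.create above overwrites the file,
    // while FileSystem.append adds to the end of an existing file, provided the underlying
    // file system supports appends (plain HDFS does; some configurations disable it).
    @Test
    public void appendToFile() throws IOException {
        FSDataOutputStream out = fs.append(new Path("/aim.xml"));
        out.writeUTF("appended content");
        out.close();
        System.out.println("Content appended");
    }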

    // Read the entire file
    @Test
    public void fileToIO() throws IllegalArgumentException, IOException {
        // open the HDFS file as an input stream
        FSDataInputStream inputStream = fs.open(new Path("/aim.xml"));
        // copy the stream contents to standard output
        IOUtils.copyBytes(inputStream, System.out, 4096, false);
        IOUtils.closeStream(inputStream);
    }

    // Read only the beginning of a file (the first 256 characters) and write it to a local file
    @Test
    public void getStringByLine() throws IOException {
        FSDataInputStream dis = fs.open(new Path("/aim.xml"));
        BufferedReader br = new BufferedReader(new InputStreamReader(dis));
        FileWriter fileWriter = new FileWriter(new File("F:/down/ss.xml"));
        char[] temp = new char[16];
        for (int i = 0; i < 16; i++) { // 16 reads of 16 characters = 256 characters in total
            int n = br.read(temp);
            if (n <= 0) break;            // stop early if the file is shorter than 256 characters
            fileWriter.write(temp, 0, n); // write only the characters actually read
        }
        br.close();
        fileWriter.close();

        System.out.println("定位读取文件完成");
    }

    // Read from a specified offset to the end of the file
    @Test
    public void getStringByIOUtils() throws IOException {
        FSDataInputStream dis = fs.open(new Path("/aim.xml"));
        dis.seek(256); // position the stream at byte offset 256

        FileOutputStream fos = new FileOutputStream(new File("F:/down/ss2.xml"));

        IOUtils.copyBytes(dis, fos, cfg);
        IOUtils.closeStream(dis);
        IOUtils.closeStream(fos);
    }
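
    // Assumed sketch, not in the original post: combining seek with the length-limited
    // IOUtils.copyBytes(in, out, count, close) overload copies an exact byte range, here
    // 128 bytes starting at offset 256, assuming /aim.xml is at least 384 bytes long.
    @Test
    public void readByteRange() throws IOException {
        FSDataInputStream in = fs.open(new Path("/aim.xml"));
        in.seek(256);                           // start offset in bytes
        FileOutputStream fos = new FileOutputStream(new File("F:/down/ss3.xml"));
        IOUtils.copyBytes(in, fos, 128L, true); // copy 128 bytes, then close both streams
    }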

    @After
    public void tearDown() throws Exception {
        fs.close();
        System.out.println("HDFS 关闭");
    }
}

