Implementing HDFS File Reads and Writes in Java

Adding the dependencies with Maven

File → New → Project → Maven → Create from archetype (org.apache.maven.archetypes:maven-archetype-quickstart)
GroupId: the unique identifier of the organization behind the project. It corresponds to the Java package structure, i.e. the directory layout under main/java.
ArtifactId: the unique identifier of the project itself; in practice it is the project name, i.e. the name of the project's root directory. Then click Next → Finish.

pom.xml

<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  
  <groupId>cn.kgc.kb11</groupId>
  <artifactId>20210319-hdfs</artifactId>
  <version>1.0-SNAPSHOT</version>

  <name>20210319-hdfs</name>
  <!-- FIXME change it to the project's website -->
  <url>http://www.example.com</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.7</maven.compiler.source>
    <maven.compiler.target>1.7</maven.compiler.target>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.12</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.6.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.6.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>2.6.0</version>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
      <plugins>
        <!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.1.0</version>
        </plugin>
        <!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.8.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.22.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-jar-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
        <!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
        <plugin>
          <artifactId>maven-site-plugin</artifactId>
          <version>3.7.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-project-info-reports-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>

Program code

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class TestHDFS {
    public static void main(String[] args) {
        // 1. Create the Configuration
        Configuration conf = new Configuration();

        try {
            // 2. Create the FileSystem -> distributed file system (URI "hdfs://IP:9000", configuration, user name)
            FileSystem fs = FileSystem.get(new URI("hdfs://192.168.XXX.100:9000"), conf, "root");

            // 3. Issue HDFS commands
            fs.mkdirs(new Path("/testHDFS/java/hello"));                                             // recursively create directories
            fs.copyFromLocalFile(new Path("D:\\a.txt"), new Path("/testHDFS/java/hello/"));          // upload a local file to HDFS
            fs.copyToLocalFile(false, new Path("/testHDFS/java/hello/a.txt"), new Path("./"), true); // download a file from HDFS

            // listFiles returns only files (not directories); the second argument controls whether to recurse
            RemoteIterator<LocatedFileStatus> ri = fs.listFiles(new Path("/testHDFS"), false);
            while (ri.hasNext()) {
                System.out.println(ri.next());  // the query returns an iterator; walk it to print each file's status
            }

            fs.deleteOnExit(new Path("/testHDFS/java/hello/a.txt")); // mark the path for deletion when the FileSystem is closed
            fs.delete(new Path("/a"), true);                         // delete a path; true = recurse into directories

            // 4. Close the FileSystem
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }
}
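
The code above handles directories and whole-file transfers; for the "read and write" in the title, file contents can also be streamed directly. Below is a minimal sketch assuming the same NameNode URI and user as above; the class name TestHDFSStream, the path /testHDFS/java/hello/b.txt, and its contents are hypothetical, chosen only for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.net.URI;
import java.nio.charset.StandardCharsets;

public class TestHDFSStream {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.XXX.100:9000"), conf, "root");

        // Write: fs.create opens an FSDataOutputStream (overwrites an existing file by default)
        Path file = new Path("/testHDFS/java/hello/b.txt"); // hypothetical path for illustration
        try (FSDataOutputStream out = fs.create(file)) {
            out.write("hello hdfs\n".getBytes(StandardCharsets.UTF_8));
        }

        // Read: fs.open returns an FSDataInputStream; copy its contents to stdout
        try (FSDataInputStream in = fs.open(file)) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        }

        fs.close();
    }
}

Passing false as the last argument of IOUtils.copyBytes keeps System.out open; the HDFS streams themselves are closed by the try-with-resources blocks.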