API操作
一、建立MAVEN工程,在POM.XML中引入JAR包
pom.xml
<dependencies>
    <!-- Log4j 2 core. NOTE: 2.14.0 is affected by Log4Shell (CVE-2021-44228);
         2.17.1 is the patched release and a drop-in replacement.
         Also note: the log4j.properties below uses Log4j 1.x syntax, which this
         artifact does not read - it is actually consumed by the log4j 1.2 jar
         pulled in transitively by the Hadoop dependencies. -->
    <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-core</artifactId>
        <version>2.17.1</version>
    </dependency>
    <!-- hadoop-client already depends on hadoop-common and hadoop-hdfs;
         the explicit declarations below are kept for clarity and to pin versions. -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.7</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.7</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.7</version>
    </dependency>
</dependencies>
log4j.properties
# Root logger at INFO level, writing to both the console and the file appender.
# FIX: "logFile" was previously configured but never attached to any logger,
# so target/spring.log was never written. It is now attached to the root logger.
# NOTE(review): this is Log4j 1.x syntax - it is read by the log4j 1.2 jar that
# Hadoop pulls in transitively, not by the log4j-core 2.x artifact in the POM.
log4j.rootLogger=INFO, stdout, logFile

# Console appender: timestamp, level, logger category, message.
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n

# File appender: same pattern, written to target/spring.log.
log4j.appender.logFile=org.apache.log4j.FileAppender
log4j.appender.logFile.File=target/spring.log
log4j.appender.logFile.layout=org.apache.log4j.PatternLayout
log4j.appender.logFile.layout.ConversionPattern=%d %p [%c] - %m%n
二、编写代码
1.创建文件夹
代码如下(示例):
package com.hdfs.controller;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
public class HDFSTest {
    /**
     * Connects to the HDFS cluster at {@code hdfs://s201:9000} as user
     * {@code root} and creates the directory {@code /root/tt}.
     *
     * @param args unused
     * @throws IOException            if the filesystem cannot be reached or the
     *                                directory cannot be created
     * @throws URISyntaxException     if the hard-coded URI is malformed
     * @throws InterruptedException   if the connection attempt is interrupted
     */
    public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException {
        // 1. Obtain the FileSystem handle for the remote cluster.
        Configuration conf = new Configuration();
        // FIX: the original called fs.close() unconditionally after mkdirs, which
        // leaked the connection if mkdirs threw. try-with-resources guarantees
        // the FileSystem is closed on every path.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://s201:9000"), conf, "root")) {
            // 2. Execute the API call: create the directory (and any missing parents).
            fs.mkdirs(new Path("/root/tt"));
        }
        // 3. The stream is closed automatically by try-with-resources.
    }
}
2.上传数据
代码如下(示例):
//1. Obtain the FileSystem handle (connects to HDFS as user "root").
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(new URI("hdfs://s201:9000"),conf,"root");
//2. Execute the API call: upload local file d:/c.txt to HDFS as /root/tt/b.txt.
fs.copyFromLocalFile(new Path("d:/c.txt"),new Path("/root/tt/b.txt"));
// fs.copyFromLocalFile(new Path("/root/a.txt"),new Path("/a.txt"));
//3. Close the FileSystem handle.
fs.close();
3.删除文件
代码如下(示例):
//1. Obtaining the FileSystem handle is the same as in the code above.
//2. Execute the API call: delete /c.txt; the second argument "true" enables recursive deletion.
fs.delete(new Path("/c.txt"),true);
三、上传到LINUX服务器运行
hadoop jar hdfs-api.jar com.hdfs.controller.HDFSTest
如果在本地(例如 Windows)运行,需要以下文件(如 winutils.exe 等),或者本地安装 Hadoop 并配置 HADOOP_HOME 环境变量。