HDFS API详解:https://www.cnblogs.com/alisande/archive/2012/06/06/2537903.html
Hadoop HDFS 文件访问权限问题导致Java Web 上传文件到Hadoop失败的原因分析及解决方法:https://blog.csdn.net/bikun/article/details/25506489?utm_medium=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-1.nonecase&depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-1.nonecase
Name node is in safe mode 解决办法:离开safe mode就可以
执行一下:hdfs dfsadmin -safemode leave (旧写法 hadoop dfsadmin 已废弃)
pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>HDFS0519</groupId>
<artifactId>HDFSUpload</artifactId>
<version>1.0-SNAPSHOT</version>
<repositories>
<repository>
<id>apache</id>
<url>https://repo.maven.apache.org/maven2</url>
</repository>
</repositories>
<dependencies>
<!--<dependency>-->
<!--<groupId>org.apache.hadoop</groupId>-->
<!--<artifactId>hadoop-core</artifactId>-->
<!--<version>1.2.1</version>-->
<!--</dependency>-->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.7.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.7.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.7.1</version>
</dependency>
</dependencies>
</project>
代码:
package com.xy.uploadfile;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
* @ClassName s
* @Description
* @Date 2020-05-19 19:44
* @Create By XinYan
*/
public class UploadFile {
    /**
     * Uploads a local file to the root directory of an HDFS cluster, then
     * lists the root directory contents (equivalent to {@code hdfs dfs -ls /}).
     *
     * @param args unused
     * @throws IOException        if the upload or listing fails
     * @throws URISyntaxException if the hard-coded HDFS URI is malformed
     */
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://hadoop:9000");
        // try-with-resources closes the FileSystem handle even when the
        // upload throws — the original leaked it.
        try (FileSystem fs = FileSystem.get(uri, conf)) {
            // Local source file
            Path src = new Path("E:/工作学习/学习/java-code/HDFSUpload/uploadfiletext.txt");
            // Destination directory in HDFS
            Path dst = new Path("/");
            fs.copyFromLocalFile(src, dst);
            // Report the cluster actually used. conf.get("fs.defaultFS") was
            // never set on this Configuration, so it would misleadingly print
            // the client default (file:///); fs.getUri() is authoritative.
            System.out.println("Upload to " + fs.getUri());
            // Equivalent of: hdfs dfs -ls /
            FileStatus[] files = fs.listStatus(dst);
            for (FileStatus file : files) {
                System.out.println(file.getPath());
            }
        }
    }
}