Hadoop environment: Hadoop 2.7 on CentOS 6
Host environment: JDK 1.8 on Ubuntu 16
The client code is as follows:
package com.lcy.hdfs.upload;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.net.URI;

/**
 * Created by lcy on 17-5-19.
 */
public class test {

    // Create a directory on HDFS.
    @Test
    public void testMkdir() throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://10.10.3.189:8020"), new Configuration());
        fs.mkdirs(new Path("/lcy/upload"));
        fs.close();
    }

    // Upload a local file by streaming it into an HDFS output stream.
    @Test
    public void testUpload() throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://10.10.3.189:8020"), new Configuration());
        FSDataOutputStream fsDataOutputStream = fs.create(new Path("/lcy/upload/test.txt"));
        FileInputStream inputStream = new FileInputStream(new File("/etc/profile"));
        // Copy in 4096-byte chunks; the final 'true' closes both streams when done.
        IOUtils.copyBytes(inputStream, fsDataOutputStream, 4096, true);
        fs.close();
    }

    // Delete a path; the 'true' flag makes the delete recursive for directories.
    @Test
    public void testDel() throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://10.10.3.189:8020"), new Configuration());
        fs.delete(new Path("/lcy/test8.txt"), true);
        fs.close();
    }
}
The project's Maven pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>hadooptest</groupId>
    <artifactId>hadooptest</artifactId>
    <version>1.0-SNAPSHOT</version>

    <repositories>
        <repository>
            <id>apache</id>
            <!-- Maven Central -->
            <url>https://repo.maven.apache.org/maven2</url>
        </repository>
    </repositories>

    <properties>
        <hadoop.version>2.7.1</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.8.2</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Copies dependency jars, stripped of version suffixes, into ./lib
                 (run with: mvn dependency:copy-dependencies). -->
            <plugin>
                <artifactId>maven-dependency-plugin</artifactId>
                <configuration>
                    <excludeTransitive>false</excludeTransitive>
                    <stripVersion>true</stripVersion>
                    <outputDirectory>./lib</outputDirectory>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
Running the tests fails with:

org.apache.hadoop.security.AccessControlException: org.apache.hadoop.security.AccessControlException: Permission denied: user=lcy, access=WRITE, inode="/user":user:supergroup:drwxr-xr-x
Cause:
On the Hadoop server, the HDFS tree is owned by the hdfs user, and the mode bits (drwxr-xr-x) give everyone else no write access. When I test from my local machine I run as the user lcy, which does not match the HDFS owner, so the write is denied.
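You can confirm this from the client side by reading the metadata of the directory the write was rejected on. A minimal sketch, reusing the same NameNode address as above (the class name CheckOwner is illustrative; getFileStatus(), getOwner(), getGroup(), and getPermission() are standard FileSystem/FileStatus API):

package com.lcy.hdfs.upload;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class CheckOwner {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://10.10.3.189:8020"), new Configuration());
        // Fetch the metadata of the directory the write was rejected on.
        FileStatus status = fs.getFileStatus(new Path("/user"));
        // Prints owner, group, and mode bits, e.g.: user supergroup rwxr-xr-x
        System.out.println(status.getOwner() + " " + status.getGroup() + " " + status.getPermission());
        fs.close();
    }
}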
Solution:
When the Hadoop client's FileSystem creates a file, it first reads HADOOP_USER_NAME to decide which user it is acting as, so it is enough to set that environment variable.
At a command line (logged in as root):
# gedit /etc/profile
Append the following line to the end of the file:
export HADOOP_USER_NAME="hdfs"
Then log out and log back in (or reboot) for the variable to take effect.
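If editing /etc/profile is not desirable, the user can also be set from inside the client itself. A minimal sketch (the class name UploadAsHdfsUser is illustrative): setting the HADOOP_USER_NAME system property works because Hadoop's login code falls back from the environment variable to the system property of the same name, and FileSystem.get(URI, Configuration, String) is a standard overload that runs as the given user under simple authentication:

package com.lcy.hdfs.upload;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class UploadAsHdfsUser {
    public static void main(String[] args) throws Exception {
        // Option 1: set the system property before the first FileSystem.get() call;
        // the Hadoop login code checks the HADOOP_USER_NAME environment variable,
        // then this system property, to pick the user under simple authentication.
        System.setProperty("HADOOP_USER_NAME", "hdfs");
        FileSystem fs = FileSystem.get(new URI("hdfs://10.10.3.189:8020"), new Configuration());
        fs.mkdirs(new Path("/lcy/upload"));
        fs.close();

        // Option 2: pass the user explicitly via the three-argument overload.
        FileSystem fsAsHdfs =
                FileSystem.get(new URI("hdfs://10.10.3.189:8020"), new Configuration(), "hdfs");
        fsAsHdfs.mkdirs(new Path("/lcy/upload"));
        fsAsHdfs.close();
    }
}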
The HADOOP_USER_NAME discussion draws on http://www.udpwork.com/item/7047.html