1、HDFS客户端操作
1.1 环境准备
配置HADOOP_HOME环境变量(注意路径不要带中文或空格),然后将 %HADOOP_HOME%\bin 追加到 Path 环境变量中
1.2 测试代码
pom.xml
<!-- Maven build for the HDFS client tutorial project (hdfs-001). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.mine</groupId>
    <artifactId>hdfs-001</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <properties>
        <!-- Single place to bump the Hadoop release for all three Hadoop artifacts. -->
        <hadoop.version>2.10.0</hadoop.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.8.2</version>
        </dependency>
        <!-- NOTE: hadoop-client already pulls in hadoop-common and hadoop-hdfs
             transitively; the explicit entries below are kept for clarity. -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <!-- tools.jar is only needed on JDK 8 and below; it was removed in JDK 9+. -->
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.8</version>
            <scope>system</scope>
            <!-- Maven does not expand ${JAVA_HOME}; OS environment variables
                 must be referenced with the "env." prefix. -->
            <systemPath>${env.JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
    </dependencies>
</project>
TestHdfsFile
package com.mine.hdfs;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Test;
public class TestHdfsFile {

    /**
     * Opens a connection to the HDFS NameNode at hadoop150:9000 as user "root".
     * The caller is responsible for closing the returned {@link FileSystem};
     * all tests below do so with try-with-resources.
     *
     * @return an open HDFS client handle
     * @throws Exception if the URI is malformed or the connection fails
     */
    private FileSystem getFileSystem() throws Exception {
        Configuration configuration = new Configuration();
        return FileSystem.get(new URI("hdfs://hadoop150:9000"), configuration, "root");
    }

    /** Creates the directory /test/hdfs001 on HDFS (parents included). */
    @Test
    public void testMkdir() throws Exception {
        // try-with-resources guarantees the connection is closed even when
        // the HDFS call throws; the original leaked the FileSystem on failure.
        try (FileSystem fileSystem = getFileSystem()) {
            fileSystem.mkdirs(new Path("/test/hdfs001"));
        }
    }

    /** Uploads the local pom.xml into /test/hdfs001 on HDFS. */
    @Test
    public void testCopyFromLocal() throws Exception {
        try (FileSystem fileSystem = getFileSystem()) {
            fileSystem.copyFromLocalFile(
                    new Path("F:/study/workspace/hadoop/hdfs-001/pom.xml"),
                    new Path("/test/hdfs001"));
        }
    }

    /** Downloads /test/hdfs001/pom.xml from HDFS to the local disk. */
    @Test
    public void testCopyToLocal() throws Exception {
        // NOTE(review): on Windows this variant writes through the native local
        // filesystem and may require winutils.exe — confirm HADOOP_HOME setup.
        try (FileSystem fileSystem = getFileSystem()) {
            fileSystem.copyToLocalFile(
                    new Path("/test/hdfs001/pom.xml"),
                    new Path("f:/study/file.xml"));
        }
    }

    /** Recursively lists all files under /test and prints basic metadata. */
    @Test
    public void testList() throws Exception {
        try (FileSystem fileSystem = getFileSystem()) {
            // Second argument "true" requests recursive listing.
            RemoteIterator<LocatedFileStatus> listFiles =
                    fileSystem.listFiles(new Path("/test"), true);
            while (listFiles.hasNext()) {
                LocatedFileStatus status = listFiles.next();
                System.out.println(status.getPath().getName());
                System.out.println(status.getPermission());
                System.out.println(status.getLen());
                System.out.println(status.getGroup());
                System.out.println("----------");
            }
        }
    }

    /** Recursively deletes /test/hdfs001 and everything underneath it. */
    @Test
    public void testDelete() throws Exception {
        try (FileSystem fileSystem = getFileSystem()) {
            // "true" enables recursive delete of non-empty directories.
            fileSystem.delete(new Path("/test/hdfs001"), true);
        }
    }
}
TestHdfsIO
package com.mine.hdfs;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
public class TestHdfsIO {

    /**
     * Uploads a local file to HDFS with raw streams.
     *
     * <p>Uses try-with-resources for the FileSystem and both streams: the
     * original only closed them on the happy path, leaking the local input
     * stream if {@code fileSystem.create} threw.
     */
    @Test
    public void testIOUpload() throws Exception {
        Configuration configuration = new Configuration();
        try (FileSystem fileSystem =
                     FileSystem.get(new URI("hdfs://hadoop150:9000"), configuration, "root");
             FileInputStream in =
                     new FileInputStream(new File("F:/study/workspace/hadoop/hdfs-001/pom.xml"));
             FSDataOutputStream out =
                     fileSystem.create(new Path("/test/hdfs001/myfilename.txt"))) {
            // copyBytes honors the configured io.file.buffer.size.
            IOUtils.copyBytes(in, out, configuration);
        }
    }

    /**
     * Downloads a file from HDFS to the local disk with raw streams.
     *
     * <p>Same resource-safety fix as {@link #testIOUpload()}: all three
     * resources are now closed even when an intermediate call throws.
     */
    @Test
    public void testIODownload() throws Exception {
        Configuration configuration = new Configuration();
        try (FileSystem fileSystem =
                     FileSystem.get(new URI("hdfs://hadoop150:9000"), configuration, "root");
             FSDataInputStream in =
                     fileSystem.open(new Path("/test/hdfs001/myfilename.txt"));
             FileOutputStream out = new FileOutputStream(new File("F:/study/pom.xml"))) {
            IOUtils.copyBytes(in, out, configuration);
        }
    }
}
根据代码顺序逐个测试即可