HDFS客户端操作

1、HDFS客户端操作

1.1 环境准备

配置HADOOP_HOME环境变量（指向解压后的Hadoop目录，注意路径不要带中文或空格），然后把 %HADOOP_HOME%\bin 追加到Path；Windows下还需在bin目录放置对应版本的winutils.exe，否则客户端会报错。

1.2 测试代码

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<groupId>com.mine</groupId>
	<artifactId>hdfs-001</artifactId>
	<version>0.0.1-SNAPSHOT</version>

	<properties>
		<hadoop.version>2.10.0</hadoop.version>
	</properties>

	<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>4.12</version>
		</dependency>

		<dependency>
			<groupId>org.apache.logging.log4j</groupId>
			<artifactId>log4j-core</artifactId>
			<version>2.8.2</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-common</artifactId>
			<version>${hadoop.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-client</artifactId>
			<version>${hadoop.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-hdfs</artifactId>
			<version>${hadoop.version}</version>
		</dependency>
		<dependency>
			<groupId>jdk.tools</groupId>
			<artifactId>jdk.tools</artifactId>
			<version>1.8</version>
			<scope>system</scope>
			<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
		</dependency>
	</dependencies>

</project>

TestHdfsFile

package com.mine.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Test;

public class TestHdfsFile {
	private FileSystem getFileSystem() throws Exception {
		Configuration configuration = new Configuration();
		FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop150:9000"), configuration, "root");
		return fileSystem;
	}

	@Test
	public void testMkdir() throws Exception {
		FileSystem fileSystem = getFileSystem();
		fileSystem.mkdirs(new Path("/test/hdfs001"));
		fileSystem.close();
	}

	@Test
	public void testCopyFromLocal() throws Exception {
		FileSystem fileSystem = getFileSystem();
		fileSystem.copyFromLocalFile(new Path("F:/study/workspace/hadoop/hdfs-001/pom.xml"), new Path("/test/hdfs001"));
		fileSystem.close();
	}

	@Test
	public void testCopyToLocal() throws Exception {
		FileSystem fileSystem = getFileSystem();
		fileSystem.copyToLocalFile(new Path("/test/hdfs001/pom.xml"), new Path("f:/study/file.xml"));
		fileSystem.close();
	}

	@Test
	public void testList() throws Exception {
		FileSystem fileSystem = getFileSystem();
		RemoteIterator<LocatedFileStatus> listFiles = fileSystem.listFiles(new Path("/test"), true);
		while (listFiles.hasNext()) {
			LocatedFileStatus next = listFiles.next();
			System.out.println(next.getPath().getName());
			System.out.println(next.getPermission());
			System.out.println(next.getLen());
			System.out.println(next.getGroup());
			System.out.println("----------");
		}
		fileSystem.close();
	}

	@Test
	public void testDelete() throws Exception {
		FileSystem fileSystem = getFileSystem();
		fileSystem.delete(new Path("/test/hdfs001"), true);
		fileSystem.close();
	}
}

TestHdfsIO

package com.mine.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

public class TestHdfsIO {
	// 从本地上传到hdfs
	@Test
	public void testIOUpload() throws Exception {
		Configuration configuration = new Configuration();
		FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop150:9000"), configuration, "root");

		FileInputStream in = new FileInputStream(new File("F:/study/workspace/hadoop/hdfs-001/pom.xml"));
		FSDataOutputStream out = fileSystem.create(new Path("/test/hdfs001/myfilename.txt"));

		IOUtils.copyBytes(in, out, configuration);

		IOUtils.closeStream(in);
		IOUtils.closeStream(out);
		fileSystem.close();
	}

	// 从hdfs下载到本地
	@Test
	public void testIODownload() throws Exception {
		Configuration configuration = new Configuration();
		FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop150:9000"), configuration, "root");

		FSDataInputStream in = fileSystem.open(new Path("/test/hdfs001/myfilename.txt"));
		FileOutputStream out = new FileOutputStream(new File("F:/study/pom.xml"));

		IOUtils.copyBytes(in, out, configuration);

		IOUtils.closeStream(in);
		IOUtils.closeStream(out);
		fileSystem.close();
	}
}

根据代码顺序逐个测试即可

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值