package com.future.hdfs;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
/**
* @description:
* @author: 北斗星
* @date: 2021/5/3
* @version: 1.0
*/
public class HDFSTest {
    /** Configuration key naming the default filesystem. */
    private static final String NAME = "fs.defaultFS";
    /** HDFS NameNode address used by every test in this class. */
    private static final String VALUE = "hdfs://node1:8020";
    private Configuration conf;
    private FileSystem fs;

    /**
     * Obtains a shared {@link FileSystem} client before each test.
     */
    @Before
    public void init() {
        conf = new Configuration();
        // Acquire a filesystem client instance for the tests below.
        try {
            // Connect directly via the cluster URI.
            fs = FileSystem.get(new URI(HDFSTest.VALUE), conf);
        } catch (Exception e) {
            // Fail fast instead of swallowing the error: a swallowed failure
            // left fs == null and produced a confusing NPE inside the tests.
            throw new IllegalStateException("Cannot connect to HDFS at " + VALUE, e);
        }
    }

    /**
     * 12.4.3 Iterate over the files directly under "/" in HDFS (non-recursive).
     *
     * @throws Exception on connection or listing failure
     */
    @Test
    public void test1() throws Exception {
        Configuration conf = new Configuration();
        conf.set(HDFSTest.NAME, HDFSTest.VALUE);
        FileSystem fileSystem = FileSystem.get(conf);
        try {
            RemoteIterator<LocatedFileStatus> listFiles = fileSystem.listFiles(new Path("/"), false);
            while (listFiles.hasNext()) {
                LocatedFileStatus fileStatus = listFiles.next();
                Path path = fileStatus.getPath();
                System.out.println(path.getName());
            }
        } finally {
            // The original version leaked this FileSystem instance.
            fileSystem.close();
        }
    }

    /**
     * 12.4.4 Create a file and a directory tree on HDFS.
     *
     * @throws Exception on connection or creation failure
     */
    @Test
    public void test2() throws Exception {
        Configuration conf = new Configuration();
        conf.set(HDFSTest.NAME, HDFSTest.VALUE);
        FileSystem fileSystem = FileSystem.newInstance(conf);
        try {
            // create() makes a FILE (not a folder) and returns an output stream
            // that must be closed, otherwise the stream/lease leaks.
            fileSystem.create(new Path("/FUTURE/mydir/test")).close();
            // mkdirs() is the call that actually creates a directory tree.
            fileSystem.mkdirs(new Path("/FUTURE1/mydir/test"));
        } finally {
            fileSystem.close();
        }
    }

    /**
     * 12.4.5 Download a file (variant 1): high-level copyToLocalFile API.
     */
    @Test
    public void test3() throws Exception {
        Configuration conf = new Configuration();
        conf.set(HDFSTest.NAME, HDFSTest.VALUE);
        FileSystem fileSystem = FileSystem.get(conf);
        try {
            fileSystem.copyToLocalFile(new Path("/FUTURE/mydir/test"), new Path("file:///D:\\temp\\JD_Phone"));
        } finally {
            fileSystem.close();
        }
    }

    /**
     * Download a file (variant 2): open an HDFS input stream and copy it
     * to a local file manually.
     */
    @Test
    public void test4() throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI(HDFSTest.VALUE), new Configuration());
        try (FSDataInputStream inputStream = fileSystem.open(new Path("/FUTURE/mydir/test"));
             FileOutputStream outputStream = new FileOutputStream(new File("D:\\temp\\JD_Phone\\test.txt"))) {
            IOUtils.copy(inputStream, outputStream);
        } finally {
            // NOTE: closeAll() closes EVERY cached FileSystem instance in the
            // JVM (including the shared fs from init()), not just this one.
            FileSystem.closeAll();
        }
    }

    /**
     * List the immediate children of "/" and report whether each entry
     * is a file or a directory.
     */
    @Test
    public void test5() throws IOException {
        FileStatus[] fileStatus = fs.listStatus(new Path("/"));
        for (FileStatus file : fileStatus) {
            System.out.println("Name:" + file.getPath().getName());
            System.out.println(file.isFile() ? "file" : "directory");
        }
    }

    /**
     * Query a directory with a recursive iterator.
     * Note that the iterator entries also expose per-file block information
     * (offset, length, hosts).
     */
    @Test
    public void testLs() throws Exception {
        // Recursive listing (second argument = true) of everything under /FUTURE.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/FUTURE"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus file = listFiles.next();
            System.out.println("Name:" + file.getPath().getName());
            System.out.println("Permission:" + file.getPermission());
            System.out.println("BlockSize:" + file.getBlockSize());
            System.out.println("Owner:" + file.getOwner());
            System.out.println("Replication:" + file.getReplication());
            BlockLocation[] blockLocations = file.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                // Offset of this block within the file.
                System.out.println("Block:" + blockLocation.getOffset());
                System.out.println("Block length:" + blockLocation.getLength());
                for (String host : blockLocation.getHosts()) {
                    System.out.println("Block host:" + host);
                }
            }
        }
    }

    /**
     * 12.4.6 Upload a local file to "/" with the high-level API.
     */
    @Test
    public void test6() throws Exception {
        try {
            fs.copyFromLocalFile(new Path("file:///D:\\temp\\2020-09-10\\5331d040-c753-4ba3-ac8e-2e771afce6f2.jpg"), new Path("/"));
        } finally {
            fs.close();
        }
    }

    /**
     * Delete a path (directory or file).
     */
    @Test
    public void testDelete() throws Exception {
        // Second argument = true requests recursive deletion.
        try {
            fs.delete(new Path("/5331d040-c753-4ba3-ac8e-2e771afce6f2.jpg"), true);
        } finally {
            fs.close();
        }
    }

    /**
     * Upload a file to HDFS via raw streams.
     */
    @Test // was missing, so this method was silently skipped by JUnit
    public void testUpload() throws Exception {
        // try-with-resources guarantees both streams close even on failure.
        try (FSDataOutputStream fsDataOutputStream = fs.create(new Path("/sean2019/hellok.sh"), true);
             // "file:///" is a URL scheme, not a filesystem path; java.io.FileInputStream
             // cannot open it, so the prefix is dropped here (this was a guaranteed
             // FileNotFoundException in the original code).
             FileInputStream fileInputStream = new FileInputStream(
                     new File("C:\\Users\\beidouxing\\Pictures\\Camera Roll\\picture\\login_bg3.jpg"))) {
            IOUtils.copy(fileInputStream, fsDataOutputStream);
        }
    }

    /**
     * Download a file via raw streams.
     */
    @Test
    public void testWrite() throws Exception {
        try (FSDataInputStream fsDataInputStream = fs.open(new Path("/FUTURE/mydir/test"));
             FileOutputStream outputStream = new FileOutputStream(new File("D:\\temp\\JD_Phone\\bb.txt"))) {
            IOUtils.copy(fsDataInputStream, outputStream);
        }
    }

    /**
     * Stream read starting at a given offset.
     * inputStream.seek(n) positions the read at byte n; IOUtils.copyLarge()
     * can additionally bound both start and end offsets.
     * EOF means there is no more data to read.
     */
    @Test
    public void testRandomAccess() throws Exception {
        try (FSDataInputStream fsDataInputStream = fs.open(new Path("/FUTURE/mydir/test"));
             FileOutputStream outputStream = new FileOutputStream(new File("D:\\temp\\JD_Phone\\bb.txt"))) {
            // Skip the first 12 bytes before copying.
            fsDataInputStream.seek(12);
            IOUtils.copy(fsDataInputStream, outputStream);
        }
    }

    /**
     * Print an HDFS file's content to the console.
     *
     * @throws IOException              on read failure
     * @throws IllegalArgumentException on a bad path
     */
    @Test
    public void testConsole() throws IllegalArgumentException, IOException {
        try (FSDataInputStream inputStream = fs.open(new Path("/FUTURE/mydir/test"))) {
            // Deliberately do NOT close System.out.
            IOUtils.copy(inputStream, System.out);
        }
    }

    // Pseudocode for reading a fixed length:
    //inputStream.seek(60M);
    //while(read){
    //  count++;
    //  if(count>60M)break;
    //}
}
// Basic usage of Hadoop via the Java API.
// (Originally published 2024-08-27 16:06:29.)