package com.test.hdfsclint;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import sun.nio.ch.IOUtil;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
/**
 * Smoke tests for common HDFS client operations: upload, rename, delete,
 * append, and directory/block listing.
 *
 * NOTE(review): every test assumes a NameNode reachable at
 * hdfs://hadoop100:9000 as user "root" — cluster-dependent, confirm host.
 * The local paths ("C:\\Users\\Administrator\\test.txt", "C:\\unintall.log")
 * are Windows-specific fixtures; presumably they exist on the dev machine.
 */
public class hdfsclint {

    /** Shared cluster handle; opened in before(), closed in after(). */
    private FileSystem fs;

    @Before
    public void before() throws IOException, InterruptedException {
        // Obtain the HDFS abstraction object, connecting as user "root".
        fs = FileSystem.get(URI.create("hdfs://hadoop100:9000"), new Configuration(), "root");
        System.out.println("调用before方法");
    }

    @Test
    public void put() throws IOException, InterruptedException {
        // Override the client-side replication factor for this upload only.
        Configuration configuration = new Configuration();
        configuration.setInt("dfs.replication", 1);
        // FIX: close the handle opened in before() before re-acquiring one.
        // FileSystem.get() caches instances per (scheme, authority, user), so
        // without closing, the cached default-config instance would be
        // returned, the dfs.replication override silently ignored, and the
        // old connection leaked on reassignment.
        if (fs != null) {
            fs.close();
        }
        fs = FileSystem.get(URI.create("hdfs://hadoop100:9000"), configuration, "root");
        // Upload a local file into the HDFS root directory.
        fs.copyFromLocalFile(new Path("C:\\Users\\Administrator\\test.txt"), new Path("/"));
    }

    @Test
    public void rename() throws IOException, InterruptedException {
        // Rename /test.txt to /test.txt.bak on the cluster.
        fs.rename(new Path("/test.txt"), new Path("/test.txt.bak"));
    }

    @Test
    public void delete() throws IOException {
        // Recursive delete; returns true only when something was removed.
        boolean delete = fs.delete(new Path("/test.txt.bak"), true);
        if (delete) {
            System.out.println("删除成功");
        } else {
            System.out.println("删除失败");
        }
    }

    @Test
    public void append() throws IOException {
        // FIX: try-with-resources guarantees the FSDataOutputStream is closed
        // even when opening the local input file throws (the original opened
        // the output stream first and would leak it on FileNotFoundException).
        // copyBytes is told NOT to close the streams (close=false) to avoid
        // a double close.
        try (FSDataOutputStream append = fs.append(new Path("/test.txt"));
             FileInputStream open = new FileInputStream("C:\\unintall.log")) {
            IOUtils.copyBytes(open, append, 4096, false);
        }
    }

    @Test
    public void ls() throws IOException {
        // List the immediate children of the root directory and print
        // file vs. directory details.
        FileStatus[] filestatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : filestatus) {
            if (fileStatus.isFile()) {
                System.out.println(fileStatus);
                System.out.println("文件信息");
                System.out.println(fileStatus.getPath());
                System.out.println(fileStatus.getLen());
            } else {
                System.out.println("文件夹信息");
                System.out.println(fileStatus.getPath());
            }
        }
    }

    // Inspect the block locations (replica hosts) of every file under /,
    // recursively.
    @Test
    public void listFiles() throws IOException {
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/"), true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            System.out.println("====================");
            System.out.println(file.getPath());
            BlockLocation[] blockLocations = file.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                String[] hosts = blockLocation.getHosts();
                for (String host : hosts) {
                    System.out.println("块在");
                    System.out.println(host);
                }
            }
        }
    }

    @After
    public void after() throws IOException {
        // Release the cluster connection after each test.
        fs.close();
        System.out.println("after!!!!");
    }
}
// Common methods for operating on HDFS from Java (java操作hdfs常见的方法).
// Source-article metadata: latest recommended article published 2024-04-20 16:36:29.