package com.atguigu.hdfs;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
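import org.apache.hadoop.fs.BlockLocation;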
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.Test;
public class HdfsClient {
    // Upload a file (standalone main; the same steps are covered by putFileToHDFS below)
    public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
        /*Configuration conf = new Configuration();
        // conf.set("fs.defaultFS", "hdfs://hadoop103:9000");
        // 1. Get the file system handle as user "atguigu"
        // FileSystem fileSystem = FileSystem.get(conf);
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 2. Upload the file
        fileSystem.copyFromLocalFile(new Path("e:/hadoop.txt"), new Path("/hello1.txt"));
        // 3. Release resources
        fileSystem.close();
        System.out.println("over....");*/
    }
    /** Upload a file to HDFS
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    @Test
    public void putFileToHDFS() throws IOException, InterruptedException, URISyntaxException {
        // 1. Create the file system configuration
        Configuration conf = new Configuration();
        // 2. Get a handle to the file system as user "atguigu"
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 3. Upload the file; the first argument (delSrc = false) keeps the local source file
        fs.copyFromLocalFile(false, new Path("e:/translate.conf"), new Path("/translate.conf"));
        // 4. Release resources
        fs.close();
    }
    /** Download a file from HDFS
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    @Test
    public void getFileFromHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // 1. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 2. Download the file; the first argument (delSrc = true) removes the source from HDFS
        fs.copyToLocalFile(true, new Path("/hello1.txt"), new Path("e:/hello2.txt"));
        // 3. Release resources
        fs.close();
    }
    /** Create a directory
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    @Test
    public void mkdirAtHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // 1. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 2. Create the directory with an octal permission string ("677" = rw-rwxrwx)
        FsPermission permission = new FsPermission("677");
        fs.mkdirs(new Path("/user/atguigu/a/b"), permission);
        // 3. Release resources
        fs.close();
    }
    /** Rename a file
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    @Test
    public void renameAtHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // 1. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 2. Rename the file
        fs.rename(new Path("/hello.txt"), new Path("/hellodemo.txt"));
        // 3. Release resources
        fs.close();
    }
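    /** Delete a file or directory (illustrative sketch, not part of the original class:
     * the rename method above carried the name deleteAtHDFS; FileSystem.delete(Path, boolean)
     * is the standard delete call, with the boolean enabling recursive deletion)
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    @Test
    public void deleteAtHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // 1. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 2. Delete; the second argument (recursive = true) allows deleting non-empty directories
        fs.delete(new Path("/hellodemo.txt"), true);
        // 3. Release resources
        fs.close();
    }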
    /** List files recursively and print their details
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    @Test
    public void listFilesAtHDFS() throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        // 1. Get the file system
        FileSystem fs = FileSystem.get(new URI("hdfs://hadoop103:9000"), conf, "atguigu");
        // 2. List file details recursively from the root
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        // 3. Iterate over the results
        while (listFiles.hasNext()) {
            LocatedFileStatus fileStatus = listFiles.next();
            // File name
            System.out.println(fileStatus.getPath().getName());
            // Block size
            System.out.println(fileStatus.getBlockSize());
            // File length in bytes
            System.out.println(fileStatus.getLen());
            // File permission
            System.out.println(fileStatus.getPermission());
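            // Block locations (illustrative addition): LocatedFileStatus already carries
            // the datanode hosts for each block, so no extra namenode call is needed
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                System.out.println("block offset: " + blockLocation.getOffset());
                for (String host : blockLocation.getHosts()) {
                    System.out.println(host);
                }
            }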
        }
        // 4. Release resources
        fs.close();
    }
}