package cn.hx.test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.BasicConfigurator;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
public class test1 {
    public static void main(String[] args) throws IOException {
        BasicConfigurator.configure();
        try {
            // URI of the HDFS NameNode to connect to
            URI uri = new URI("hdfs://192.168.22.131:9000");
            Configuration conf = new Configuration();
            FileSystem fileSystem = FileSystem.get(uri, conf);
            // Each FileStatus describes one entry (file or directory) under the given path
            FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
            for (FileStatus fileStatus : listStatus) {
                System.out.println(fileStatus);
                System.out.println("Path: " + fileStatus.getPath());
                System.out.println("Is directory: " + fileStatus.isDirectory());
                System.out.println("Modification time: " + fileStatus.getModificationTime());
                System.out.println("Access time: " + fileStatus.getAccessTime());
                System.out.println("Owner: " + fileStatus.getOwner());
                System.out.println("Group: " + fileStatus.getGroup());
                System.out.println("Permissions: " + fileStatus.getPermission());
                System.out.println("Is symlink: " + fileStatus.isSymlink());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Download: read a file from HDFS and copy it to standard output
    private static void open(FileSystem fileSystem) throws IOException {
        FSDataInputStream in = fileSystem.open(new Path("/test/1"));
        IOUtils.copyBytes(in, System.out, 1024, true);
    }
    // Upload: copy a local file into HDFS
    private static void put(FileSystem fileSystem) throws IOException {
        FSDataOutputStream out = fileSystem.create(new Path("/test/1"));
        FileInputStream in = new FileInputStream("E:\\BigDataVideos\\Hadoop 7\\2015-12-30 【hadoop】\\edits.xml");
        IOUtils.copyBytes(in, out, 1024, true);
    }
}