package hdfs;
import java.io.FileInputStream;
import java.net.URI;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
public class HdfsUtil {
    public static void main(String[] args) throws Exception {
        final FileSystem fileSystem = FileSystem.get(new URI("hdfs://ha:9000"), new Configuration());
        System.out.println("**************" + fileSystem);
        System.out.println("**************" + fileSystem.getClass());
        // Create directories with mkdirs: a relative path resolves against the user's
        // HDFS home directory, an absolute path against the filesystem root
        fileSystem.mkdirs(new Path("mydir"));
        // The permission string is parsed as octal; the original "111" would be
        // execute-only, so "755" is used here for a directory that can be listed
        fileSystem.mkdirs(new Path("/dir2"), new FsPermission("755"));
        // Upload a file with create(path, overwrite, bufferSize, replication, blockSize)
        final FSDataOutputStream out1 = fileSystem.create(new Path("/dir1/file1"), true, 1024000, (short) 2, 1048576);
        final FileInputStream in1 = new FileInputStream("/root/Downloads/hello");
        // The trailing true tells copyBytes to close both streams when the copy finishes
        IOUtils.copyBytes(in1, out1, 1024, true);
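        // Hedged aside (not in the original): confirm the upload landed using
        // exists() and getFileStatus() from the stock FileSystem API
        System.out.println("exists: " + fileSystem.exists(new Path("/dir1/file1")));
        System.out.println("replication: " + fileSystem.getFileStatus(new Path("/dir1/file1")).getReplication());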
        // Upload again, this time with a Progressable callback that reports how
        // many bytes have been written so far
        final AtomicInteger writeBytes = new AtomicInteger(0);
        final FSDataOutputStream out2 = fileSystem.create(new Path("/dir1/file2"), new Progressable() {
            @Override
            public void progress() {
                System.out.println("writeBytes = " + writeBytes.get());
            }
        });
        final FileInputStream in2 = new FileInputStream("/root/Downloads/hello");
        final byte[] buffer = new byte[4];
        int readBytes = in2.read(buffer);
        while (readBytes != -1) {
            // Write only the bytes actually read; the last chunk may be shorter than the buffer
            out2.write(buffer, 0, readBytes);
            out2.flush();
            out2.hsync(); // force the bytes out to the datanodes so progress is meaningful
            writeBytes.addAndGet(readBytes);
            readBytes = in2.read(buffer);
        }
        in2.close();
        out2.close();
        // Read the file back and copy its contents to stdout
        final FSDataInputStream in3 = fileSystem.open(new Path("/dir1/file1"));
        IOUtils.copyBytes(in3, System.out, 1024, true);
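        // Minimal sketch (not in the original): FSDataInputStream is seekable,
        // so a positioned re-read of the file looks like this
        final FSDataInputStream seekIn = fileSystem.open(new Path("/dir1/file1"));
        seekIn.seek(0); // position the stream at an absolute byte offset
        final byte[] head = new byte[16];
        final int n = seekIn.read(head);
        System.out.println("re-read " + n + " bytes after seek(0)");
        seekIn.close();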
        // List the entries directly under the root directory
        final FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            System.out.println(fileStatus);
        }
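        // Hedged sketch (not in the original): pull individual fields from each
        // FileStatus instead of relying on its toString()
        for (FileStatus status : listStatus) {
            final String kind = status.isDirectory() ? "dir " : "file";
            System.out.println(kind + " " + status.getPath() + " " + status.getLen() + " bytes");
        }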
        // Print the working directory, which is what relative paths like "mydir" resolve against
        System.out.println(fileSystem.getWorkingDirectory());
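        // Hedged sketch (assumption, not in the original): setWorkingDirectory
        // changes where relative paths resolve; "/dir2" is just illustrative here
        fileSystem.setWorkingDirectory(new Path("/dir2"));
        fileSystem.mkdirs(new Path("relativeDir")); // created as /dir2/relativeDir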
        // Delete via trash: moveToTrash moves the path into the user's .Trash
        // directory and returns true only if the move actually happened
        final Trash trash = new Trash(fileSystem, fileSystem.getConf());
        trash.moveToTrash(new Path("/dir1/file1"));
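        // Hedged aside (not in the original): trash is a no-op unless
        // fs.trash.interval > 0 on the cluster; isEnabled() reports whether it is active
        System.out.println("trash enabled: " + trash.isEnabled());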
        // delete(path, recursive) bypasses the trash entirely; here it is a no-op
        // if moveToTrash already removed the file above
        fileSystem.delete(new Path("/dir1/file1"), true);
        fileSystem.close();
    }
}