package hdfs;
import java.net.URL;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
/**
 * Demo driver exercising the HDFS helper methods in {@code Tools}:
 * mkdir, delete, upload, download (printed to stdout) and directory listing.
 */
public class Crud {

    /** Root URI of the HDFS namenode. */
    static final String PATH = "hdfs://master:9000/";
    /** HDFS directory used by the demo operations. */
    static final String DIR = "/WinterTest";
    /** HDFS path of the file the demo uploads and then reads back. */
    static final String sourcePath = "/WinterTest/Hello";

    public static void main(String[] args) throws Exception {
        // Register the Hadoop handler factory so java.net.URL understands
        // hdfs:// URLs (plain URL only knows http-style protocols).
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

        final FileSystem fs = Tools.getFileSystem(PATH);
        Tools.mkdir(fs, DIR);              // create the demo directory
        Tools.delete(fs, DIR);             // remove it again (recursive)
        Tools.upLoad(fs, DIR, sourcePath); // copy a local file into HDFS
        Tools.downLoad(fs, sourcePath);    // stream the uploaded file to stdout
        Tools.list(fs, DIR);               // print the directory contents
    }
}
package hdfs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Static helper methods for basic HDFS operations (read, mkdir, delete,
 * upload, download, list) plus a recursive lister for the local filesystem.
 */
class Tools {

    /**
     * Reads the resource at {@code PATH} through {@link java.net.URL} and
     * prints its full contents to stdout.
     * <p>
     * Plain URL only understands http-style protocols; for hdfs:// URLs an
     * {@code FsUrlStreamHandlerFactory} must be registered first (see Crud.main).
     *
     * @param PATH URL of the resource to read
     * @throws Exception if the URL is malformed or the read fails
     */
    public static void jread(String PATH) throws Exception {
        final URL url = new URL(PATH);
        final InputStream in = url.openConnection().getInputStream();
        try {
            // Accumulate into a growable buffer. The original fixed 1024-byte
            // array overflowed on any resource larger than 1 KB and printed
            // trailing NUL bytes for smaller ones; the extra in.read(b) after
            // EOF was dead code and has been removed.
            final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            int temp;
            while ((temp = in.read()) != -1) {
                buffer.write(temp);
            }
            System.out.println(buffer.toString());
        } finally {
            in.close();
        }
    }

    /**
     * Reads the resource at {@code PATH} through the URL stream API and
     * copies it to stdout (stdout is deliberately left open).
     *
     * @param PATH URL of the resource to read
     * @throws Exception if the URL is malformed or the read fails
     */
    public static void hread(String PATH) throws Exception {
        final URL url = new URL(PATH);
        final InputStream in = url.openStream();
        try {
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            // Close in a finally block so the stream is released even when
            // copyBytes throws (the original leaked it on failure).
            IOUtils.closeStream(in);
        }
    }

    /**
     * Obtains a {@link FileSystem} handle for the given HDFS URI using the
     * default (classpath) configuration.
     *
     * @param PATH HDFS root URI, e.g. {@code hdfs://master:9000/}
     * @return the FileSystem bound to that URI
     * @throws Exception if the URI is invalid or the filesystem is unreachable
     */
    public static FileSystem getFileSystem(String PATH) throws Exception {
        return FileSystem.get(new URI(PATH), new Configuration());
    }

    /**
     * Creates directory {@code DIR} (including missing parents) and reports
     * the outcome on stdout.
     *
     * @param fileSystem HDFS handle
     * @param DIR        absolute directory path to create
     * @throws Exception on communication failure
     */
    public static void mkdir(FileSystem fileSystem, String DIR)
            throws Exception {
        if (fileSystem.mkdirs(new Path(DIR))) {
            System.out.println("create" + DIR + "success");
        } else {
            // Bug fix: the failure branch previously printed "delete".
            System.out.println("create" + DIR + "failure");
        }
    }

    /**
     * Recursively deletes {@code DIR} and reports the outcome on stdout.
     *
     * @param fileSystem HDFS handle
     * @param DIR        absolute path to delete (recursive)
     * @throws Exception on communication failure
     */
    public static void delete(FileSystem fileSystem, String DIR)
            throws Exception {
        if (fileSystem.delete(new Path(DIR), true)) {
            System.out.println("delete" + DIR + "success");
        } else {
            System.out.println("delete" + DIR + "failure");
        }
    }

    /**
     * Uploads a local file to HDFS at {@code sourcePath}.
     * <p>
     * NOTE(review): {@code DIR} is unused and the local source path is
     * hard-coded to a developer machine — confirm it exists before running.
     *
     * @param fileSystem HDFS handle
     * @param DIR        unused (kept for interface compatibility)
     * @param sourcePath destination path in HDFS
     * @throws Exception if either stream fails
     */
    public static void upLoad(FileSystem fileSystem, String DIR,
            String sourcePath) throws Exception {
        FileInputStream in = null;
        FSDataOutputStream out = null;
        try {
            in = new FileInputStream(
                    "/Users/Hadoop/Documents/workspace/Winter/Hello");
            out = fileSystem.create(new Path(sourcePath));
            IOUtils.copyBytes(in, out, 1024, false);
        } finally {
            // Close both streams explicitly so neither leaks when the other
            // (or the copy) throws; the original leaked 'out' if opening the
            // local file failed.
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }

    /**
     * Streams the HDFS file at {@code sourcePath} to stdout.
     * <p>
     * NOTE(review): despite the name, nothing is saved locally — the bytes
     * are only printed.
     *
     * @param fileSystem HDFS handle
     * @param sourcePath HDFS file to read
     * @throws Exception if the file cannot be opened or read
     */
    public static void downLoad(FileSystem fileSystem, String sourcePath)
            throws Exception {
        final FSDataInputStream in = fileSystem.open(new Path(sourcePath));
        // close=true: copyBytes closes 'in' (stdout stays usable because
        // System.out ignores close in practice; flag kept from original).
        IOUtils.copyBytes(in, System.out, 1024, true);
    }

    /**
     * Prints one line per entry of directory {@code DIR}: kind, permission,
     * replication factor, length and full path.
     *
     * @param fileSystem HDFS handle
     * @param DIR        directory to list
     * @throws Exception if the listing fails
     */
    public static void list(FileSystem fileSystem, String DIR) throws Exception {
        final FileStatus[] listStatus = fileSystem.listStatus(new Path(DIR));
        System.out.println(" 属性" + "\t\t" + " 权限" + "\t\t" + "副本数" + "\t"
                + "长度" + "\t\t\t" + "路径");
        for (FileStatus x : listStatus) {
            // isDirectory() replaces the deprecated isDir().
            String idDir = x.isDirectory() ? "Dir" : "Document";
            final String Permission = x.getPermission().toString();
            final short replication = x.getReplication();
            final long len = x.getLen();
            final String path = x.getPath().toString();
            System.out.println(idDir + "\t" + Permission + "\t" + replication
                    + "\t" + len + "\t" + path);
        }
    }

    /**
     * Recursively prints every regular file under {@code fileName} on the
     * LOCAL filesystem; a plain file argument is printed directly.
     *
     * @param fileName local file or directory; null is ignored
     */
    public static void listMore(File fileName) {
        if (fileName != null) {
            if (fileName.isDirectory()) {
                File[] arr = fileName.listFiles();
                if (arr != null) {
                    for (int i = 0; i < arr.length; i++) {
                        // Bug fix: the original recursed on 'fileName' itself,
                        // causing infinite recursion (StackOverflowError) on
                        // any non-empty directory. Recurse on each child.
                        listMore(arr[i]);
                    }
                }
            } else {
                System.out.println(fileName);
            }
        }
    }
}