除了命令行之外,还可以通过 Java API 对 HDFS 文件进行操作
导入 share/hadoop/common/lib 下面的 jar 包到工程
先建立与 Hadoop 的连接配置:
// Establish the HDFS connection.
// NOTE: fs.defaultFS must carry the hdfs:// scheme — a bare host:port
// ("localhost:9000") is not a valid filesystem URI and FileSystem.get()
// will not resolve the NameNode from it.
Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://localhost:9000");
FileSystem fs = FileSystem.get(conf);
查看hdfs目录列表
/**
 * Lists the entries directly under /user and prints, for each one,
 * its path, last access time, length and block size.
 */
public void ls() throws Exception {
    FileStatus[] entries = fs.listStatus(new Path("/user"));
    for (int i = 0; i < entries.length; i++) {
        FileStatus entry = entries[i];
        System.out.println(entry.getPath());
        System.out.println(entry.getAccessTime());
        System.out.println(entry.getLen());
        System.out.println(entry.getBlockSize());
    }
}
创建文件夹
/**
 * Creates the directory /user/dir on HDFS and reports the outcome.
 * Uses a primitive boolean (no reason to box the result) and no longer
 * swallows the failure case silently.
 */
public void mkdir() throws Exception {
    Path path = new Path("/user/dir");
    boolean created = fs.mkdirs(path);
    if (created) {
        System.out.println("mkdir /user/dir success~~");
    } else {
        // the original ignored this case entirely — surface the failure
        System.out.println("mkdir /user/dir failed");
    }
}
上传文件
/**
 * Uploads the local file /Users/wlx/test/my.text to /user/aaa.txt on HDFS.
 *
 * The original leaked both streams if fs.create() or the copy threw.
 * try-with-resources now guarantees closure; copyBytes is called with
 * close=false so the streams are not closed twice.
 */
public void put() throws Exception {
    Path path = new Path("/user/aaa.txt");
    try (FileInputStream in = new FileInputStream(new File("/Users/wlx/test/my.text"));
         FSDataOutputStream out = fs.create(path)) {
        IOUtils.copyBytes(in, out, conf.getInt("io.file.buffer.size", 4096), false);
    }
}
小文件上传
public void putsmall() throws Exception { //小文件上传 Path path = new Path("/user/bigfile"); SequenceFile.Writer write = new SequenceFile.Writer(fs, conf, path, Text.class, Text.class); File[] files = new File("D:/HDFS/").listFiles(); for(File f : files) { write.append(new Text(f.getName()), new Text(FileUtils.readFileToString(f))); } write.close(); }
读取合并后的小文件(下载并逐条打印 SequenceFile 内容)
/**
 * Reads back the SequenceFile /user/bigfile written by putsmall() and
 * prints each (filename, content) pair.
 *
 * Fix: the reader was never closed — now released in a finally block.
 */
public void getsmall() throws Exception {
    Path path = new Path("/user/bigfile");
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
    try {
        Text key = new Text();
        Text val = new Text();
        while (reader.next(key, val)) {
            System.out.println("111");
            System.out.println(key.toString());
            System.out.println(val.toString());
        }
    } finally {
        reader.close(); // original leaked the reader
    }
}
删除文件
/**
 * Deletes /user/aaa.txt from HDFS; the second argument enables
 * recursive deletion (a no-op for a plain file).
 */
public void del() throws Exception {
    fs.delete(new Path("/user/aaa.txt"), true);
}