基本实例化代码
// Point the Hadoop client at a local Hadoop installation directory
// (used for native-library / launcher resolution on some platforms).
System.setProperty("hadoop.home.dir", "/home/xm/hadoop-2.7.1");
// NameNode RPC endpoint of the target HDFS cluster.
String uri = "hdfs://master:9000";
Configuration conf = new Configuration();
// FileSystem handle bound to the cluster at `uri`; reused by the snippets below.
FileSystem fs = FileSystem.get(URI.create(uri),conf);
// Example HDFS file path used throughout these snippets.
Path path = new Path("hdfs://master:9000/input/input3.txt");
创建目录
// Create the target directory on HDFS, including any missing parents
// (mkdirs behaves like `mkdir -p`).
Path dir = new Path("hdfs://master:9000/input");
fs.mkdirs(dir);
创建文件
// Create an empty file at the given HDFS location.
Path target = new Path("hdfs://master:9000/input/input3.txt");
fs.createNewFile(target);
删除目录或者文件,路径修改即可
// Delete a file or directory on HDFS.
// Fix: the one-argument delete(Path) overload is deprecated in the FileSystem
// API; the explicit recursive flag is also required when `path` is a
// non-empty directory (true = delete recursively).
Path path = new Path("hdfs://master:9000/input/input3.txt");
fs.delete(path, true);
文件写入代码
// Write a string into an HDFS file using writeUTF (length-prefixed modified UTF-8).
// Fix: the original never closed the output stream, so buffered bytes could be
// lost and the lease left open; close in a finally block.
Path path = new Path("hdfs://master:9000/input/input3.txt");
FSDataOutputStream out = fs.create(path);
try {
    out.writeUTF("hello hadoop!");
} finally {
    out.close();
}
文件读出代码方法1(配合上面写入使用,否则可能出错)
// Read the file back with readUTF(). Only valid for files written via
// writeUTF() above: readUTF() expects the length-prefixed modified-UTF-8
// framing, and fails on arbitrary file content.
Path src = new Path("hdfs://master:9000/input/input3.txt");
FSDataInputStream input = fs.open(src);
System.out.println(input.readUTF());
input.close();
文件读出方法2--万能代码(不乱码)
// Generic read: stream the raw bytes of the HDFS file to stdout in 4 KiB
// chunks, so any file content is shown as-is (no readUTF framing required).
Path src = new Path("hdfs://master:9000/input/input3.txt");
InputStream in = fs.open(src);
IOUtils.copyBytes(in, System.out, 4096);
IOUtils.closeStream(in);
本地文件内容导入HDFS文件代码
// Copy a local file's contents into a (new) HDFS file.
// Fixes: copyBytes(..., 4096, true) already closes BOTH streams when the flag
// is true, so the original's two extra closeStream calls were redundant; and
// if fs.create(path) threw, the local input stream leaked. The try/finally
// guarantees `in` is released on every path (closeStream is a safe no-op on
// an already-closed stream).
Path path = new Path("hdfs://master:9000/input/input3.txt");
String p = "/home/xm/aaaa";
InputStream in = new BufferedInputStream(new FileInputStream(p));
try {
    OutputStream out = fs.create(path);
    IOUtils.copyBytes(in, out, 4096, true); // true => copyBytes closes in and out
} finally {
    IOUtils.closeStream(in); // covers the case where fs.create failed
}
本地文件追加至HDFS文件
// Append a local file's contents to an existing HDFS file.
// Fixes: copyBytes(..., 4096, true) already closes BOTH streams, so the
// original's two extra closeStream calls were redundant; and if fs.append
// threw, the local input stream leaked. try/finally releases `in` on every
// path (closeStream is a safe no-op on an already-closed stream).
Path path = new Path("hdfs://master:9000/input/input3.txt");
String p = "/home/xm/aaaa";
InputStream in = new BufferedInputStream(new FileInputStream(p));
try {
    OutputStream out = fs.append(path);
    IOUtils.copyBytes(in, out, 4096, true); // true => copyBytes closes in and out
} finally {
    IOUtils.closeStream(in); // covers the case where fs.append failed
}
追加若出错加上
// Client-side settings to try when append() fails.
// Enables append support (needed on older clusters where it is off by default).
conf.setBoolean( "dfs.support.append", true );
// On small clusters (fewer datanodes than the replication factor) the default
// pipeline-recovery policy cannot find a replacement datanode and append
// fails; NEVER disables datanode replacement on failure.
conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
自行追加(若出错希望知道的同学告诉改错方法写在下面)
// Append a string to an existing HDFS file.
// Fixes vs. the original: the while(-1 != readLen) { ... break; } construct
// never looped (readLen was a constant, and the break fired immediately), and
// it wrote the 10-byte array "end hadoop".getBytes() using the 11-byte length
// of "end hadoop!" — an ArrayIndexOutOfBoundsException at runtime. Writing the
// same array whose length we measured removes both problems, and the finally
// block guarantees the stream is closed so the appended bytes are flushed.
FSDataOutputStream out = fs.append(path);
try {
    byte[] data = "end hadoop!".getBytes();
    out.write(data, 0, data.length);
} finally {
    out.close();
}
本地文件导入到HDFS目录中
// Upload a local file into an HDFS directory; because the destination path is
// a directory, the uploaded file keeps the source file's name.
Path localSrc = new Path("/home/xm/aaaa");
Path hdfsDir = new Path("hdfs://master:9000/input/");
fs.copyFromLocalFile(localSrc, hdfsDir);
获取给定目录下的所有子目录以及子文件
// Recursively print every regular file under `path`.
getFile(path,fs);

/**
 * Walks the HDFS tree rooted at {@code path}, descending into directories and
 * printing the full URI of each regular file to stdout.
 *
 * Fixes vs. the original: the duplicated {@code IOException} in the throws
 * clause is removed, deprecated {@code FileStatus.isDir()} is replaced by
 * {@code isDirectory()}, and the redundant Path -> String -> Path round trip
 * is dropped ({@code getPath()} already returns a usable Path).
 *
 * @param path root directory (or file) to start from
 * @param fs   filesystem handle used for listing
 * @throws IOException if listing a directory fails
 */
public static void getFile(Path path, FileSystem fs) throws IOException {
    for (FileStatus status : fs.listStatus(path)) {
        if (status.isDirectory()) {
            getFile(status.getPath(), fs); // recurse into subdirectory
        } else {
            System.out.println(status.getPath().toString());
        }
    }
}
查找某个文件在HDFS集群的位置
// Print where each block of the file is stored in the cluster.
// Bug fix: the original printed hosts[i], indexing the per-block replica-host
// array with the BLOCK index — once the file has more blocks than replicas
// this throws ArrayIndexOutOfBoundsException, and even below that it showed an
// arbitrary single replica. Joining the whole host list reports every replica
// of each block.
Path path = new Path("hdfs://master:9000/input/input3.txt");
FileStatus status = fs.getFileStatus(path);
BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());
for (int i = 0; i < locations.length; i++) {
    String[] hosts = locations[i].getHosts();
    System.out.println("block_" + i + "location:" + String.join(",", hosts));
}
HDFS集群上所有节点名称信息
// List the host name of every datanode in the cluster.
// NOTE(review): the cast assumes `fs` really is an HDFS DistributedFileSystem;
// a local or other FileSystem implementation would throw ClassCastException
// here — confirm against how `fs` is obtained.
DistributedFileSystem dfs = (DistributedFileSystem) fs;
DatanodeInfo[] nodes = dfs.getDataNodeStats();
for (int i = 0; i < nodes.length; i++) {
    System.out.println("DataNode_" + i + "_Node:" + nodes[i].getHostName());
}