关闭

HDFS的JAVA API操作

标签: hdfs, hadoop
747人阅读 评论(0) 收藏 举报
分类:
package hdfs;

import java.io.IOException;
import java.net.URI;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.datanode.DataNode;

public class TestHdfs {

	/**
	 * Demonstrates the basic HDFS Java API operations against the cluster at
	 * 172.30.1.245. Each earlier example (upload, create, rename, delete,
	 * modification time, existence check, block locations) is kept as a
	 * commented-out section; the active code at the bottom lists all datanodes.
	 *
	 * @param args unused
	 * @throws IOException if the HDFS filesystem cannot be reached
	 */
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		conf.set("mapred.job.tracker", "172.30.1.245:9001");

		// Upload a local file to HDFS
		/*
		String dstFileRemotSrc = "hdfs://172.30.1.245:9000/user/dir1/";
		FileSystem hdfs = FileSystem.get(URI.create(dstFileRemotSrc), conf);

		Path src = new Path("D:\\out");
		Path dst = new Path(dstFileRemotSrc);
		hdfs.copyFromLocalFile(src, dst);
		// NOTE: the config key is "fs.default.name" (original had typo "fd.default.name",
		// which always printed null)
		System.out.println("Upload to " + conf.get("fs.default.name") );

		FileStatus files[] = hdfs.listStatus(dst);
		for (FileStatus fileStatus : files) {
			System.out.println(fileStatus.getPath());
		}
		*/

		// Create an HDFS file and write bytes into it
		/*
		FileSystem hdfs = FileSystem.get(URI.create("hdfs://172.30.1.245:9000/user/dir1/"), conf);
		Path dst = new Path("hdfs://172.30.1.245:9000/user/dir1/test");
		FSDataOutputStream  outputStream = null;
		byte[] buff  = "Hello Hadoop World!".getBytes();
		try {
			outputStream = hdfs.create(dst);
			outputStream.write(buff, 0, buff.length);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			if(outputStream != null){
				outputStream.close();
			}
		}

		FileStatus files[] = hdfs.listStatus(dst);
		for (FileStatus fileStatus : files) {
			System.out.println(fileStatus.getPath());
		}
		*/

		// Rename an HDFS file
		/*
		String srcFile = "hdfs://172.30.1.245:9000/user/dir1/test";
		FileSystem hdfs = FileSystem.get(URI.create(srcFile), conf);
		String dstFile = "hdfs://172.30.1.245:9000/user/dir1/test1";
		Path src = new Path(srcFile);
		Path dst = new Path(dstFile);
		boolean isRename = hdfs.rename(src, dst);
		String result = isRename ? "success" : "failure";
		System.out.println(result);

		FileStatus files[] = hdfs.listStatus(dst);
		for (FileStatus fileStatus : files) {
			System.out.println(fileStatus.getPath());
		}
		*/

		// Delete an HDFS file (non-recursive)
		/*
		String src = "hdfs://172.30.1.245:9000/user/dir1/test1";
		FileSystem hdfs = FileSystem.get(URI.create(src), conf);
		Path delPath = new Path(src);
		boolean isDeleted = hdfs.delete(delPath, false);
		System.out.println(isDeleted ? "success" : "failure");

		Path dst = new Path("hdfs://172.30.1.245:9000/user/dir1/");
		FileStatus files[] = hdfs.listStatus(dst);
		for(FileStatus file : files){
			System.out.println(file.getPath());
		}
		*/

		// Print the last-modification time of each HDFS file in a directory
		/*
		FileSystem hdfs = FileSystem.get(URI.create("hdfs://172.30.1.245:9000/user/dir1/"), conf);
		Path dst = new Path("hdfs://172.30.1.245:9000/user/dir1/");
		FileStatus files[] = hdfs.listStatus(dst);
		for (FileStatus fileStatus : files) {
			System.out.println(fileStatus.getPath() + " " + fileStatus.getModificationTime());
		}
		*/

		// Check whether a path exists on HDFS
		/*
		FileSystem hdfs  = FileSystem.get(URI.create("hdfs://172.30.1.245:9000/user/dir1/"), conf);
		Path dst = new Path("hdfs://172.30.1.245:9000/user/dir1/");
		boolean isExist = hdfs.exists(dst);
		System.out.println(isExist ? "exists" : "does not exist");
		*/

		// Show which cluster nodes hold the blocks of a given file
		/*
		FileSystem hdfs = FileSystem.get(URI.create("hdfs://172.30.1.245:9000/user/dir1/"), conf);
		Path dst = new Path("hdfs://172.30.1.245:9000/user/dir1/out");
		FileStatus fileStatus = hdfs.getFileStatus(dst);
		BlockLocation[] blockLocations = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
		for (BlockLocation blockLocation : blockLocations) {
			System.out.println(Arrays.toString(blockLocation.getHosts()) + "\t" + Arrays.toString(blockLocation.getTopologyPaths()) + " \t" +
					Arrays.toString(blockLocation.getNames()));
		}
		*/

		// List the hostname and address of every datanode in the cluster.
		// FileSystem.get() hands out a heavyweight, Closeable handle — close it
		// in a finally block so the RPC connection is released even on failure
		// (the original leaked it).
		DistributedFileSystem hdfs = (DistributedFileSystem) FileSystem.get(
				URI.create("hdfs://172.30.1.245:9000/user/dir1/"), conf);
		try {
			DatanodeInfo[] dataNodeStatus = hdfs.getDataNodeStats();
			for (DatanodeInfo datanodeInfo : dataNodeStatus) {
				System.out.println(datanodeInfo.getHostName() + "\t" + datanodeInfo.getName());
			}
		} finally {
			hdfs.close();
		}
	}
}

0
0

查看评论
* 以上用户言论只代表其个人观点,不代表CSDN网站的观点或立场
    个人资料
    • 访问:191912次
    • 积分:3071
    • 等级:
    • 排名:第11273名
    • 原创:74篇
    • 转载:291篇
    • 译文:0篇
    • 评论:4条
    最新评论