HDFS Java API Operations

The JUnit test class below walks through the common HDFS operations exposed by the Hadoop FileSystem Java API: uploading a local file, creating, renaming, and deleting files, reading modification times, checking whether a file exists, locating a file's blocks, and listing the cluster's DataNodes.

package hdfs;

import static org.junit.Assert.fail;

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.junit.Test;

public class TestHdfs {

    // Default stub generated by the IDE; not used by the examples below
    @Test
    public void test() {
        fail("Not yet implemented");
    }

    // Upload a local file to HDFS
    @Test
    public void testUpload() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        Path src = new Path("F:\\lzp\\T.txt");
        Path dst = new Path("/");
        hdfs.copyFromLocalFile(src, dst);

        System.out.println("Upload to " + conf.get("fs.default.name"));
        FileStatus[] files = hdfs.listStatus(dst);
        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }

    // Create a file on HDFS and write a few bytes into it
    @Test
    public void testCreate() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        byte[] buff = "hello world!".getBytes();

        FileSystem hdfs = FileSystem.get(conf);
        Path dst = new Path("/test");
        FSDataOutputStream outputStream = null;
        try {
            outputStream = hdfs.create(dst);
            outputStream.write(buff, 0, buff.length);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (outputStream != null) {
                outputStream.close();
            }
        }

        FileStatus[] files = hdfs.listStatus(dst);
        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }

    // Rename a file on HDFS
    @Test
    public void testRename() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        Path dst = new Path("/");

        Path frpath = new Path("/test");
        Path topath = new Path("/test1");

        hdfs.rename(frpath, topath);

        FileStatus[] files = hdfs.listStatus(dst);
        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }

    // Delete a file on HDFS (non-recursive)
    @Test
    public void testDel() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        Path dst = new Path("/");

        Path topath = new Path("/test1");

        boolean ok = hdfs.delete(topath, false);
        System.out.println(ok ? "Delete succeeded" : "Delete failed");

        FileStatus[] files = hdfs.listStatus(dst);
        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }

    // Get the last modification time of files on HDFS
    @Test
    public void testgetModifyTime() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        Path dst = new Path("/");

        FileStatus[] files = hdfs.listStatus(dst);
        for (FileStatus file : files) {
            System.out.println(file.getPath() + "\t" + file.getModificationTime());
        }
    }

    // Check whether a file exists on HDFS
    @Test
    public void testExists() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        Path dst = new Path("/T.txt");

        boolean ok = hdfs.exists(dst);
        System.out.println(ok ? "File exists" : "File does not exist");
    }

    // Find where a file's blocks are stored in the HDFS cluster
    @Test
    public void testFileBlockLocation() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        Path dst = new Path("/T.txt");

        FileStatus fileStatus = hdfs.getFileStatus(dst);
        BlockLocation[] blockLocations = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        for (BlockLocation block : blockLocations) {
            System.out.println(Arrays.toString(block.getHosts()) + "\t" + Arrays.toString(block.getNames()));
        }
    }

    // List the names of all DataNodes in the HDFS cluster
    @Test
    public void testGetHostName() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource(new Path("D:\\myeclipse\\Hadoop\\hadoopEx\\src\\conf\\hadoop.xml"));

        DistributedFileSystem hdfs = (DistributedFileSystem) FileSystem.get(conf);
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();

        for (DatanodeInfo dataNode : dataNodeStats) {
            System.out.println(dataNode.getHostName() + "\t" + dataNode.getName());
        }
    }

}
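Every test above loads its connection settings from a local hadoop.xml at a hard-coded Windows path. If you do not have that file, the same settings can be supplied programmatically on the Configuration object. The sketch below shows this approach; the class name HdfsConnectSketch and the NameNode URI hdfs://namenode:9000 are placeholders, not values from the original project, so replace the URI with your cluster's actual fs.default.name / fs.defaultFS.

package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Minimal sketch: configure the HDFS client in code instead of loading hadoop.xml.
// "hdfs://namenode:9000" is a placeholder NameNode URI — substitute your own.
public class HdfsConnectSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://namenode:9000");   // legacy key, as used by the tests above
        // conf.set("fs.defaultFS", "hdfs://namenode:9000");   // newer equivalent key

        FileSystem hdfs = FileSystem.get(conf);
        System.out.println("Connected to " + conf.get("fs.default.name"));

        // For example, list the root directory, as the tests above do
        for (FileStatus file : hdfs.listStatus(new Path("/"))) {
            System.out.println(file.getPath());
        }
        hdfs.close();
    }
}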