HDFS Operations with Java

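All of the examples below follow the same pattern: build a Configuration (which picks up fs.defaultFS from core-site.xml/hdfs-site.xml on the classpath) and then obtain a FileSystem handle. If the configuration files are not on the classpath, you can point at the NameNode explicitly; the minimal sketch below assumes a NameNode at hdfs://localhost:9000, which you should replace with your own address.

package hdfs_pro;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HdfsClient {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumption: the NameNode listens on hdfs://localhost:9000;
        // adjust to your cluster, or rely on core-site.xml instead.
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), conf);
        System.out.println("Connected to: " + fs.getUri());
        fs.close();
    }
}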
Create a new directory

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CreateDir {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String dirPath = args[0];
            Path hdfsPath = new Path(dirPath);
            // mkdirs creates the directory and any missing parent
            // directories, returning true on success.
            if (fs.mkdirs(hdfsPath)) {
                System.out.println("Directory " + dirPath + " has been created successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Check whether a directory exists

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class DirExist {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String dirName = args[0];
            if (fs.exists(new Path(dirName))) {
                System.out.println("Directory exists!");
            } else {
                System.out.println("Directory does not exist!");
            }
        }catch(Exception e) {
            e.printStackTrace();
        }
    }
}

List all files under a specified directory

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
public class ListFiles {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String filePath = args[0];
            Path srcPath = new Path(filePath);
            FileStatus[] stats = fs.listStatus(srcPath);
            // Convert the FileStatus entries into plain Paths for printing.
            Path[] paths = FileUtil.stat2Paths(stats);
            for (Path p : paths) {
                System.out.println(p.getName());
            }
        }catch(Exception e) {
            e.printStackTrace();
        }
    }
}
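Note that listStatus only returns the immediate children of the directory. When a recursive listing is needed, FileSystem also offers listFiles(path, true), which walks the whole subtree; a minimal sketch:

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ListFilesRecursive {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            // The second argument enables recursion into subdirectories.
            RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(args[0]), true);
            while (it.hasNext()) {
                System.out.println(it.next().getPath());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}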

Delete a directory

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class DeleteDir {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String dirPath = args[0];
            Path hdfsPath = new Path(dirPath);
            // The second argument requests recursive deletion of the
            // directory's contents.
            if (fs.delete(hdfsPath, true)) {
                System.out.println("Directory " + dirPath + " has been deleted successfully!");
            }
        }catch(Exception e) {
            e.printStackTrace();
        }
    }
}

Create a new file

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CreateFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String filePath = args[0];
            Path hdfsPath = new Path(filePath);
            // create returns an output stream; closing it finalizes
            // the new empty file on HDFS.
            fs.create(hdfsPath).close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Check whether a file exists

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class FileExist {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String fileName = args[0];
            if (fs.exists(new Path(fileName))) {
                System.out.println("File exists!");
            } else {
                System.out.println("File does not exist!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Copy a file from the local filesystem into a newly created directory

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFileToFolder {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String source = args[0];
            String target = args[1];
            Path sourcePath = new Path(source);
            Path targetPath = new Path(target);

            if (fs.mkdirs(targetPath)) {
                System.out.println("Directory " + targetPath + " has been created successfully!");
            }
            // Copy the local file into the HDFS directory; the source
            // is read from the local filesystem and left in place.
            fs.copyFromLocalFile(sourcePath, targetPath);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Print the contents of a specified HDFS file to the terminal

Create a new file

The target file is created first, using the same CreateFile class shown in the "Create a new file" section above.

Write to a file

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class WriteFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String filePath = args[0];
            Path srcPath = new Path(filePath);
            // create(path, overwrite, bufferSize, replication, blockSize):
            // overwrite any existing file, use a 1 KB buffer, replication
            // factor 1, and a 64 MB block size.
            FSDataOutputStream os = fs.create(srcPath, true, 1024, (short) 1, (long) (1 << 26));
            String str = "Hello, this is a sentence that should be written into the file.\n";
            os.write(str.getBytes());
            os.flush();
            os.close();

            // append requires append support on the cluster
            // (dfs.support.append on older Hadoop versions).
            os = fs.append(srcPath);
            str = "Hello, this is another sentence that should be written into the file.\n";
            os.write(str.getBytes());
            os.flush();
            os.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Read a file to the terminal

package hdfs_pro;

import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class ReadFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String filePath = args[0];
            Path srcPath = new Path(filePath);

            // FSDataInputStream.readLine() is deprecated and byte-based,
            // so wrap the stream in a BufferedReader for line-oriented reads.
            FSDataInputStream is = fs.open(srcPath);
            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
            reader.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
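When line handling is not needed, or the file may contain binary data, Hadoop's own IOUtils can stream the file straight to stdout, much like hdfs dfs -cat; a minimal sketch:

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CatFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            FSDataInputStream is = fs.open(new Path(args[0]));
            // Copy the stream to stdout in 4 KB chunks; the final
            // argument says not to close the streams automatically.
            IOUtils.copyBytes(is, System.out, 4096, false);
            is.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}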

Merge files of a specified type under a given directory

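The code originally shown here repeated the ListFiles example rather than performing a merge. A minimal sketch of the merge itself is below: it assumes the files to combine are selected by filename suffix (args[1], e.g. ".txt") and concatenated into a single output file (args[2]); these argument positions are illustrative.

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class MergeFiles {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            Path srcDir = new Path(args[0]);   // directory to scan
            String suffix = args[1];           // file type, e.g. ".txt"
            Path dstFile = new Path(args[2]);  // merged output file

            FSDataOutputStream out = fs.create(dstFile, true);
            for (FileStatus stat : fs.listStatus(srcDir)) {
                // Skip subdirectories and files of other types.
                if (stat.isFile() && stat.getPath().getName().endsWith(suffix)) {
                    FSDataInputStream in = fs.open(stat.getPath());
                    // Append this file's bytes to the merged output.
                    IOUtils.copyBytes(in, out, 4096, false);
                    in.close();
                }
            }
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}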

Move a file from a source path to a destination path

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;


public class MoveFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String source = args[0];
            String target = args[1];
            Path sourcePath = new Path(source);
            Path targetPath = new Path(target);
            if (!fs.exists(targetPath)) {
                // Within HDFS, rename doubles as a move; it returns
                // true on success.
                if (fs.rename(sourcePath, targetPath)) {
                    System.out.println("File " + source + " has been moved to " + target + "!");
                }
            } else {
                System.out.println("Target path already exists!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Delete a file

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class DeleteFile {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            String filePath = args[0];
            Path hdfsPath = new Path(filePath);
            // The second argument is false: a single file needs no
            // recursive deletion.
            if (fs.delete(hdfsPath, false)) {
                System.out.println("File " + filePath + " has been deleted successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
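Note that FileSystem.delete bypasses the HDFS trash, so the file is gone immediately. If deleted files should be recoverable, the Trash class can move them to the user's trash directory instead; a minimal sketch, assuming trash is enabled on the cluster via fs.trash.interval:

package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class DeleteToTrash {
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            Path hdfsPath = new Path(args[0]);
            // Moves the path into the current user's .Trash directory
            // rather than deleting it permanently.
            if (Trash.moveToAppropriateTrash(fs, hdfsPath, conf)) {
                System.out.println("Moved " + args[0] + " to trash.");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}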