Hadoop文件系统
基本的文件系统命令操作, 通过hadoop fs -help可以获取所有的命令的详细帮助文件。
Java抽象类org.apache.hadoop.fs.FileSystem定义了hadoop的一个文件系统接口。该类是一个抽象类,通过以下两种静态工厂方法可以获取FileSystem实例:
public static FileSystem get(Configuration conf) throws IOException
public static FileSystem get(URI uri, Configuration conf) throws IOException
具体方法实现:
1、public boolean mkdirs(Path f) throws IOException
一次性新建所有目录(包括父目录), f是完整的目录路径。
2、public FSDataOutputStream create(Path f) throws IOException
创建指定path对象的一个文件,返回一个用于写入数据的输出流
create()有多个重载版本,允许我们指定是否强制覆盖已有的文件、文件备份数量、写入文件缓冲区大小、文件块大小以及文件权限。
3、public void copyFromLocalFile(Path src, Path dst) throws IOException
将本地文件拷贝到文件系统
4、public boolean exists(Path f) throws IOException
检查文件或目录是否存在
5、public boolean delete(Path f, boolean recursive) throws IOException
永久性删除指定的文件或目录,如果f是一个空目录或者文件,那么recursive的值就会被忽略。只有recursive=true时,一个非空目录及其内容才会被删除。
6、FileStatus类封装了文件系统中文件和目录的元数据,包括文件长度、块大小、备份、修改时间、所有者以及权限信息。
通过"FileStatus.getPath()"可查看指定HDFS中某个目录下所有文件。
//output~
=======create dir=======
new dir hdfs://sfserver20.localdomain:9000/test
=======copy file=======
Upload to hdfs://sfserver20.localdomain:9000/test
hdfs://sfserver20.localdomain:9000/test/Configure.xml
hdfs://sfserver20.localdomain:9000/test/word.txt
=======create file=======
new file hdfs://sfserver20.localdomain:9000/test/word.txt
=======before delete file=======
new dir hdfs://sfserver20.localdomain:9000/test/del
new file hdfs://sfserver20.localdomain:9000/test/del.txt
/test has all files:
hdfs://sfserver20.localdomain:9000/test/Configure.xml
hdfs://sfserver20.localdomain:9000/test/del
hdfs://sfserver20.localdomain:9000/test/del.txt
hdfs://sfserver20.localdomain:9000/test/word.txt
=======after delete file=======
/test/del delete? true
/test/del.txt delete? true
/test/no exist? false
/test has all files:
hdfs://sfserver20.localdomain:9000/test/Configure.xml
hdfs://sfserver20.localdomain:9000/test/word.txt
基本的文件系统命令操作, 通过hadoop fs -help可以获取所有的命令的详细帮助文件。
Java抽象类org.apache.hadoop.fs.FileSystem定义了hadoop的一个文件系统接口。该类是一个抽象类,通过以下两种静态工厂方法可以获取FileSystem实例:
public static FileSystem get(Configuration conf) throws IOException
public static FileSystem get(URI uri, Configuration conf) throws IOException
具体方法实现:
1、public boolean mkdirs(Path f) throws IOException
一次性新建所有目录(包括父目录), f是完整的目录路径。
2、public FSDataOutputStream create(Path f) throws IOException
创建指定path对象的一个文件,返回一个用于写入数据的输出流
create()有多个重载版本,允许我们指定是否强制覆盖已有的文件、文件备份数量、写入文件缓冲区大小、文件块大小以及文件权限。
3、public void copyFromLocalFile(Path src, Path dst) throws IOException
将本地文件拷贝到文件系统
4、public boolean exists(Path f) throws IOException
检查文件或目录是否存在
5、public boolean delete(Path f, boolean recursive) throws IOException
永久性删除指定的文件或目录,如果f是一个空目录或者文件,那么recursive的值就会被忽略。只有recursive=true时,一个非空目录及其内容才会被删除。
6、FileStatus类封装了文件系统中文件和目录的元数据,包括文件长度、块大小、备份、修改时间、所有者以及权限信息。
通过"FileStatus.getPath()"可查看指定HDFS中某个目录下所有文件。
- package hdfsTest;
- import java.io.IOException;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.fs.FSDataOutputStream;
- import org.apache.hadoop.fs.FileStatus;
- import org.apache.hadoop.fs.FileSystem;
- import org.apache.hadoop.fs.Path;
- public class OperatingFiles {
- //initialization
- static Configuration conf = new Configuration();
- static FileSystem hdfs;
- static {
- String path = "/usr/java/hadoop-1.0.3/conf/";
- conf.addResource(new Path(path + "core-site.xml"));
- conf.addResource(new Path(path + "hdfs-site.xml"));
- conf.addResource(new Path(path + "mapred-site.xml"));
- path = "/usr/java/hbase-0.90.3/conf/";
- conf.addResource(new Path(path + "hbase-site.xml"));
- try {
- hdfs = FileSystem.get(conf);
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
- //create a direction
- public void createDir(String dir) throws IOException {
- Path path = new Path(dir);
- hdfs.mkdirs(path);
- System.out.println("new dir \t" + conf.get("fs.default.name") + dir);
- }
- //copy from local file to HDFS file
- public void copyFile(String localSrc, String hdfsDst) throws IOException{
- Path src = new Path(localSrc);
- Path dst = new Path(hdfsDst);
- hdfs.copyFromLocalFile(src, dst);
- //list all the files in the current direction
- FileStatus files[] = hdfs.listStatus(dst);
- System.out.println("Upload to \t" + conf.get("fs.default.name") + hdfsDst);
- for (FileStatus file : files) {
- System.out.println(file.getPath());
- }
- }
- //create a new file
- public void createFile(String fileName, String fileContent) throws IOException {
- Path dst = new Path(fileName);
- byte[] bytes = fileContent.getBytes();
- FSDataOutputStream output = hdfs.create(dst);
- output.write(bytes);
- System.out.println("new file \t" + conf.get("fs.default.name") + fileName);
- }
- //list all files
- public void listFiles(String dirName) throws IOException {
- Path f = new Path(dirName);
- FileStatus[] status = hdfs.listStatus(f);
- System.out.println(dirName + " has all files:");
- for (int i = 0; i< status.length; i++) {
- System.out.println(status[i].getPath().toString());
- }
- }
- //judge a file existed? and delete it!
- public void deleteFile(String fileName) throws IOException {
- Path f = new Path(fileName);
- boolean isExists = hdfs.exists(f);
- if (isExists) { //if exists, delete
- boolean isDel = hdfs.delete(f,true);
- System.out.println(fileName + " delete? \t" + isDel);
- } else {
- System.out.println(fileName + " exist? \t" + isExists);
- }
- }
- public static void main(String[] args) throws IOException {
- OperatingFiles ofs = new OperatingFiles();
- System.out.println("\n=======create dir=======");
- String dir = "/test";
- ofs.createDir(dir);
- System.out.println("\n=======copy file=======");
- String src = "/home/ictclas/Configure.xml";
- ofs.copyFile(src, dir);
- System.out.println("\n=======create a file=======");
- String fileContent = "Hello, world! Just a test.";
- ofs.createFile(dir+"/word.txt", fileContent);
- }
- }
//output~
=======create dir=======
new dir hdfs://sfserver20.localdomain:9000/test
=======copy file=======
Upload to hdfs://sfserver20.localdomain:9000/test
hdfs://sfserver20.localdomain:9000/test/Configure.xml
hdfs://sfserver20.localdomain:9000/test/word.txt
=======create file=======
new file hdfs://sfserver20.localdomain:9000/test/word.txt
=======before delete file=======
new dir hdfs://sfserver20.localdomain:9000/test/del
new file hdfs://sfserver20.localdomain:9000/test/del.txt
/test has all files:
hdfs://sfserver20.localdomain:9000/test/Configure.xml
hdfs://sfserver20.localdomain:9000/test/del
hdfs://sfserver20.localdomain:9000/test/del.txt
hdfs://sfserver20.localdomain:9000/test/word.txt
=======after delete file=======
/test/del delete? true
/test/del.txt delete? true
/test/no exist? false
/test has all files:
hdfs://sfserver20.localdomain:9000/test/Configure.xml
hdfs://sfserver20.localdomain:9000/test/word.txt