HDFS 编程（HDFS Programming）

文章展示了使用 Apache Hadoop 的 FileSystem API 进行文件和目录创建、检查是否存在、删除、列出内容、写入和读取 HDFS（Hadoop Distributed File System）的基本操作示例。
摘要由CSDN通过智能技术生成

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.util.Scanner;
import java.net.URI;

public class CreateDir {
    /**
     * Creates a directory in HDFS. The directory name is read from stdin and
     * prefixed with '/' so it is resolved against the filesystem root.
     */
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String dirPath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(dirPath);
            if (fs.mkdirs(hdfsPath)) {
                System.out.println("Directory " + dirPath + " has been created successfully!");
            } else {
                // mkdirs() returning false was previously ignored, hiding failures.
                System.out.println("Failed to create directory " + dirPath + "!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.util.Scanner;
import java.net.URI;

public class DirExist {
    /**
     * Checks whether a directory exists in HDFS. The name is read from stdin
     * and rooted at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner input = new Scanner(System.in);
            String target = '/' + input.next();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            boolean present = hdfs.exists(new Path(target));
            System.out.println(present ? "Directory Exists!" : "Directory not Exists!");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
 
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class ListFiles {
    /**
     * Lists the names of the entries directly under an HDFS path read from
     * stdin (no leading '/' is added for this class).
     */
    public static void main(String[] args) {
        try {
            Scanner input = new Scanner(System.in);
            String dir = input.next();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            FileStatus[] entries = hdfs.listStatus(new Path(dir));
            for (Path entry : FileUtil.stat2Paths(entries)) {
                System.out.println(entry.getName());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class DeleteDir {
    /**
     * Recursively deletes a directory in HDFS. The name is read from stdin
     * and rooted at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String dirPath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(dirPath);
            // second argument true = recursive delete of directory contents
            if (fs.delete(hdfsPath, true)) {
                System.out.println("Directory " + dirPath + " has been deleted successfully!");
            } else {
                // delete() returning false was previously ignored, hiding failures
                // (e.g. the path does not exist).
                System.out.println("Failed to delete directory " + dirPath + "!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class CreateFile {
    /**
     * Creates an empty file in HDFS. The name is read from stdin and rooted
     * at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path hdfsPath = new Path(filePath);
            // fs.create() returns an open FSDataOutputStream; the original leaked it,
            // so the lease was never released and the file might not be finalized.
            fs.create(hdfsPath).close();
            System.out.println("File " + filePath + " has been created successfully!");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class FileExist {
    /**
     * Checks whether a file exists in HDFS. The name is read from stdin and
     * rooted at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner input = new Scanner(System.in);
            String target = '/' + input.next();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            boolean present = hdfs.exists(new Path(target));
            System.out.println(present ? "File Exists!" : "File not Exists!");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class WriteFile {
    /**
     * Creates an HDFS file (path from stdin, rooted at '/'), writes one line,
     * then re-opens it in append mode and writes a second line.
     */
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(filePath);

            // try-with-resources guarantees the stream is closed even when write()
            // throws (the original leaked it on failure). Arguments: overwrite=true,
            // buffer 1024 bytes, replication 1, block size 64 MB (1 << 26).
            // getBytes("UTF-8") pins the charset instead of the platform default.
            try (FSDataOutputStream os =
                     fs.create(srcPath, true, 1024, (short) 1, (long) (1 << 26))) {
                os.write("Hello, this is a sentence that should be written into the file.\n"
                        .getBytes("UTF-8"));
            }

            try (FSDataOutputStream os = fs.append(srcPath)) {
                os.write("Hello, this is another sentence that should be written into the file.\n"
                        .getBytes("UTF-8"));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadFile {
    /**
     * Reads a text file from HDFS and prints it line by line to stdout.
     * The path is read from stdin and rooted at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String filePath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(filePath);

            // DataInputStream.readLine() (inherited by FSDataInputStream) is deprecated
            // and mangles non-ASCII bytes; decode through a BufferedReader with an
            // explicit charset instead. try-with-resources closes the stream even
            // when reading throws.
            try (BufferedReader reader =
                     new BufferedReader(new InputStreamReader(fs.open(srcPath), "UTF-8"))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class Rename {
    /**
     * Renames (moves) an HDFS path. Source and destination names are read
     * from stdin, each rooted at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner sc = new Scanner(System.in);
            String srcStrPath = '/' + sc.next();
            String dstStrPath = '/' + sc.next();
            FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            Path srcPath = new Path(srcStrPath);
            Path dstPath = new Path(dstStrPath);
            if (fs.rename(srcPath, dstPath)) {
                // Fixed: the original message was missing the space before
                // "successfully", producing "/a to /bsuccessfully!".
                System.out.println("rename from " + srcStrPath + " to " + dstStrPath + " successfully!");
            } else {
                // rename() returning false was previously ignored, hiding failures.
                System.out.println("Failed to rename " + srcStrPath + " to " + dstStrPath + "!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.util.Scanner;

public class DeleteFile {
    /**
     * Deletes a single file in HDFS (non-recursive delete). The name is read
     * from stdin and rooted at '/'.
     */
    public static void main(String[] args) {
        try {
            Scanner input = new Scanner(System.in);
            String target = '/' + input.next();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://master:9000"), new Configuration());
            // second argument false = do not recurse (plain file delete)
            boolean removed = hdfs.delete(new Path(target), false);
            if (removed) {
                System.out.println("File "+ target +" has been deleted successfully!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值