package com.hj.hadoop;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HDFS {

    /** Hadoop configuration assembled from the local conf/*.xml files in init(). */
    Configuration conf = null;
    /** Shared FileSystem handle; opened once in init() and reused by every operation. */
    FileSystem fs = null;

    /**
     * Loads core-site.xml and hdfs-site.xml from the ./conf directory next to the
     * working directory, forces the default FS URI, and opens the FileSystem.
     *
     * @throws Exception if the configuration resources cannot be read or the
     *                   FileSystem cannot be obtained
     */
    private void init() throws Exception {
        // Working directory of the running JVM.
        String proDir = System.getProperty("user.dir");
        System.out.println(proDir);
        // Resolve the conf directory, e.g. D:\StudyPrefecture\EclipseProject\TK_BD\conf
        String baseDir = proDir + File.separator + "conf";
        String corePath = baseDir + File.separator + "core-site.xml";
        String hdfsPath = baseDir + File.separator + "hdfs-site.xml";
        conf = new Configuration();
        conf.addResource(new Path(corePath));
        conf.addResource(new Path(hdfsPath));
        // Explicit NameNode URI; overrides whatever the XML resources declare.
        conf.set("fs.defaultFS", "hdfs://hadoop:9000");
        fs = FileSystem.get(conf);
    }

    /**
     * Uploads a local file to HDFS using the high-level copy API.
     *
     * FIX: the original called fs.close() here. fs is a shared handle (and
     * FileSystem.get() returns a process-wide cached instance), so closing it
     * after one operation breaks every later call in the same run. Lifetime is
     * now managed by the caller (see main()).
     */
    private void upload() throws IllegalArgumentException, IOException {
        fs.copyFromLocalFile(new Path("D:/test.txt"), new Path("/test.txt"));
    }

    /**
     * Uploads a local file to HDFS by copying raw streams.
     * FIX: both streams are now closed via try-with-resources (the original
     * leaked the local FileInputStream and never closed/flushed the HDFS
     * output stream, so the write was not guaranteed to complete).
     */
    private void ioupload() throws IllegalArgumentException, IOException {
        try (FileInputStream in = new FileInputStream("D:/test.txt");
             FSDataOutputStream out = fs.create(new Path("/test111.txt"), true)) {
            IOUtils.copy(in, out);
        }
    }

    /**
     * Downloads /test.txt to the local file system using the high-level API.
     * delSrc=false keeps the HDFS source; useRawLocalFileSystem=true skips the
     * local .crc checksum file.
     * FIX: no fs.close() here — see upload().
     */
    private void download() throws IllegalArgumentException, IOException {
        fs.copyToLocalFile(false, new Path("/test.txt"), new Path("D:/data/test01.txt"), true);
    }

    /**
     * Downloads a file from HDFS by copying raw streams.
     * FIX: both streams are now closed via try-with-resources.
     */
    private void iodownload() throws IllegalArgumentException, IOException {
        try (FSDataInputStream in = fs.open(new Path("/test.txt"));
             FileOutputStream out = new FileOutputStream("D:/data/test02.txt")) {
            IOUtils.copy(in, out);
        }
    }

    /** Creates the /ZJ directory (mkdir -p semantics) and prints the result. */
    private void mkdir() throws IllegalArgumentException, IOException {
        boolean created = fs.mkdirs(new Path("/ZJ"));
        System.out.println(created);
    }

    /**
     * Deletes the /ZJ directory (or file) and prints the result.
     * FIX: the single-argument fs.delete(Path) is deprecated in Hadoop; the
     * two-argument form with recursive=true deletes non-empty directories and
     * matches the original method's intent.
     */
    private void delete() throws IllegalArgumentException, IOException {
        boolean deleted = fs.delete(new Path("/ZJ"), true);
        System.out.println(deleted);
    }

    /** Lists the immediate children of the HDFS root, tagging each as file or directory. */
    private void list() throws FileNotFoundException, IllegalArgumentException, IOException {
        // Returned as an array; listStatus is not recursive.
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus file : listStatus) {
            System.out.println((file.isFile() ? "file:" : "Directory:") + file.getPath().getName());
        }
    }

    /**
     * Reads /test.txt line by line and prints it (demonstrates the HDFS read path).
     * FIX: the reader (and the wrapped HDFS input stream) is now closed even if
     * readLine() throws, via try-with-resources.
     * NOTE(review): the reader uses the platform default charset, as the
     * original did — specify a charset explicitly if the file encoding matters.
     */
    private void read() throws IllegalArgumentException, IOException {
        try (FSDataInputStream in = fs.open(new Path("/test.txt"));
             BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /**
     * Writes to an HDFS file, overwriting it (demonstrates the HDFS write path).
     * FIX: the writer is closed via try-with-resources so the data is flushed
     * even if append() throws.
     *
     * @param filePath absolute HDFS path of the file to (re)create
     */
    private void write(String filePath) throws IllegalArgumentException, IOException {
        try (FSDataOutputStream out = fs.create(new Path(filePath));
             BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
            // writer.write("hello words");
            // Appends the single char at index 2 ("g") — start inclusive, end exclusive.
            writer.append("sdgfhadskfkjhdsjf", 2, 3);
        }
    }

    public static void main(String[] args) throws Exception {
        HDFS hdfs = new HDFS();
        hdfs.init();
        // hdfs.upload();
        // hdfs.ioupload();
        // hdfs.download();
        // hdfs.iodownload();
        // hdfs.mkdir();
        // hdfs.delete();
        // hdfs.list();
        // hdfs.read();
        // hdfs.write("/test.txt");
        // Release the FileSystem once, after all operations are done.
        hdfs.fs.close();
    }
}
// HDFS development with Java (基于Java开发的HDFS篇)
// Latest recommended article published 2024-07-25 16:52:43