package com.hdfs;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Demo of common HDFS operations via the Hadoop {@link FileSystem} API:
 * read, recursive list, mkdir, delete, rename, and append.
 *
 * <p>All methods are stateless demo helpers that print their result to stdout.
 */
public class FileUtilsDemo {

    // URL.setURLStreamHandlerFactory may only be invoked once per JVM
    // (a second call throws Error); this flag makes readFile() re-entrant.
    private static boolean urlHandlerFactorySet = false;

    public static void main(String[] args) throws Exception {
        String hdfsPath = "hdfs://192.168.6.108:9000";
        String fileName = "hdfs://192.168.6.108:9000/usr/input/test.txt";
        String filePath = "hdfs://192.168.6.108:9000/usr/input/test";
        String fileList = "hdfs://192.168.6.108:9000/usr/input";
        String reNameFileName = "hdfs://192.168.6.108:9000/usr/input/a.txt";
        String inputFileName = "input/test.txt";
        // Read file contents
        // readFile(fileName);
        // List files recursively
        // getFileList(fileList);
        // Create a directory
        // createFile(hdfsPath, filePath);
        // Delete a directory and everything inside it
        // deleteFile(hdfsPath, filePath, true);
        // Rename a file
        // reNameFileName(hdfsPath, reNameFileName);
        // Append local file contents to an HDFS file
        appendFileToHdfs(hdfsPath, fileName, inputFileName);
    }

    /**
     * Appends the contents of a local file to an existing HDFS file.
     *
     * @param hdfsPath      HDFS URI, e.g. {@code hdfs://host:9000}
     * @param hdfsFileName  fully-qualified HDFS path of the file to append to
     * @param inputFileName local file whose bytes are appended
     * @throws Exception if the local file cannot be read or the append fails
     */
    public static void appendFileToHdfs(String hdfsPath, String hdfsFileName, String inputFileName)
            throws Exception {
        Configuration conf = new Configuration();
        conf.setBoolean("dfs.support.append", true);
        // If this property was changed on the cluster without restarting HDFS,
        // set it again here; no restart is needed when running on the cluster itself.
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        FileSystem fileSystem = FileSystem.get(new URI(hdfsPath), conf);
        // try-with-resources closes both streams even if append() throws,
        // so copyBytes must NOT close them itself (last argument = false).
        try (InputStream in = new BufferedInputStream(new FileInputStream(inputFileName));
             FSDataOutputStream out = fileSystem.append(new Path(hdfsFileName))) {
            IOUtils.copyBytes(in, out, 4096, false);
        }
        System.out.println("已添加完");
    }

    /**
     * Renames a file by appending {@code "tmp"} to its current path and
     * prints whether the rename succeeded.
     *
     * @param hdfsPath       HDFS URI used to obtain the FileSystem
     * @param reNameFileName fully-qualified HDFS path of the file to rename
     * @throws Exception if the FileSystem cannot be obtained
     */
    public static void reNameFileName(String hdfsPath, String reNameFileName) throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI(hdfsPath), new Configuration());
        boolean flag = fileSystem.rename(new Path(reNameFileName), new Path(reNameFileName + "tmp"));
        System.out.println(flag);
    }

    /**
     * Prints the contents of an HDFS file to stdout by opening it through the
     * {@code hdfs://} URL scheme.
     *
     * @param filePath fully-qualified HDFS URL of the file to read
     * @throws IOException if the file cannot be opened or read
     */
    public static void readFile(String filePath) throws IOException {
        // Installing the factory twice throws java.lang.Error, so guard it;
        // this lets readFile be called more than once in the same JVM.
        if (!urlHandlerFactorySet) {
            URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
            urlHandlerFactorySet = true;
        }
        URL url = new URL(filePath);
        InputStream in = url.openStream();
        // copyBytes with close=true closes the stream when done.
        IOUtils.copyBytes(in, System.out, 1024, true);
    }

    /**
     * Recursively lists everything under the given path: directories are
     * printed by path and descended into; files are printed as
     * "文件 owner replication path".
     *
     * @param filePath HDFS directory (or file) to list
     * @throws Exception if the FileSystem cannot be obtained or listed
     */
    public static void getFileList(String filePath) throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI(filePath), new Configuration());
        FileStatus[] listStatus = fileSystem.listStatus(new Path(filePath));
        for (FileStatus fileStatus : listStatus) {
            if (fileStatus.isDirectory()) { // isDir() is deprecated; isDirectory() is the replacement
                System.out.println(fileStatus.getPath().toString());
                getFileList(fileStatus.getPath().toString());
            } else {
                // In this branch the status is never a directory, so the original
                // ternary always produced the file output; print it directly.
                System.out.println("文件" + " " + fileStatus.getOwner() + " "
                        + fileStatus.getReplication() + " " + fileStatus.getPath());
            }
        }
    }

    /**
     * Creates a directory (including missing parents) and prints whether it succeeded.
     *
     * @param hdfsPath HDFS URI used to obtain the FileSystem
     * @param filePath directory path to create
     * @throws Exception if the FileSystem cannot be obtained
     */
    public static void createFile(String hdfsPath, String filePath) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI(hdfsPath);
        FileSystem fileSystem = FileSystem.get(uri, conf);
        boolean flag = fileSystem.mkdirs(new Path(filePath));
        System.out.println(flag);
    }

    /**
     * Deletes a file or directory and prints whether the delete succeeded.
     *
     * @param hdfsPath HDFS URI used to obtain the FileSystem
     * @param filePath path to delete
     * @param isDelete whether to delete directory contents recursively
     * @throws Exception if the FileSystem cannot be obtained
     */
    public static void deleteFile(String hdfsPath, String filePath, boolean isDelete) throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI(hdfsPath);
        FileSystem fileSystem = FileSystem.get(uri, conf);
        // BUG FIX: the isDelete parameter was previously ignored (true was
        // hard-coded), making non-recursive deletes impossible.
        boolean flag = fileSystem.delete(new Path(filePath), isDelete);
        System.out.println(flag);
    }
}
// Source note: adapted from the blog post "Hadoop之HDFS的Java Interface(三)"
// (most recent related article published 2023-01-17 17:32:14).