I've been hitting HDFS pretty heavily today, and as a seasoned senior-year rookie coder, wrapping things up is an occupational disease. So I stole a bit of time to write a utility class for other classes to use, and looking at the few short lines left in my main function, I let out a long sigh of relief.
Without further ado, here's the code. (To make life easier for myself and others, I made an exception and actually wrote some comments. Insert smirk here.)
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Date;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
public class HDFSUtils {

    // Connection defaults; override them via the three-argument constructor below.
    private String strUri = "hdfs://192.168.52.129:9000";
    private String strHadoopHome = "E:\\Application\\hadoop-3";
    private FileSystem fs = null;
    private String user = "hadoop";
    private Configuration conf = new Configuration();
    private void init() throws IOException, InterruptedException, URISyntaxException {
        // On Windows, Hadoop's Shell class reads the "hadoop.home.dir" system
        // property to locate winutils.exe, so point it at the local install.
        // Note os.name is e.g. "Windows 10", hence startsWith rather than an
        // exact case-insensitive match.
        if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
            System.setProperty("hadoop.home.dir", strHadoopHome);
        }
        fs = FileSystem.get(new URI(strUri), conf, user);
    }

    /** Connect with the default URI, Hadoop home and user above. */
    public HDFSUtils() throws IOException, InterruptedException, URISyntaxException {
        init();
    }
    /**
     * Connect to HDFS with explicit settings.
     *
     * @param strUri        HDFS URI, e.g. hdfs://host:9000
     * @param strHadoopHome local Hadoop installation directory (used as
     *                      hadoop.home.dir to find winutils.exe on Windows)
     * @param user          user name to connect as
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public HDFSUtils(String strUri, String strHadoopHome, String user)
            throws IOException, InterruptedException, URISyntaxException {
        this.strUri = strUri;
        this.strHadoopHome = strHadoopHome;
        this.user = user;
        init();
    }
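The main function I mentioned at the top isn't shown yet, so here is a minimal sketch of how the class gets used. It sticks to the two constructors defined above (the host, path and user simply mirror the field defaults); the actual utility methods come further down:

    // Minimal usage sketch: drop a main like this into the class. It only
    // constructs the utility, using the default values from the fields above.
    public static void main(String[] args) throws Exception {
        HDFSUtils hdfs = new HDFSUtils(); // default cluster, user "hadoop"
        HDFSUtils other = new HDFSUtils("hdfs://192.168.52.129:9000",
                "E:\\Application\\hadoop-3", "hadoop");
    }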