// Hadoop HDFS utility class.
package com.viburnum.util;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Utility class for common HDFS operations (create/append/read/delete files,
 * manage directories, list files and block locations).
 *
 * NOTE(review): all state is static and mutated from the instance constructor,
 * so this class is not thread-safe and constructing a second instance
 * overwrites the shared file name and configuration — confirm callers only
 * ever build one instance.
 */
public class HdfsUtil {

	// Base HDFS URL (e.g. "hdfs://host:port"), loaded from the properties file.
	public static String hdfsUrl = "";

	// Directory under which car LBS (location) files are stored.
	private static String carInfoDir = "/car/lbs/";

	// Full HDFS path of the current LBS file; computed in the constructor.
	private static String carInfoFile = "";

	// File-name prefix for generated LBS files.
	private static String carInfoFilePrefix = "lbs_";

	// Classpath location of the configuration properties file.
	private static String properitesFile = "/viburnum.properties";

	private static Configuration conf = new Configuration();

	private static FileSystem hdfs;

	/**
	 * Loads the HDFS URL and LBS directory from the properties file, opens the
	 * shared {@link FileSystem} handle, and computes a unique timestamped file
	 * path for this session.
	 */
	public HdfsUtil() {
		hdfsUrl = PropUtil.getResourceValue(properitesFile, "hdfs.url");
		carInfoDir = PropUtil.getResourceValue(properitesFile, "car.lbs.dir");
		try {
			// Required for hdfs.append() support on older Hadoop versions.
			conf.setBoolean("dfs.support.append", true);
			hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
		} catch (IOException e) {
			// Preserves the original no-throw constructor contract; if this
			// fails, later operations will fail on the null 'hdfs' handle.
			e.printStackTrace();
		}
		// SimpleDateFormat is not thread-safe, so keep instances method-local.
		Date now = new Date();
		String fileDate = new SimpleDateFormat("yyyyMMdd").format(now);
		// nanoTime suffix reduces the chance of name collisions within the
		// same millisecond.
		String fileTime = new SimpleDateFormat("HHmmssSSS").format(now) + System.nanoTime();
		carInfoFile = carInfoDir + "/" + fileDate + "/" + carInfoFilePrefix + fileTime + ".txt";
	}

	public static String getProperitesFile() {
		return properitesFile;
	}

	public static void setProperitesFile(String properitesFile) {
		HdfsUtil.properitesFile = properitesFile;
	}

	public static String getHdfsUrl() {
		return hdfsUrl;
	}

	public static void setHdfsUrl(String hdfsUrl) {
		HdfsUtil.hdfsUrl = hdfsUrl;
	}

	public static String getCarInfoDir() {
		return carInfoDir;
	}

	public static void setCarInfoDir(String carInfoDir) {
		HdfsUtil.carInfoDir = carInfoDir;
	}

	public static String getCarInfoFile() {
		return carInfoFile;
	}

	public static void setCarInfoFile(String carInfoFile) {
		HdfsUtil.carInfoFile = carInfoFile;
	}

	public static Configuration getConf() {
		return conf;
	}

	public static void setConf(Configuration conf) {
		HdfsUtil.conf = conf;
	}

	/**
	 * Creates an HDFS directory, including any missing parent directories.
	 *
	 * @param dirPath directory path to create
	 * @throws Exception if the filesystem operation fails
	 */
	public static void createDir(String dirPath) throws Exception {
		hdfs.mkdirs(new Path(dirPath));
	}

	/**
	 * Recursively deletes an HDFS directory.
	 *
	 * @param dirPath directory path to delete
	 * @throws Exception if the filesystem operation fails
	 */
	public static void deleteDir(String dirPath) throws Exception {
		// Explicit recursive flag replaces the deprecated one-argument delete().
		hdfs.delete(new Path(dirPath), true);
	}

	/**
	 * Creates (or overwrites) the current LBS file with the given content plus
	 * a trailing newline.
	 *
	 * @param content text to write
	 * @throws Exception if the filesystem operation fails
	 */
	public static void createFile(String content) throws Exception {
		FSDataOutputStream out = hdfs.create(new Path(getCarInfoFile()));
		try {
			// Explicit charset: getBytes() without one is platform-dependent.
			out.write((content + "\n").getBytes(StandardCharsets.UTF_8));
		} finally {
			// Original leaked the stream if write() threw.
			out.close();
		}
	}

	/**
	 * Creates (or overwrites) the named file with the given content plus a
	 * trailing newline, and records it as the current LBS file.
	 *
	 * @param fileName HDFS path of the file to create
	 * @param content  text to write
	 * @throws Exception if the filesystem operation fails
	 */
	public static void createFile(String fileName, String content) throws Exception {
		setCarInfoFile(fileName);
		FSDataOutputStream out = hdfs.create(new Path(fileName));
		try {
			out.write((content + "\n").getBytes(StandardCharsets.UTF_8));
		} finally {
			out.close();
		}
	}

	/**
	 * Appends content to the current LBS file when {@code append} is true
	 * (creating the file first if needed); otherwise overwrites it via
	 * {@link #createFile(String)}.
	 *
	 * @param content text to write
	 * @param append  true to append, false (or null) to overwrite
	 * @throws Exception if the filesystem operation fails
	 */
	public static void createFile(String content, Boolean append) throws Exception {
		// TRUE.equals() also tolerates a null Boolean (original NPE'd).
		if (Boolean.TRUE.equals(append)) {
			Path path = new Path(getCarInfoFile());
			if (!hdfs.exists(path)) {
				// Close the stream returned by create(); the original leaked it.
				hdfs.create(path).close();
			}
			InputStream in = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
			FSDataOutputStream out = hdfs.append(path);
			try {
				// close=false: we close explicitly below. The original used the
				// auto-closing overload and then double-closed 'out'.
				IOUtils.copyBytes(in, out, conf, false);
			} finally {
				in.close();
				out.close();
			}
		} else {
			createFile(content);
		}
	}

	/**
	 * Appends the given content to an existing HDFS file.
	 *
	 * @param fileName HDFS path of the file to append to
	 * @param content  text to append
	 * @throws Exception if the filesystem operation fails
	 */
	public static void writeFile(String fileName, String content)
			throws Exception {
		InputStream in = new BufferedInputStream(
				new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)));
		OutputStream out = hdfs.append(new Path(fileName));
		// close=true: copyBytes closes both streams when done.
		IOUtils.copyBytes(in, out, 4096, true);
	}

	/**
	 * Rewrites the current LBS file so it contains the new content (plus a
	 * newline) followed by the file's previous content.
	 *
	 * Bug fix: the original called create() — which truncates the file —
	 * BEFORE reading the old bytes, so the previous content was always lost.
	 * The old bytes are now read first.
	 *
	 * @param content text to prepend
	 * @throws Exception if the filesystem operation fails
	 */
	public static void appendAll(String content) throws Exception {
		Path path = new Path(getCarInfoFile());
		// Read the existing bytes BEFORE create() truncates the file.
		byte[] oldContentBytes = hdfs.exists(path)
				? readFile(getCarInfoFile())
				: new byte[0];
		byte[] contentBytes = (content + "\n").getBytes(StandardCharsets.UTF_8);
		FSDataOutputStream out = hdfs.create(path);
		try {
			// Original ordering preserved: new content first, old content after.
			out.write(byteMerge(contentBytes, oldContentBytes));
		} finally {
			out.close();
		}
	}

	/**
	 * Renames (moves) an HDFS file.
	 *
	 * @param oldPath current path
	 * @param newPath destination path
	 * @throws Exception if the filesystem operation fails
	 */
	public static void renameFile(String oldPath, String newPath)
			throws Exception {
		hdfs.rename(new Path(oldPath), new Path(newPath));
	}

	/**
	 * Recursively deletes an HDFS file or directory.
	 *
	 * @param hadoopFile path to delete
	 * @return true if the path was deleted
	 * @throws Exception if the filesystem operation fails
	 */
	public static boolean deleteFile(String hadoopFile) throws Exception {
		// Explicit recursive flag replaces the deprecated one-argument delete().
		return hdfs.delete(new Path(hadoopFile), true);
	}

	/**
	 * Copies a local file into HDFS.
	 *
	 * @param localPath  source path on the local filesystem
	 * @param hadoopPath destination path in HDFS
	 * @throws Exception if the filesystem operation fails
	 */
	public static void uploadLocalFile(String localPath, String hadoopPath)
			throws Exception {
		hdfs.copyFromLocalFile(new Path(localPath), new Path(hadoopPath));
	}

	/**
	 * Reads an entire HDFS file into a byte array.
	 *
	 * @param hadoopFile path of the file to read
	 * @return the file's bytes
	 * @throws Exception if the file does not exist, is too large to buffer,
	 *                   or the read fails
	 */
	public static byte[] readFile(String hadoopFile) throws Exception {
		Path path = new Path(hadoopFile);
		if (!hdfs.exists(path)) {
			throw new Exception("the file is not found .");
		}
		FileStatus stat = hdfs.getFileStatus(path);
		long len = stat.getLen();
		// A single byte[] cannot hold more than Integer.MAX_VALUE bytes; the
		// original's Integer.parseInt(String.valueOf(len)) hid this limit.
		if (len > Integer.MAX_VALUE) {
			throw new IOException("file too large to buffer in memory: " + len);
		}
		byte[] buffer = new byte[(int) len];
		FSDataInputStream in = hdfs.open(path);
		try {
			in.readFully(0, buffer);
		} finally {
			// Original leaked the stream if readFully() threw.
			in.close();
		}
		return buffer;
	}

	/**
	 * Lists the entries directly under an HDFS directory, printing each path
	 * and returning them space-separated.
	 *
	 * @param hadoopPath directory to list
	 * @return space-separated list of child paths (trailing space preserved)
	 * @throws Exception if the filesystem operation fails
	 */
	public static String listFiles(String hadoopPath) throws Exception {
		FileStatus[] files = hdfs.listStatus(new Path(hadoopPath));
		// StringBuilder avoids O(n^2) string concatenation in the loop.
		StringBuilder fileString = new StringBuilder();
		for (FileStatus file : files) {
			System.out.println(file.getPath().toString());
			fileString.append(file.getPath().toString()).append(" ");
		}
		return fileString.toString();
	}

	/**
	 * Prints and returns the datanode hosts holding each block of a file.
	 *
	 * Bug fix: the original built up 'blockString' in name only — it never
	 * appended anything and always returned "".
	 *
	 * @param hadoopPath file whose block locations to report
	 * @return space-separated host names, one entry per block replica
	 * @throws Exception if the filesystem operation fails
	 */
	public static String getBlockInfo(String hadoopPath) throws Exception {
		FileStatus fileStatus = hdfs.getFileStatus(new Path(hadoopPath));
		BlockLocation[] blkloc = hdfs.getFileBlockLocations(fileStatus, 0,
				fileStatus.getLen());
		StringBuilder blockString = new StringBuilder();
		for (BlockLocation loc : blkloc) {
			// Hoist getHosts(): the original called it on every iteration.
			for (String host : loc.getHosts()) {
				System.out.println(host);
				blockString.append(host).append(" ");
			}
		}
		return blockString.toString();
	}

	/**
	 * Concatenates two byte arrays.
	 *
	 * @param byte_1 first array
	 * @param byte_2 second array
	 * @return new array containing byte_1 followed by byte_2
	 */
	public static byte[] byteMerge(byte[] byte_1, byte[] byte_2){
		byte[] byte_3 = new byte[byte_1.length + byte_2.length];
		System.arraycopy(byte_1, 0, byte_3, 0, byte_1.length);
		System.arraycopy(byte_2, 0, byte_3, byte_1.length, byte_2.length);
		return byte_3;
	}

	/**
	 * Closes the shared FileSystem handle when an instance is finalized.
	 *
	 * NOTE(review): finalize() is deprecated and closing a static handle from
	 * any instance's finalizer can break other users — consider an explicit
	 * shutdown hook instead.
	 */
	public void finalize() throws Throwable{
		try {
			// Null guard: hdfs may never have been initialized if the
			// constructor's FileSystem.get() failed.
			if (hdfs != null) {
				hdfs.close();
			}
		} finally {
			// Always chain to Object.finalize(), even if close() throws.
			super.finalize();
		}
	}

}
