// Demo: how to use Hadoop's compression and decompression facilities
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.util.ReflectionUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
public class DemoCompression {
    public static void main(String[] args) throws Exception {
        // Compress a local text file with the Gzip codec
        compress("D:\\b\\web.txt", "org.apache.hadoop.io.compress.GzipCodec");
        // Decompress a .gz file back to plain text
        decompress("E:\\bigdata_code\\web.txt.gz", ".txt");
    }
    /**
     * Compress a file with the given codec.
     * @param filename path of the file to compress (directory + file name)
     * @param method   fully qualified class name of the compression codec
     */
    private static void compress(String filename, String method) throws Exception {
        // Input stream for the source file
        FileInputStream fis = new FileInputStream(new File(filename));
        // Load the codec class by its fully qualified name
        Class<?> codecClass = Class.forName(method);
        // Instantiate the codec through ReflectionUtils, passing a fresh Configuration
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, new Configuration());
        // Output stream for the compressed file: source name + the codec's default extension (e.g. ".gz")
        FileOutputStream fos = new FileOutputStream(new File(filename + codec.getDefaultExtension()));
        // Wrap the raw output stream with the codec's compression stream
        CompressionOutputStream cos = codec.createOutputStream(fos);
        // Copy the bytes through the compression stream (4 MB buffer, streams closed manually below)
        IOUtils.copyBytes(fis, cos, 4 * 1024 * 1024, false);
        cos.close();
        fos.close();
        fis.close();
    }
    /**
     * Decompress a file whose codec is inferred from its file extension.
     * @param filename path of the compressed file (directory + file name)
     * @param decodec  suffix appended to the output file name (e.g. ".txt")
     */
    private static void decompress(String filename, String decodec) throws IOException {
        // Ask the factory for a codec matching the file extension (e.g. ".gz" -> GzipCodec)
        CompressionCodecFactory factory = new CompressionCodecFactory(new Configuration());
        CompressionCodec codec = factory.getCodec(new Path(filename));
        if (codec == null) {
            throw new IOException("No compression codec found for " + filename);
        }
        // Input stream that decompresses the source file on the fly
        CompressionInputStream cis = codec.createInputStream(new FileInputStream(new File(filename)));
        // Plain output stream for the decompressed data
        FileOutputStream fos = new FileOutputStream(new File(filename + decodec));
        // Copy the bytes (4 MB buffer, streams closed manually below)
        IOUtils.copyBytes(cis, fos, 4 * 1024 * 1024, false);
        // Close the streams
        fos.close();
        cis.close();
    }
}
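As a side note, the codec in compress() does not have to be created through Class.forName and ReflectionUtils: CompressionCodecFactory can also look up one of its registered codecs (which include GzipCodec by default) by class name. The variant below is only a minimal sketch of that approach; the method name compressWithFactory is illustrative and not part of the original demo.

// Sketch of an alternative codec lookup (method name and error message are illustrative)
private static void compressWithFactory(String filename, String codecClassName) throws IOException {
    // Resolve the codec from the factory's registered codecs instead of using reflection
    CompressionCodec codec = new CompressionCodecFactory(new Configuration())
            .getCodecByClassName(codecClassName);
    if (codec == null) {
        throw new IOException("Codec not registered with the factory: " + codecClassName);
    }
    FileInputStream fis = new FileInputStream(new File(filename));
    FileOutputStream fos = new FileOutputStream(new File(filename + codec.getDefaultExtension()));
    CompressionOutputStream cos = codec.createOutputStream(fos);
    IOUtils.copyBytes(fis, cos, 4 * 1024 * 1024, false);
    cos.close();
    fos.close();
    fis.close();
}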
In addition, the code fragment below shows how to use Hadoop's decompression support inside a MapReduce job, here in a reducer's setup() method that reads a compressed record file distributed through the job's cache.
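Note that this only works if the compressed file was registered as a cache file when the job was submitted. The driver-side sketch below illustrates that step under a few assumptions: the job name, the HDFS path, and the OfflineDataReducer class name (taken from the log messages in the fragment) are illustrative, not part of the original code.

// Driver-side sketch (hypothetical job name, path, and reducer class; requires
// org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, org.apache.hadoop.mapreduce.Job)
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "offline-data");
job.setReducerClass(OfflineDataReducer.class);
// Register the compressed record file so the reducer can retrieve it via context.getCacheFiles()
job.addCacheFile(new Path("hdfs:///data/record.txt.gz").toUri());

With the file registered, the reducer's setup() can open it from HDFS and decompress it: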
// Code fragment: using Hadoop decompression inside a MapReduce task
URI[] cacheFiles = context.getCacheFiles();
FileSystem hdfs = FileSystem.get(cacheFiles[0], context.getConfiguration());
FSDataInputStream recordDataInput = hdfs.open(new Path(cacheFiles[0].getPath()));
log.info("OfflineDataReducer:setup(),begin to read completed record file:"+cacheFiles[0].getPath());
// Pick a decompression codec based on the file extension
CompressionCodecFactory factory = new CompressionCodecFactory( context.getConfiguration());
CompressionCodec codec = factory.getCodec(new Path(cacheFiles[0].getPath()));
if (codec == null) {
log.warn("OfflineDataReducer setup():can't create decoder");
return;
}
InputStream in = codec.createInputStream(recordDataInput);
if(in == null){
log.warn("OfflineDataReducer setup():can't create InputStream");
return;
}
BufferedReader buffreader = new BufferedReader(new InputStreamReader(in));
String line = null;
while ((line = buffreader.readLine()) != null) {
// Process the file contents line by line
}