数据流的压缩/解压缩
CompressionCodec有两个方法可以用于轻松地压缩或解压缩数据。要想对正在被写入一个输出流的数据进行压缩,我们可以使用createOutputStream(OutputStream out)方法创建一个CompressionOutputStream,将其以压缩格式写入底层的流。相反,要想对从输入流读取而来的数据进行解压缩,则调用createInputStream(InputStream in)方法,从而获得一个CompressionInputStream,进而从底层的流读取未压缩的数据。
数据准备
数据流压缩
编解码器的包对应如下:
| 压缩格式 | 对应的编/解码器 |
| --- | --- |
DEFLATE | org.apache.hadoop.io.compress.DefaultCodec |
gzip | org.apache.hadoop.io.compress.GzipCodec |
bzip2 | org.apache.hadoop.io.compress.BZip2Codec |
以bzip2作为示例
package compresstest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.util.ReflectionUtils;
import java.io.*;
public class CompressTest {
    /**
     * Compresses a file using a Hadoop {@link CompressionCodec}, obtained
     * reflectively from its fully-qualified class name, via
     * {@code CompressionCodec.createOutputStream(OutputStream out)}.
     *
     * @param filepath path of the plain file to read
     * @param res      output path for the compressed file; the codec's default
     *                 extension (e.g. ".bz2") is appended automatically
     * @param method   fully-qualified class name of the codec to use,
     *                 e.g. "org.apache.hadoop.io.compress.BZip2Codec"
     * @throws IOException            if reading or writing fails
     * @throws ClassNotFoundException if the codec class cannot be loaded
     */
    public static void compress(String filepath, String res, String method)
            throws IOException, ClassNotFoundException {
        // 1. Open the source file.
        FileInputStream fis = new FileInputStream(new File(filepath));
        // 2. Load the codec class by name (wildcard type instead of a raw Class).
        Class<?> codecClass = Class.forName(method);
        // 3. Instantiate the codec and wrap the destination file in a compressing stream.
        CompressionCodec codec =
                (CompressionCodec) ReflectionUtils.newInstance(codecClass, new Configuration());
        // Append the codec's default extension so the output's format is recognizable.
        CompressionOutputStream cos = codec.createOutputStream(
                new FileOutputStream(new File(res + codec.getDefaultExtension())));
        // 4. Copy input to the compressed output with a 5 MB buffer.
        //    (Fixed: was 5*1204*1204, a typo for 5*1024*1024.)
        IOUtils.copyBytes(fis, cos, 5 * 1024 * 1024, false);
        // 5. Close both streams (closing cos flushes/finishes the compressed data).
        IOUtils.closeStream(fis);
        IOUtils.closeStream(cos);
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException {
        compress("D:\\BigdataTest\\Compress\\compress.txt",
                "D:\\BigdataTest\\Compress\\CompresResult\\compress",
                "org.apache.hadoop.io.compress.BZip2Codec");
    }
}
结果展示
数据流解压缩
将刚刚压缩好的包再解压缩回去。
package compresstest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import java.io.*;
public class CompressTest {
    /**
     * Compresses a file using a Hadoop {@link CompressionCodec}, obtained
     * reflectively from its fully-qualified class name, via
     * {@code CompressionCodec.createOutputStream(OutputStream out)}.
     *
     * @param filepath path of the plain file to read
     * @param res      output path for the compressed file; the codec's default
     *                 extension (e.g. ".bz2") is appended automatically
     * @param method   fully-qualified class name of the codec to use,
     *                 e.g. "org.apache.hadoop.io.compress.BZip2Codec"
     * @throws IOException            if reading or writing fails
     * @throws ClassNotFoundException if the codec class cannot be loaded
     */
    public static void compress(String filepath, String res, String method)
            throws IOException, ClassNotFoundException {
        // 1. Open the source file.
        FileInputStream fis = new FileInputStream(new File(filepath));
        // 2. Load the codec class by name (wildcard type instead of a raw Class).
        Class<?> codecClass = Class.forName(method);
        // 3. Instantiate the codec and wrap the destination file in a compressing stream.
        CompressionCodec codec =
                (CompressionCodec) ReflectionUtils.newInstance(codecClass, new Configuration());
        // Append the codec's default extension so the output's format is recognizable.
        CompressionOutputStream cos = codec.createOutputStream(
                new FileOutputStream(new File(res + codec.getDefaultExtension())));
        // 4. Copy input to the compressed output with a 5 MB buffer.
        //    (Fixed: was 5*1204*1204, a typo for 5*1024*1024 as used in deCmpression.)
        IOUtils.copyBytes(fis, cos, 5 * 1024 * 1024, false);
        // 5. Close both streams (closing cos flushes/finishes the compressed data).
        IOUtils.closeStream(fis);
        IOUtils.closeStream(cos);
    }

    /**
     * Decompresses a file whose codec is inferred from its file extension by
     * {@link CompressionCodecFactory}.
     *
     * @param inputfile  path of the compressed input file (extension selects the codec)
     * @param outputfile output path for the decompressed file, without extension
     * @param type       extension to append to the output file, e.g. "txt"
     * @throws IOException if no codec matches the input file's extension, or if
     *                     reading/writing fails
     */
    public static void deCmpression(String inputfile, String outputfile, String type)
            throws IOException {
        // 1. Infer the codec from the input file's extension (e.g. ".bz2" -> BZip2Codec).
        CompressionCodecFactory factory = new CompressionCodecFactory(new Configuration());
        CompressionCodec codec = factory.getCodec(new Path(inputfile));
        // getCodec returns null for an unrecognized extension; fail with a clear
        // message instead of a NullPointerException on the next line.
        if (codec == null) {
            throw new IOException("No compression codec found for file: " + inputfile);
        }
        // 2. Wrap the compressed file in a decompressing input stream.
        CompressionInputStream cis = codec.createInputStream(new FileInputStream(new File(inputfile)));
        // 3. Open the plain output file, appending the requested extension.
        FileOutputStream fos = new FileOutputStream(new File(outputfile + "." + type));
        // 4. Copy decompressed bytes with a 5 MB buffer.
        IOUtils.copyBytes(cis, fos, 5 * 1024 * 1024, false);
        // 5. Close both streams.
        IOUtils.closeStream(cis);
        IOUtils.closeStream(fos);
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException {
        deCmpression("D:\\BigdataTest\\Compress\\CompresResult\\compress.bz2",
                "D:\\BigdataTest\\Compress\\depress",
                "txt");
    }
}