理论分析
MapReduce源文件出现很多小文件时会生成很多切片，从而生成很多MapTask，而且每个Task处理的数据量很小，浪费资源，降低工作效率。
针对这种情况的优化无非以下几种方式:
(1)在数据采集的时候,就将小文件或小批数据合成大文件再上传HDFS
(2)在业务处理之前,在HDFS上使用mapreduce程序对小文件进行合并
(3)在mapreduce处理时,可采用CombineTextInputFormat提高效率
本文采用自定义InputFormat的方式使用mapreduce程序对小文件进行合并,处理输入小文件的问题。
(1)自定义一个类MyInputFormat继承FileInputFormat
(2)改写RecordReader,实现一次读取一个完整文件封装为KV
(3)自定义一个类MyRecordReader继承RecordReader来实现文件的读取
(4)在输出时使用SequenceFileOutputFormat输出合并文件
案例
InPutFormatMap
public class InPutFormatMap extends Mapper<NullWritable, BytesWritable, Text, BytesWritable> {
Text k = new Text();
@Override
protected void setup(Context context) throws IOException, InterruptedException {
FileSplit inputSplit = (FileSplit)context.getInputSplit();
String name = inputSplit.getPath().toString();
k.set(name);
}
@Override
protected void map(NullWritable key, BytesWritable value, Context context) throws IOException, InterruptedException {
context.write(k, value);
}
InPutFormatReduce
/**
 * Pass-through reducer: every (path, bytes) pair is written out unchanged, so
 * the resulting SequenceFile holds one record per original small file.
 */
public class InPutFormatReduce extends Reducer<Text, BytesWritable, Text, BytesWritable> {

    @Override
    protected void reduce(Text key, Iterable<BytesWritable> values, Context context)
            throws IOException, InterruptedException {
        // A key normally has a single value (one record per file), but emit
        // every value defensively in case of duplicate paths.
        for (BytesWritable fileBytes : values) {
            context.write(key, fileBytes);
        }
    }
}
InPutFormatDirver（驱动类）
/**
 * Driver: merges many small input files into one SequenceFile.
 * Usage: InPutFormatDirver &lt;input dir&gt; &lt;output dir&gt;
 */
public class InPutFormatDirver {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Fall back to the local demo paths only when no CLI arguments are
        // supplied — the original unconditionally overwrote real arguments.
        if (args.length < 2) {
            args = new String[]{
                "/Users/alu/workproject/Training/src/main/resources/define_in_out_format_homework/in",
                // The output directory must NOT live under the input directory
                // (the original used in/current), otherwise a second run would
                // re-read its own previous output as input.
                "/Users/alu/workproject/Training/src/main/resources/define_in_out_format_homework/out"
            };
        }

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(InPutFormatDirver.class);

        job.setMapperClass(InPutFormatMap.class);
        job.setReducerClass(InPutFormatReduce.class);

        // Map output and final output share the same key/value types, so one
        // pair of setOutput*Class calls suffices (the original set them twice).
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(BytesWritable.class);

        // Whole-file (unsplittable) input; binary SequenceFile output.
        job.setInputFormatClass(MyFileInPutFormat.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);

        // Remove a stale output directory so the job can be rerun.
        Path outputPath = new Path(args[1]);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, outputPath);

        // Propagate job success/failure to the shell instead of ignoring it.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
MyFileInPutFormat
/**
 * InputFormat that treats each input file as a single unsplittable record,
 * read in full by MyRecordReader as a (NullWritable, BytesWritable) pair.
 */
public class MyFileInPutFormat extends FileInputFormat<NullWritable, BytesWritable> {

    /** Never split: each small file must be consumed as one whole record. */
    @Override
    protected boolean isSplitable(JobContext context, Path filename) {
        return false;
    }

    @Override
    public RecordReader<NullWritable, BytesWritable> createRecordReader(InputSplit inputSplit, TaskAttemptContext context) throws IOException, InterruptedException {
        // Do not call initialize() here: the MapReduce framework invokes
        // RecordReader.initialize() itself before the first nextKeyValue(),
        // so the original's explicit call was redundant.
        return new MyRecordReader();
    }
}
MyRecordReader
/**
 * RecordReader that delivers exactly one record per split: the entire file's
 * bytes as the value, with a NullWritable key. Intended for use with
 * MyFileInPutFormat (isSplitable == false), so one split == one whole file.
 */
public class MyRecordReader extends RecordReader<NullWritable, BytesWritable> {
    private Configuration configuration;
    private FileSplit split;
    // True once the single record for this split has been emitted.
    private boolean processed = false;
    private final BytesWritable value = new BytesWritable();

    /** Captures the split and configuration; the file is read lazily in nextKeyValue(). */
    @Override
    public void initialize(InputSplit inputSplit, TaskAttemptContext context) throws IOException, InterruptedException {
        configuration = context.getConfiguration();
        split = (FileSplit) inputSplit;
    }

    /**
     * Reads the whole file into {@code value} on the first call; returns
     * false on every subsequent call.
     *
     * @throws IOException if the file cannot be read. (The original caught
     *         Exception, printed the stack trace, and still returned true,
     *         silently emitting an empty/partial record on failure.)
     */
    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        if (processed) {
            return false;
        }
        long length = split.getLength();
        // BytesWritable is backed by a single byte[], so a file of 2 GB or
        // more cannot be represented — fail loudly instead of overflowing
        // the (int) cast.
        if (length > Integer.MAX_VALUE) {
            throw new IOException("File too large for a single record: " + split.getPath());
        }
        byte[] contents = new byte[(int) length];
        Path path = split.getPath();
        FileSystem fs = path.getFileSystem(configuration);
        FSDataInputStream fis = fs.open(path);
        try {
            // Read the entire file into the buffer, then hand it to the value.
            IOUtils.readFully(fis, contents, 0, contents.length);
            value.set(contents, 0, contents.length);
        } finally {
            IOUtils.closeStream(fis);
        }
        // Mark done so the data is not read twice.
        processed = true;
        return true;
    }

    @Override
    public NullWritable getCurrentKey() throws IOException, InterruptedException {
        return NullWritable.get();
    }

    @Override
    public BytesWritable getCurrentValue() throws IOException, InterruptedException {
        return value;
    }

    /** 0 before the single record is emitted, 1 afterwards. */
    @Override
    public float getProgress() throws IOException, InterruptedException {
        return processed ? 1 : 0;
    }

    /** Nothing to close: the input stream is closed inside nextKeyValue(). */
    @Override
    public void close() throws IOException {
    }
}