package com.oracle.multiple;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class MultipleOutputMain {
public static class Map extends Mapper<LongWritable,Text,Text,Text>{
public void map(LongWritable key,Text value,Context context) throws IOException,InterruptedException{
String[] line = value.toString().split(" ", 2);
context.write(new Text(line[0]), value);
}
}
/**
 * Reducer: writes every value to a file named after its key via
 * {@link MultipleOutputs}, producing one output file per distinct key
 * (file names look like {@code <key>-r-00000}).
 */
public static class Reduce extends Reducer<Text, Text, NullWritable, Text> {

    private MultipleOutputs<NullWritable, Text> multipleOutputs;

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        // Initialize the per-task MultipleOutputs wrapper.
        multipleOutputs = new MultipleOutputs<NullWritable, Text>(context);
    }

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        for (Text value : values) {
            // Write to HDFS using the key as the base output path, so all values
            // with the same key land in the same file (<key>-r-00000).
            multipleOutputs.write(NullWritable.get(), value, key.toString());
        }
    }

    @Override
    protected void cleanup(Context context)
            throws IOException, InterruptedException {
        // Must close to flush the per-key record writers; guard against the
        // case where setup() failed before multipleOutputs was assigned.
        if (multipleOutputs != null) {
            multipleOutputs.close();
        }
    }
}
/**
 * Job driver.
 *
 * @param args args[0] = input path, args[1] = output path
 */
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    if (args.length < 2) {
        System.err.println("Usage: MultipleOutputMain <input path> <output path>");
        System.exit(2);
    }
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf);
    job.setJarByClass(MultipleOutputMain.class);
    job.setJobName("MultipleOutputsMain");
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    // Declare the reducer's actual output types (it emits <NullWritable, Text>).
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);
    // LazyOutputFormat creates output files only when records are actually
    // written, suppressing the empty default part-r-NNNNN files that would
    // otherwise appear next to the MultipleOutputs per-key files.
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
// Reference note (originally trailing blog text, converted to a comment so the
// file compiles): "Using MultipleOutputs in MapReduce to write grouped output
// to multiple files" — latest recommended article published 2019-10-30 11:07:06.