mapreduce中combiner应用实例

每一个 map 都可能会产生大量的本地输出。Combiner 的作用就是对 map 端的输出先做一次本地合并,以减少 map 和 reduce 节点之间的数据传输量,提高网络 IO 性能,是 MapReduce 的一种优化手段。

下面是一个应用实例(倒排索引),输入数据与预期输出结果都在代码注释中给出了示范。

package first.first_maven;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/*
 * 数据文件
 * 1.txt
 * hadoop
 * hadoop
 * hello world
 * 
 * 2.txt
 * hi hello wh
 * 
 * 结果:
 * hadoop 1.txt:2
 * hello 1.txt:1;2.txt:1
 * world 1.txt:1
 * hi 2.txt:1
 * wh 2.txt:1
 * 
 * 
 * 
 */


public class IndexDemo {
	
	
	public static class MyMapper1 extends Mapper<LongWritable, Text, Text, Text>{
		/*
		 * hadoop_1.txt 1
		 * hadoop_1.txt 1
		 * hello_1.txt 1
		 * world_1.txt 1
		 * 
		 */

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
		
//			super.map(key, value, context);
			//获取数据文件名
			InputSplit is=context.getInputSplit();
			String filename=((FileSplit)is).getPath().getName();
			
			String words[]=value.toString().split(" ");
			for(String word:words){
				
				context.write(new Text(word+"_"+filename), new Text(1+""));
				
			}
			
		}
		 
		
	}
	

public static class MyCombiner1 extends Reducer<Text, Text, Text, Text>{
	/*
	 * hadoop 1.txt:2
	 * hello 1.txt:1
	 * world 1.txt:1
	 * 
	 */
	

	@Override
	protected void reduce(Text key, Iterable<Text> value,Context context)
			throws IOException, InterruptedException {
		
//		super.reduce(key, value, context); 
		String[] strs=key.toString().split("_");
		int count=0;
		for(Text v:value){
			count+=Integer.parseInt(v.toString());
			
		}
		context.write(new Text(strs[0]), new Text(strs[1]+":"+count));
		
	}

	
	
}
	
	public static class MyReducer1 extends Reducer<Text, Text, Text, Text>{

		@Override
		protected void reduce(Text key, Iterable<Text> value,Context context)
				throws IOException, InterruptedException {
			
//			super.reduce(key, value, context);
			String str="";
			for(Text v:value){
				str+=v.toString()+";";
			}
			
			context.write(key, new Text(str.substring(0,str.length()-1)));
			
		}
		
		
	}
	
	
	public static void main(String[] args) throws Exception {
        Configuration conf=new Configuration();
        Job job = Job.getInstance(conf, "myjob");
        job.setJarByClass(WordCount.class);

        job.setMapperClass(MyMapper1.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job,new Path(args[0]));

        job.setCombinerClass(MyCombiner1.class);
        
        job.setReducerClass(MyReducer1.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileOutputFormat.setOutputPath(job,new Path(args[1]));

        int isok=job.waitForCompletion(true)?0:1;

        System.exit(isok);
        }

}

 

转载于:https://my.oschina.net/u/4010291/blog/3008355

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值