Hadoop Self-Study Guide 3: WordCount Explained (2)

1. Preface

Starting with release 0.20.2, Hadoop ships a new MapReduce API. The new classes live in the org.apache.hadoop.mapreduce package, while the old API remains in org.apache.hadoop.mapred.

2. WordCount with the new API

package hadoop.v3;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.hai.hdfs.utils.HDFSUtils;

/**
 * @author : chenhaipeng
 * @date : September 6, 2015, 2:00:50 AM
 */
public class WordCountNewAPI extends Configured implements Tool {
	
	public static class Map extends Mapper<LongWritable, Text, Text, IntWritable>{
		private final static IntWritable one = new IntWritable(1);
		private Text word = new Text();
		/* (non-Javadoc)
		 * @see org.apache.hadoop.mapreduce.Mapper#map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper.Context)
		 */
		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Tokenize the line on whitespace and emit (word, 1) for each token.
			String line = value.toString();
			StringTokenizer tokenizer = new StringTokenizer(line);
			while(tokenizer.hasMoreTokens()){
				word.set(tokenizer.nextToken());
				context.write(word, one);
			}
		}
		
	}
	
	public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable>{
		/* (non-Javadoc)
		 * @see org.apache.hadoop.mapreduce.Reducer#reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer.Context)
		 */
		@Override
		public void reduce(Text key, Iterable<IntWritable> values,Context context) throws IOException, InterruptedException {
			// Sum all the 1s emitted for this word and write the total count.
			int sum = 0;
			for(IntWritable value : values){
				sum += value.get();
			}
			context.write(key, new IntWritable(sum));
		}
		
	}

	public static void main(String[] args) throws Exception {
		int ret = ToolRunner.run(new WordCountNewAPI(), args);
		System.exit(ret);

	}
	
	// Delete the output directory up front; Hadoop refuses to start a job
	// whose output path already exists.
	public static void deletedir(String path){
		try {
			HDFSUtils.DeleteHDFSFile(path);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/* 
	 * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
	 */
	@Override
	public int run(String[] args) throws Exception {
		// getConf() is inherited from Configured; ToolRunner populates it.
		Job job = new Job(getConf());
		job.setJarByClass(WordCountNewAPI.class);
		job.setJobName("WordCountNewAPI");
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		
		FileInputFormat.setInputPaths(job, new Path(args[0]));
		deletedir(args[1]);
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		
		boolean success = job.waitForCompletion(true);
		return success ? 0 : 1;
		
	}

}
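
One note on the listing: HDFSUtils (imported from org.hai.hdfs.utils) is a small helper from earlier in this series, not part of Hadoop itself; deletedir() uses it to clear the output directory before the job starts. Its source is not shown in this post, so the following is only a guess at a minimal implementation, assuming DeleteHDFSFile simply wraps the standard FileSystem.delete() call:

package org.hai.hdfs.utils;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSUtils {
	// Recursively delete the given path on HDFS if it exists.
	public static void DeleteHDFSFile(String path) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path p = new Path(path);
		if (fs.exists(p)) {
			fs.delete(p, true); // true = delete recursively
		}
	}
}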


Differences from the old API:

1. In the new API, Mapper and Reducer are abstract classes rather than interfaces; the Map and Reduce classes above extend Mapper and Reducer instead of implementing them.

2. The new API passes a single Context object into map() and reduce(), taking over the roles of OutputCollector and Reporter.

3. Job configuration is done through Configuration (the old API used JobConf).

4. Job control is handled by the Job class (the old API drove jobs through JobClient); the sketch below shows the old-API version for contrast.
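
For contrast, here is a minimal sketch of the same WordCount written against the old org.apache.hadoop.mapred API (the class name WordCountOldAPI is illustrative, and the Tool/output-cleanup plumbing from the listing above is omitted):

package hadoop.v3;

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class WordCountOldAPI {

	// Old API: Mapper is an interface, so the class *implements* it;
	// MapReduceBase supplies empty configure()/close() methods.
	public static class Map extends MapReduceBase
			implements Mapper<LongWritable, Text, Text, IntWritable> {
		private final static IntWritable one = new IntWritable(1);
		private Text word = new Text();

		public void map(LongWritable key, Text value,
				OutputCollector<Text, IntWritable> output, Reporter reporter)
				throws IOException {
			StringTokenizer tokenizer = new StringTokenizer(value.toString());
			while (tokenizer.hasMoreTokens()) {
				word.set(tokenizer.nextToken());
				output.collect(word, one); // OutputCollector instead of Context
			}
		}
	}

	public static class Reduce extends MapReduceBase
			implements Reducer<Text, IntWritable, Text, IntWritable> {
		public void reduce(Text key, Iterator<IntWritable> values,
				OutputCollector<Text, IntWritable> output, Reporter reporter)
				throws IOException {
			int sum = 0;
			while (values.hasNext()) {
				sum += values.next().get();
			}
			output.collect(key, new IntWritable(sum));
		}
	}

	public static void main(String[] args) throws Exception {
		JobConf conf = new JobConf(WordCountOldAPI.class); // JobConf, not Job
		conf.setJobName("WordCountOldAPI");

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(IntWritable.class);

		conf.setMapperClass(Map.class);
		conf.setReducerClass(Reduce.class);

		FileInputFormat.setInputPaths(conf, new Path(args[0]));
		FileOutputFormat.setOutputPath(conf, new Path(args[1]));

		JobClient.runJob(conf); // JobClient drives the job, not Job.waitForCompletion()
	}
}

Note how map() and reduce() receive an OutputCollector and a Reporter instead of a single Context, how reduce() iterates with an Iterator rather than an Iterable, and how JobConf plus JobClient.runJob() replace the Job object.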


