package com.zhangbk.mapreduce;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * WordCount example using the new MapReduce API (org.apache.hadoop.mapreduce):
 * counts how often each word occurs in the input files.
 *
 * @author root
 */
public class WordCountMapReduce extends Configured implements Tool {
    // step 1: Mapper class
    /**
     * public class Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>
     * Input: (byte offset in the file, line of text).
     * Output: (word, 1) for every token in the line.
     *
     * @author root
     */
    public static class WordCountMapper extends
            Mapper<LongWritable, Text, Text, IntWritable> {

        private Text mapOutputKey = new Text();
        private static final IntWritable mapOutputValue = new IntWritable(1);

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // the raw line of text
            String lineValue = value.toString();
            // split the line into tokens; StringTokenizer splits on whitespace by default
            //String[] strs = lineValue.split("\t");
            StringTokenizer stringTokenizer = new StringTokenizer(lineValue);
            // iterate over the tokens
            while (stringTokenizer.hasMoreTokens()) {
                // get the word
                String wordValue = stringTokenizer.nextToken();
                // set the output key
                mapOutputKey.set(wordValue);
                // emit (word, 1)
                context.write(mapOutputKey, mapOutputValue);
            }
        }
    }
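    // For example (illustrative input, not from the original source): the line
    // "hadoop spark hadoop" makes this mapper emit (hadoop, 1), (spark, 1), (hadoop, 1).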
    // step 2: Reducer class
    /**
     * public class Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>
     * Input: (word, list of counts) grouped by the shuffle phase.
     * Output: (word, total count).
     *
     * @author root
     */
    public static class WordCountReducer extends
            Reducer<Text, IntWritable, Text, IntWritable> {

        private IntWritable outputValue = new IntWritable();

        @Override
        public void reduce(Text key, Iterable<IntWritable> values,
                Context context) throws IOException, InterruptedException {
            // running total for this word
            int sum = 0;
            // add up all the counts for the key
            for (IntWritable value : values) {
                sum += value.get();
            }
            // set the output value
            outputValue.set(sum);
            // emit (word, total count)
            context.write(key, outputValue);
        }
    }
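    // Continuing the illustrative example above, the shuffle groups the map output into
    // (hadoop, [1, 1]) and (spark, [1]), so this reducer emits (hadoop, 2) and (spark, 1).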
    // step 3: Driver, assembles and submits the job
    @Override
    public int run(String[] args) throws Exception {
        // 1. get the configuration
        Configuration conf = this.getConf();
        // 2. create the job
        Job job = Job.getInstance(conf, this.getClass().getSimpleName());
        // the jar that contains the job classes
        job.setJarByClass(this.getClass());
        // 3. set the job: input -> map -> reduce -> output
        // input path
        Path inPath = new Path(args[0]);
        FileInputFormat.addInputPath(job, inPath);
        // mapper
        job.setMapperClass(WordCountMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // reducer
        job.setReducerClass(WordCountReducer.class);
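        // Not part of the original example: the reducer could optionally also be used
        // as a combiner (its input and output types match), pre-summing counts on the
        // map side to cut shuffle traffic:
        // job.setCombinerClass(WordCountReducer.class);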
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // output path (must not already exist)
        Path outPath = new Path(args[1]);
        FileOutputFormat.setOutputPath(job, outPath);
        // 4. submit the job and wait for completion
        boolean isSuccess = job.waitForCompletion(true);
        return isSuccess ? 0 : 1;
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // ToolRunner parses generic options (e.g. -D properties) and then calls run()
        int status = ToolRunner.run(conf, new WordCountMapReduce(), args);
        System.exit(status);
    }
}
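/*
 * Usage sketch (jar name and HDFS paths below are illustrative, not from the
 * original source):
 *
 *   hadoop jar wordcount.jar com.zhangbk.mapreduce.WordCountMapReduce \
 *       /user/root/wordcount/input /user/root/wordcount/output
 *
 * args[0] is the input directory and args[1] the output directory; the output
 * directory must not exist before the job runs.
 */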