// WordCount — Hadoop MapReduce word-count example.
package a.b.c;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
// Extends Mapper and overrides the map method.
/**
 * Mapper: splits each comma-separated input line into tokens and emits
 * (token, 1) for every token.
 */
class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reused output objects: Hadoop serializes the key/value on each
    // context.write(), so one mutable instance per task avoids allocating
    // new Writables for every record.
    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    /**
     * @param key     byte offset of the line within the input split (unused)
     * @param value   one line of input text
     * @param context sink for the (word, 1) output pairs
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        for (String token : value.toString().split(",")) {
            word.set(token);
            context.write(word, ONE);
        }
    }
}
// Extends Reducer and overrides the reduce method.
/**
 * Reducer: sums the per-word counts emitted by the mapper and writes
 * (word, total) for each distinct word.
 */
class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    // Reused output value — avoids a new IntWritable per key (Hadoop idiom).
    private final IntWritable result = new IntWritable();

    /**
     * @param key     the word
     * @param values  all counts emitted for this word
     * @param context sink for the (word, total) output pair
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        result.set(sum);
        context.write(key, result);
    }
}
/**
 * Driver for the word-count MapReduce job.
 *
 * <p>Usage: {@code WordCount <input path> <output path>}
 */
public class WordCount {
    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        if (args.length < 2) {
            System.err.println("Usage: WordCount <input path> <output path>");
            System.exit(2);
        }

        // Job setup.
        Configuration configuration = new Configuration();
        Job job = Job.getInstance(configuration, "wordCount");
        job.setJarByClass(WordCount.class);

        // Mapper configuration.
        job.setMapperClass(WordCountMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Reducer configuration. The reducer also serves as a combiner,
        // which is safe because integer addition is associative and
        // commutative; it reduces shuffle volume without changing output.
        job.setReducerClass(WordCountReducer.class);
        job.setCombinerClass(WordCountReducer.class);

        // Final (reduce-side) output types. Without these the framework
        // defaults to LongWritable/Text and the job fails at runtime with a
        // type-mismatch error when the reducer writes (Text, IntWritable).
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Input and output paths from the command line.
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Exit 0 on success, 1 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}