Mapper part:
import java.io.IOException;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Mapper;
/* 1.确定K1,V1,K2,V2的变量类型
* 2.重写map方法,其中map方法里的key、value变量是K1,V1
* 3.将处理好的数据传给上下文
* */
/**
 * Map stage of word count.
 *
 * Input  (K1, V1): byte offset of the line (LongWritable), line contents (Text).
 * Output (K2, V2): one (word, 1) pair (Text, IntWritable) per word occurrence.
 */
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    // Reused output objects: avoids allocating two fresh Writables per word,
    // a standard Hadoop optimization for hot map loops.
    private final Text word = new Text();
    private static final IntWritable ONE = new IntWritable(1);

    /**
     * Splits one input line into words and writes (word, 1) to the context.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   the line of text to tokenize
     * @param context sink for the (word, 1) output pairs
     */
    @Override
    protected void map(LongWritable key, Text value,
            Mapper<LongWritable, Text, Text, IntWritable>.Context context)
            throws IOException, InterruptedException {
        // 1. Convert the incoming Text to a String so we can tokenize it.
        String line = value.toString();
        // 2. Split on runs of whitespace. The previous split(" ") produced
        //    empty tokens for consecutive spaces (and ignored tabs), so
        //    "a  b" would have emitted an empty "word".
        String[] words = line.split("\\s+");
        // 3. Emit (word, 1) for every non-empty token.
        for (String w : words) {
            if (w.isEmpty()) {
                // A line starting with whitespace yields one leading empty token.
                continue;
            }
            word.set(w);
            context.write(word, ONE);
        }
    }
}
Reducer part:
import java.io.IOException;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Reducer;
/**
 * Reduce stage of word count.
 *
 * Input : (word, [1, 1, ...]) — all partial counts emitted for one word.
 * Output: (word, total) — the summed occurrence count for that word.
 */
public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    /**
     * Sums the counts collected for a single word and emits (word, total).
     *
     * @param key     the word being aggregated
     * @param value   all counts emitted for this word by the map stage
     * @param context sink for the final (word, total) pair
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> value,
            Reducer<Text, IntWritable, Text, IntWritable>.Context context)
            throws IOException, InterruptedException {
        // Accumulate every partial count attached to this key.
        int total = 0;
        for (IntWritable partial : value) {
            total += partial.get();
        }
        context.write(key, new IntWritable(total));
    }
}
Driver part:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * Driver that configures and submits the word-count MapReduce job.
 */
public class WordCountDriver {
    /**
     * Builds the job, wires up the mapper/reducer, and waits for completion.
     * Exits with status 0 on success, 1 on failure.
     *
     * @param args optional: args[0] = input directory, args[1] = output
     *             directory. Defaults to the original hard-coded paths
     *             (E:/mr/input, E:/mr/output) when not supplied, so existing
     *             invocations keep working.
     */
    public static void main(String[] args) throws Exception {
        // Generalized: paths can now come from the command line; the previous
        // hard-coded values remain the fallback for backward compatibility.
        String inputDir = args.length > 0 ? args[0] : "E:/mr/input";
        String outputDir = args.length > 1 ? args[1] : "E:/mr/output";
        // 1. Job configuration (also picks up *-site.xml from the classpath).
        Configuration conf = new Configuration();
        // 2. Execution mode may be forced here, e.g.:
        //conf.set("mapreduce.framework.name", "local");
        // 3. Create the job and record which jar holds the driver class so the
        //    framework can ship it to the cluster.
        Job wcjob = Job.getInstance(conf, "word count");
        wcjob.setJarByClass(WordCountDriver.class);
        // 4. Wire up the map and reduce implementations. The reducer also
        //    serves as a combiner (summing is associative and commutative),
        //    cutting shuffle traffic at no cost to correctness.
        wcjob.setMapperClass(WordCountMapper.class);
        wcjob.setCombinerClass(WordCountReducer.class);
        wcjob.setReducerClass(WordCountReducer.class);
        // 5. Map-output key/value types.
        wcjob.setMapOutputKeyClass(Text.class);
        wcjob.setMapOutputValueClass(IntWritable.class);
        // 6. Final (reduce) output key/value types.
        wcjob.setOutputKeyClass(Text.class);
        wcjob.setOutputValueClass(IntWritable.class);
        // 7. Directory containing the files to process.
        FileInputFormat.setInputPaths(wcjob, inputDir);
        // 8. Directory where results are written (must not already exist).
        FileOutputFormat.setOutputPath(wcjob, new Path(outputDir));
        // 9. Submit and block until the job finishes, printing progress.
        boolean res = wcjob.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
Final result: