需求
创建三个文件,放在同一文件夹下,求文件中每个单词在各个文件中出现的频率,输出格式为:
I a.txt 1,b.txt 1,c.txt 1
amin a.txt 1,c.txt 1
coco a.txt 1
hello a.txt 2,b.txt 2,c.txt 2
需求分析
Map1—>I-a.txt 1
I-b.txt 1
I-c.txt 1
Reduce1—>amin-a.txt 1
amin-c.txt 1
coco-a.txt 1
hello-a.txt 2
hello-b.txt 2
Map2—>amin a.txt 1
amin c.txt 1
coco a.txt 1
Reduce2—>I a.txt 1,b.txt 1,c.txt 1
amin a.txt 1,c.txt 1
coco a.txt 1
hello a.txt 2,b.txt 2,c.txt 2
源代码
第一个MapReduce
public class CreateIndexOne {
public static class MapTask extends Mapper<LongWritable, Text, Text, IntWritable>{
String pathName = null;
@Override
protected void setup(Mapper<LongWritable, Text, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
//获取文件名
FileSplit fileSplit = (FileSplit) context.getInputSplit();
pathName = fileSplit.getPath().getName();
}
@Override
protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
String[] split = value.toString().split(" ");
for (String word : split) {
context.write(new Text(word+"-"+pathName), new IntWritable(1));
}
}
}
public static class ReduceTask extends Reducer<Text, IntWritable, Text, IntWritable>{
@Override
protected void reduce(Text key, Iterable<IntWritable> values,
Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
int count = 0;
for (IntWritable intWritable : values) {
count++;
}
context.write(key, new IntWritable(count));
}
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
Job job = Job.getInstance(conf,"index1");
job.setMapperClass(MapTask.class);
job.setReducerClass(ReduceTask.class);
job.setJarByClass(CreateIndexOne.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path("D:\\a\\b"));
FileOutputFormat.setOutputPath(job, new Path("D:\\a\\index1-out"));
//判断文件是否存在
File file = new File("D:\\a\\index1-out");
if(file.exists()){
FileUtils.deleteDirectory(file);
}
boolean completion = job.waitForCompletion(true);
System.out.println(completion?"你很优秀!":"调bug");
}
}
第二个MapReduce
/**
 * Second MapReduce pass: regroups the {@code word-filename<TAB>count} records
 * produced by pass one so that each word yields a single line listing every
 * file it occurs in, e.g. {@code hello  a.txt 2,b.txt 2,c.txt 2}.
 */
public class CreateIndexTwo {

    public static class MapTask extends Mapper<LongWritable, Text, Text, Text> {

        private final Text outKey = new Text();
        private final Text outValue = new Text();

        /**
         * Splits "word-filename&lt;TAB&gt;count" into key=word and
         * value="filename&lt;TAB&gt;count".
         */
        @Override
        protected void map(LongWritable key, Text value,
                Mapper<LongWritable, Text, Text, Text>.Context context)
                throws IOException, InterruptedException {
            // Limit the split to 2 parts so any '-' inside the remainder
            // (e.g. a hyphenated file name) is preserved intact.
            String[] split = value.toString().split("-", 2);
            if (split.length < 2) {
                return; // skip malformed lines instead of crashing the task
            }
            outKey.set(split[0]);
            outValue.set(split[1]);
            context.write(outKey, outValue);
        }
    }

    public static class ReduceTask extends Reducer<Text, Text, Text, Text> {

        /** Joins all "filename count" entries of one word with commas. */
        @Override
        protected void reduce(Text key, Iterable<Text> values,
                Reducer<Text, Text, Text, Text>.Context context)
                throws IOException, InterruptedException {
            StringBuilder sb = new StringBuilder();
            for (Text num : values) {
                // Prepend the separator from the second entry on; this avoids
                // the fragile trailing-comma deleteCharAt, which would throw on
                // an empty builder.
                if (sb.length() > 0) {
                    sb.append(',');
                }
                sb.append(num.toString());
            }
            context.write(key, new Text(sb.toString()));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "index2");
        job.setMapperClass(MapTask.class);
        job.setReducerClass(ReduceTask.class);
        job.setJarByClass(CreateIndexTwo.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path("D:\\a\\index1-out"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\a\\index2-out"));

        // Remove a stale output directory before running: Hadoop refuses to
        // start a job whose output path already exists.
        File file = new File("D:\\a\\index2-out");
        if (file.exists()) {
            FileUtils.deleteDirectory(file);
        }

        boolean completion = job.waitForCompletion(true);
        System.out.println(completion ? "你很优秀!" : "调bug");
    }
}