Problem: given several files, count how many times each word appears and record which file each occurrence comes from (an inverted index).
For example: Tom-1.text 5 (Tom appears 5 times in 1.text).
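For concreteness, here is a minimal made-up example (the file names and contents below are hypothetical, not from the original problem):

1.text contains: Tom Jerry Tom
2.text contains: Tom Spike

Desired final output, one line per word listing each file and its count (the order of files within a line is not guaranteed, and the trailing comma comes from how the second reducer concatenates entries):

Jerry	1.text	1,
Spike	2.text	1,
Tom	1.text	2,2.text	1,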
(1) The first job reads the text files, tags every word with the name of the file it came from, and counts occurrences of each word-file pair:
package com.diit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class InveseIndexOne {

    public static class InveseIndexOneMap extends Mapper<LongWritable, Text, Text, IntWritable> {
        String fileName = null;
        Text k = new Text();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Get the name of the file this split belongs to
            FileSplit inputSplit = (FileSplit) context.getInputSplit();
            fileName = inputSplit.getPath().getName();
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split the line into words on spaces (split("") would split it into single characters)
            String[] words = value.toString().split(" ");
            for (String word : words) {
                // Emit "word-filename" as the key and 1 as the count
                k.set(word + "-" + fileName);
                context.write(k, new IntWritable(1));
            }
        }
    }

    public static class InveseIndexReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // Sum the counts instead of counting iterations, so the result
            // stays correct even if a combiner pre-aggregates the 1s
            int count = 0;
            for (IntWritable v : values) {
                count += v.get();
            }
            context.write(key, new IntWritable(count));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        Job job = Job.getInstance(configuration);
        job.setJarByClass(InveseIndexOne.class);
        job.setMapperClass(InveseIndexOneMap.class);
        job.setReducerClass(InveseIndexReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Fill in the actual input path here
        FileInputFormat.setInputPaths(job, new Path(""));
        // If the output directory already exists, delete it first
        Path output = new Path("");
        FileSystem fileSystem = FileSystem.get(configuration);
        boolean exists = fileSystem.exists(output);
        if (exists) {
            fileSystem.delete(output, true);
        }
        FileOutputFormat.setOutputPath(job, output);
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
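Given the hypothetical input above, this first job writes one line per word-file pair, with a tab (Hadoop's default key/value separator) between the key and the count:

Jerry-1.text	1
Spike-2.text	1
Tom-1.text	2
Tom-2.text	1

(2) The second job splits each of these lines on "-", groups by word, and concatenates every "filename count" pair for that word onto a single line: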
package com.diit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class InveseIndexTwo {

    public static class InveseIndexTwoMap extends Mapper<LongWritable, Text, Text, Text> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // A line from job one looks like "Tom-1.text<TAB>5"; split it on "-"
            String[] split = value.toString().split("-");
            // The word itself
            String word = split[0];
            // The file name and its count, e.g. "1.text<TAB>5"
            String index = split[1];
            context.write(new Text(word), new Text(index));
        }
    }

    public static class InveseIndexTwoReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // StringBuilder is not thread-safe but is faster than StringBuffer;
            // thread safety is not a concern here
            StringBuilder stringBuilder = new StringBuilder();
            for (Text v : values) {
                // Append each value, not the whole iterable
                stringBuilder.append(v).append(",");
            }
            context.write(key, new Text(stringBuilder.toString()));
        }
    }

    public static void main(String[] args) throws Exception {
        // Job configuration
        Configuration conf = new Configuration();
        // Pass conf in; Job.getInstance() with no arguments would ignore it
        Job job = Job.getInstance(conf);
        job.setJarByClass(InveseIndexTwo.class);
        job.setMapperClass(InveseIndexTwoMap.class);
        job.setReducerClass(InveseIndexTwoReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // Input: the output directory of job one; output: a new directory
        FileInputFormat.setInputPaths(job, new Path(""));
        FileOutputFormat.setOutputPath(job, new Path(""));
        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
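The two jobs are chained by hand: fill in the input path of InveseIndexOne, point the input path of InveseIndexTwo at the output directory of the first job, and run them in order. A minimal sketch, assuming the code is packaged as invindex.jar (a hypothetical name):

hadoop jar invindex.jar com.diit.InveseIndexOne
hadoop jar invindex.jar com.diit.InveseIndexTwo

A cleaner alternative would be to read the paths from args instead of hard-coding them, so the same jar works on any input without recompiling.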