This is a special case of the database join operation: the goal is to join file1 with file2.
file1 is as follows:
1 jack
2 daniel
4 martin
5 king
3 mary
4 jane
file2 is as follows:
5 london
2 london
4 rome
2 glasgow
3 paris
1 madrid
The required result is as follows:
1 jack:madrid
2 daniel:london
2 daniel:glasgow
3 mary:paris
4 martin:rome
4 jane:rome
5 king:london
A simple example: file1 contains the record 1 cjd, and file2 contains 1 shanghai and 1 hangzhou. After the map phase these become 1 0cjd, 1 1shanghai and 1 1hangzhou (each value is prefixed with a tag recording which file it came from). After the reduce phase the output is 1 cjd:shanghai and 1 cjd:hangzhou.
The code is as follows:
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
public class Join extends Configured implements Tool{
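// counter for input lines that cannot be parsed and are skipped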
enum Counter
{
LINERSKIP;
}
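// Mapper for file1: emits (id, "0" + name); the leading "0" marks the record as coming from file1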
public static class MapOne extends Mapper<LongWritable,Text,Text,Text>
{
public void map(LongWritable key,Text value,Context context) throws IOException,InterruptedException
{
try{
String tag = "0";
String[] linesplit = value.toString().split(" ");
context.write(new Text(linesplit[0]), new Text(tag+linesplit[1]));
}
catch(java.lang.ArrayIndexOutOfBoundsException e){
context.getCounter(Counter.LINERSKIP).increment(1);
return;
}
}
}
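// Mapper for file2: emits (id, "1" + place); the leading "1" marks the record as coming from file2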
public static class MapTwo extends Mapper<LongWritable,Text,Text,Text>
{
public void map(LongWritable key,Text value,Context context) throws IOException,InterruptedException
{
try{
String tag = "1";
String[] linesplit = value.toString().split(" ");
context.write(new Text(linesplit[0]), new Text(tag+linesplit[1]));
}
catch(java.lang.ArrayIndexOutOfBoundsException e){
context.getCounter(Counter.LINERSKIP).increment(1);
return;
}
}
}
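// Reducer: for each id, separates the tagged values back into file1 names and file2 places and emits every name:place combination (the join result)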
public static class Reduce extends Reducer<Text,Text,Text,Text>
{
public void reduce(Text key , Iterable<Text> values,Context context) throws IOException,InterruptedException
{
List<String> list0 = new ArrayList<String>();
List<String> list1 = new ArrayList<String>();
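// sort the tagged values into two buckets: prefix '0' = names from file1, prefix '1' = places from file2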
for(Text value : values)
{
String str = value.toString();
if(str.charAt(0) == '0'){
list0.add(str.substring(1));
}
else{
list1.add(str.substring(1));
}
}
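// emit the cross product: every file1 name joined with every file2 place for this key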
for(String str0 : list0)
for(String str1 : list1)
{
String valueinfo = str0 + ":" + str1;
context.write(key, new Text(valueinfo) );
}
}
}
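// job configuration: each input file gets its own mapper, and both feed the same reducer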
public int run(String[] args)throws Exception
{
Configuration conf = getConf();
Job job = new Job(conf,"Join");
job.setJarByClass(Join.class);
MultipleInputs.addInputPath(job, new Path(args[0]), TextInputFormat.class,MapOne.class);// the two mappers correspond to the two input files
MultipleInputs.addInputPath(job, new Path(args[1]), TextInputFormat.class,MapTwo.class);
FileOutputFormat.setOutputPath(job,new Path(args[2]));
job.setReducerClass(Reduce.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
job.waitForCompletion(true);
return job.isSuccessful() ? 0 : 1;
}
public static void main(String[] args) throws Exception
{
int res = ToolRunner.run(new Configuration(), new Join(),args);
System.exit(res);
}
}
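To run the job, one possible invocation looks like this (a sketch: the jar name join.jar and the HDFS paths below are assumptions, adjust them to your environment):
hadoop jar join.jar Join /input/file1 /input/file2 /output
The third argument is the output directory, which must not already exist.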