实例中给出child-parent表,要求输出grandchild-grandparent表
样例输入:
file:
child parent
Tom Lucy
Tom Jack
Jone Lucy
Jone Jack
Lucy Mary
Lucy Ben
Jack Alice
Jack Jesse
Terry Alice
Terry Jesse
Philip Terry
Philip Alma
Mark Terry
Mark Alma
package mapreduce.test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * Single-table self join: given a child-parent table, emit the
 * grandchild-grandparent table.
 *
 * The mapper emits each (child, parent) pair twice with a tag so the
 * reducer can join the table against itself on the shared person:
 *   key = parent, value = "1+child+parent"  ("left table":  child of key)
 *   key = child,  value = "2+child+parent"  ("right table": parent of key)
 * The reducer then pairs every child of the key with every parent of the
 * key (Cartesian product), producing grandchild-grandparent rows.
 */
public class STjoin {
    // Header guard: incremented after the header row is written, so the
    // "grandchild grandparent" header appears once per reducer JVM.
    public static int time = 0;

    /**
     * Splits each input line into child and parent on the first space and
     * emits it once per join side, tagged "1" (left) or "2" (right).
     * The header row "child parent" and malformed lines are skipped.
     */
    public static class Map extends Mapper<Object, Text, Text, Text> {
        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            // Robustness fix: the original scanned for ' ' with charAt and
            // threw StringIndexOutOfBoundsException on a line without one.
            int sep = line.indexOf(' ');
            if (sep < 0) {
                return;
            }
            String childname = line.substring(0, sep);
            String parentname = line.substring(sep + 1);
            // Skip the header row; use equals(), not compareTo(), for clarity.
            if (!"child".equals(childname)) {
                // Left table: keyed by parent so the key's children collect here.
                context.write(new Text(parentname),
                        new Text("1" + "+" + childname + "+" + parentname));
                // Right table: keyed by child so the key's parents collect here.
                context.write(new Text(childname),
                        new Text("2" + "+" + childname + "+" + parentname));
            }
        }
    }

    /**
     * For each person (the key), gathers their children (tag '1') and their
     * parents (tag '2') from the value list, then writes the Cartesian
     * product as (grandchild, grandparent) rows.
     */
    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            if (time == 0) { // write the table header exactly once
                // Bug fix: original emitted the misspelled "grandparend".
                context.write(new Text("grandchild"), new Text("grandparent"));
                time++;
            }
            // Bug fix: original used fixed String[10] arrays, which overflow
            // for any key with more than ten children or parents.
            List<String> grandchildren = new ArrayList<>();
            List<String> grandparents = new ArrayList<>();
            for (Text value : values) {
                String record = value.toString();
                if (record.isEmpty()) {
                    continue;
                }
                // Record layout produced by the mapper: <tag>+<child>+<parent>.
                // Bug fix: the original hand-rolled parse loop never advanced
                // its index (missing i++), so any non-empty record caused an
                // infinite loop and the job could never complete.
                char relationtype = record.charAt(0);
                int firstPlus = record.indexOf('+');
                int secondPlus = record.indexOf('+', firstPlus + 1);
                if (firstPlus < 0 || secondPlus < 0) {
                    continue; // malformed record — skip rather than crash
                }
                String childname = record.substring(firstPlus + 1, secondPlus);
                String parentname = record.substring(secondPlus + 1);
                if (relationtype == '1') {
                    // Left table: the key's child is a grandchild candidate.
                    grandchildren.add(childname);
                } else {
                    // Right table: the key's parent is a grandparent candidate.
                    grandparents.add(parentname);
                }
            }
            // Cartesian product of the two sides completes the self-join.
            for (String grandchild : grandchildren) {
                for (String grandparent : grandparents) {
                    context.write(new Text(grandchild), new Text(grandparent));
                }
            }
        }
    }

    /**
     * Configures and submits the join job.
     *
     * @param args args[0] = input path, args[1] = output path
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Job.getInstance replaces the deprecated Job(Configuration, String)
        // constructor.
        Job job = Job.getInstance(conf, "single table join");
        job.setJarByClass(STjoin.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}