MapReduce关系代数运算——自然连接

MapReduce关系代数运算——自然连接

关系沿用之前的R。

创建两个文件

表1 student
id name sex age
1 Amy female 18
2 Tom male 19
3 Sam male 21
4 John male 19
5 Lily female 21
6 Rose female 20
表2 grade
id class grade
1 Math 89
2 Math 75
4 English 85
3 English 95
5 Math 91
5 English 88
6 Math 78
6 English 99
2 English 80

MapReduce程序设计

  • NaturalJoinMap
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 
public class NaturalJoinMap extends Mapper<LongWritable, Text, IntWritable, Text> {
 
    private String fileName = "";
    private Text val = new Text();
    private IntWritable stuKey = new IntWritable();
 
    protected void setup(Context context) throws java.io.IOException, InterruptedException {
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        fileName = fileSplit.getPath().getName();
    };
 
    protected void map(LongWritable key, Text value, Context context) throws java.io.IOException, InterruptedException {
        String[] arr = value.toString().split(" ");
        stuKey.set(Integer.parseInt(arr[0]));
        val.set(fileName + " " + value.toString());
        context.write(stuKey, val);
    };
 
}
  • NaturalJoinReduce
import java.util.ArrayList;
import java.util.List;
 
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
 
public class NaturalJoinReduce extends Reducer<IntWritable, Text, Text, NullWritable> {
 
    private Text student = new Text();
    private Text value = new Text();
 
    protected void reduce(IntWritable key, Iterable<Text> values, Context context) throws java.io.IOException, InterruptedException {
        List<String> grades = new ArrayList<String>();
        for (Text val : values) {
            if (val.toString().contains("student")) {
                student.set(studentStr(val.toString()));
            } else {
                grades.add(gradeStr(val.toString()));
            }
        }
        for (String grade : grades) {
            value.set(student.toString() + grade);
            context.write(value, NullWritable.get());
        }
    };
 
    private String studentStr(String line) {
        String[] arr = line.split(" ");
        StringBuilder str = new StringBuilder();
        for (int i = 1; i < arr.length; i++) {
            str.append(arr[i] + " ");
        }
        return str.toString();
    }
 
    private String gradeStr(String line) {
        String[] arr = line.split(" ");
        StringBuilder str = new StringBuilder();
        for (int i = 2; i < arr.length; i++) {
            str.append(arr[i] + " ");
        }
        return str.toString();
    }
 
}
  • NaturalJoin
import java.io.IOException;
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
/**
 * Driver for the natural-join MapReduce job.
 *
 * <p>Usage: {@code hadoop jar NaturalJoin.jar <inputPath> <outputPath>}.
 * The input directory must contain both the student and the grade files.
 */
public class NaturalJoin {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        if (args == null || args.length != 2) {
            throw new RuntimeException("请输入输入路径、输出路径");
        }
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJobName("NaturalJoin");
        job.setJarByClass(NaturalJoin.class);

        job.setMapperClass(NaturalJoinMap.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(NaturalJoinReduce.class);
        // The reducer emits (Text, NullWritable); declaring IntWritable here
        // (as the original did) makes the job fail at runtime with a
        // "wrong key class" error.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.addInputPaths(job, args[0]);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}

 

运行

像之前的WordCount一样将代码打包出来,生成NaturalJoin.jar文件:

hadoop jar NaturalJoin.jar /input /output

输出结果:
输出结果
欢迎查看我的博客:Welcome To Ryan’s Home

  • 6
    点赞
  • 40
    收藏
    觉得还不错? 一键收藏
  • 7
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 7
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值