The reduceJoin algorithm in MapReduce

Below is the source code for a reduce-side join (reduceJoin). I find a reduceJoin quite simple to write and easy to follow: the mapper keys every record by the product id (pid), and the reducer merges the order fields and the product fields of each pid into one record.
Data:

orders.txt
	1001,20150710,p0001,2
	1002,20150710,p0002,3
	1002,20150710,p0003,3
product.txt
	p0001,小米5,1000,2000
	p0002,锤子T1,1000,3000

I carry both record types in a single JavaBean.
JavaBean

import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

public class javaBean implements Writable {
    // fields coming from orders.txt
    private String id;
    private String date;
    private String pid;
    private String amount;
    // fields coming from product.txt (its pid becomes the map output key)
    private String name;
    private String categoryId;
    private String price;

    @Override
    public void write(DataOutput out) throws IOException {
        // appending "" turns a null field into the string "null", so writeUTF never sees a real null;
        // the reducer later relies on that literal "null" to tell order beans from product beans
        out.writeUTF(id + "");
        out.writeUTF(date + "");
        out.writeUTF(pid + "");
        out.writeUTF(amount + "");
        out.writeUTF(name + "");
        out.writeUTF(categoryId + "");
        out.writeUTF(price + "");
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // fields must be read back in exactly the order they were written
        this.id = in.readUTF();
        this.date = in.readUTF();
        this.pid = in.readUTF();
        this.amount = in.readUTF();
        this.name = in.readUTF();
        this.categoryId = in.readUTF();
        this.price = in.readUTF();
    }
    // I've left out the no-arg/all-args constructors, getters/setters and toString here....
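    // A minimal sketch of those omitted members (my assumption, not the author's original code).
    // toString matters here: the reducer emits this bean as the output key, so whatever
    // toString returns is exactly what lands in the output file.
    public javaBean() {}  // Hadoop needs a public no-arg constructor to deserialize the bean

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    // getters/setters for date, pid, amount, name, categoryId and price follow the same pattern

    @Override
    public String toString() {
        return id + "\t" + date + "\t" + pid + "\t" + amount + "\t"
                + name + "\t" + categoryId + "\t" + price;
    }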
}


Mapper

The mapper reads the name of the file its split comes from to tell order lines from product lines, fills the corresponding half of the bean, and keys everything by the product id (pid):

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

public class mapJoin extends Mapper<LongWritable, Text, Text, javaBean> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

        // the file name of the current split tells us which table this line belongs to
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        String name = fileSplit.getPath().getName();
        javaBean bean = new javaBean();

        String[] split = value.toString().split(",");

        if (name.contains("orders")) {
            // order line: id, date, pid, amount -- keyed by pid
            bean.setId(split[0]);
            bean.setDate(split[1]);
            bean.setPid(split[2]);
            bean.setAmount(split[3]);
            context.write(new Text(bean.getPid()), bean);
        } else {
            // product line: pid, name, categoryId, price -- also keyed by pid
            bean.setName(split[1]);
            bean.setCategoryId(split[2]);
            bean.setPrice(split[3]);
            context.write(new Text(split[0]), bean);
        }
    }
}

Reducer

For each pid the reducer receives the order bean and the matching product bean together; it copies both halves into one bean and writes it out:

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class reduceJoin extends Reducer<Text, javaBean, javaBean, NullWritable> {
    @Override
    protected void reduce(Text key, Iterable<javaBean> values, Context context) throws IOException, InterruptedException {
        javaBean joined = new javaBean();
        for (javaBean value : values) {
            // order beans carry a real id; product beans went through write() with a null id,
            // so their id arrives here as the literal string "null"
            if (value.getId() != null && !value.getId().equals("null")) {
                joined.setId(value.getId());
                joined.setDate(value.getDate());
                joined.setPid(value.getPid());
                joined.setAmount(value.getAmount());
            } else {
                joined.setName(value.getName());
                joined.setCategoryId(value.getCategoryId());
                joined.setPrice(value.getPrice());
            }
        }
        // note: this emits one record per pid, so it assumes each product appears in at most one order;
        // several orders for the same pid would overwrite each other
        context.write(joined, NullWritable.get());
    }
}
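
With the sample files above and the tab-separated toString sketched earlier (an assumption of mine), the joined output would come out roughly as:

	1001	20150710	p0001	2	小米5	1000	2000
	1002	20150710	p0002	3	锤子T1	1000	3000
	1002	20150710	p0003	3	null	null	null

The last line keeps null product fields because product.txt has no p0003 row.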

Driver class

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class JobMain {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "reduceJoin");
        job.setJarByClass(JobMain.class);

        job.setMapperClass(mapJoin.class);
        job.setReducerClass(reduceJoin.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(javaBean.class);

        job.setOutputKeyClass(javaBean.class);
        job.setOutputValueClass(NullWritable.class);



        // both orders.txt and product.txt must sit under this single input path
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
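
A sketch of how the job might be packaged and launched; the jar name and HDFS paths below are my own placeholders, and the only real requirement is that orders.txt and product.txt both end up under the one input directory passed as args[0]:

hdfs dfs -mkdir -p /input/join
hdfs dfs -put orders.txt product.txt /input/join
hadoop jar reduce-join.jar JobMain /input/join /output/join
hdfs dfs -cat /output/join/part-r-*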
