Data
Order table order:
date | pid | amount |
20150710 | P0001 | 2 |
20150710 | P0002 | 3 |
Product table product:
pid | pname | category_id | price |
P0001 | 小米5 | 1000 | 2 |
P0002 | 锤子T1 | 1000 | 3 |
Requirement:
For each order record, output a joined record in the following format:
date | pid | amount | pname | category_id | price |
20150710 | P0001 | 2 | 小米5 | 1000 | 2 |
20150710 | P0002 | 3 | 锤子T1 | 1000 | 3 |
Approach
Use the join condition pid as the map output key, so that matching records from both tables, each tagged with a flag marking which source file it came from, are sent to the same reduce task. The reduce side then stitches the two sides together.
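To make the shuffle concrete: for the sample rows above, the map phase emits one tagged bean per input line, keyed by pid (a sketch, with the unset placeholder fields omitted):

P0001 → {flag=0, date=20150710, amount=2} (from order)
P0001 → {flag=1, pname=小米5, category_id=1000, price=2} (from product)

Both beans for P0001 arrive at the same reduce call, which copies the product fields onto the order bean to produce the joined record.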
Test Data
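The mapper splits each line on commas and tells the two tables apart by the input file name, so the tables above, written out as test files, look like this (the file names must start with "order" and "product"):

order.txt:
20150710,P0001,2
20150710,P0002,3

product.txt:
P0001,小米5,1000,2
P0002,锤子T1,1000,3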
Implementation
package cn.feizhou.rjoin;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class RJoin {
static class RJoinMapper extends Mapper<LongWritable, Text, Text, InfoBean> {
// reused across map() calls; safe because context.write() serializes the bean immediately
InfoBean bean = new InfoBean();
Text k = new Text();
@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
String line = value.toString();
String pid="";
FileSplit inputSplit = (FileSplit) context.getInputSplit();
String name = inputSplit.getPath().getName();
// tell the two tables apart by the input file name
if (name.startsWith("order")) {
String[] fields = line.split(",");
// date pid amount
pid=fields[1];
bean.setDate(fields[0]);
bean.setP_id(pid);
bean.setAmount(Integer.parseInt(fields[2]));
bean.setFlag("0");
// placeholder values for the product-side fields (writeUTF cannot serialize null)
bean.setPname("0");
bean.setCategory_id(0);
bean.setPrice(0.00F);
} else if (name.startsWith("product")) {
// placeholder values for the order-side fields
bean.setDate("0");
bean.setAmount(0);
String[] fields = line.split(",");
// pid pname category_id price
pid=fields[0];
bean.setP_id(pid);
bean.setPname(fields[1]);
bean.setCategory_id(Integer.parseInt(fields[2]));
bean.setPrice(Float.parseFloat(fields[3]));
bean.setFlag("1");
}
k.set(pid);
context.write(k, bean);
}
}
static class RJoinReducer extends Reducer<Text, InfoBean, InfoBean, NullWritable> {
@Override
protected void reduce(Text pid, Iterable<InfoBean> beans, Context context) throws IOException, InterruptedException {
// the single product record for this pid
InfoBean pdBean = new InfoBean();
// all order records for this pid
ArrayList<InfoBean> orderBeans = new ArrayList<InfoBean>();
// one product can match many orders
for (InfoBean bean : beans) {
if ("1".equals(bean.getFlag())) { //产品的
try {
BeanUtils.copyProperties(pdBean, bean);
} catch (Exception e) {
e.printStackTrace();
}
} else {
InfoBean odbean = new InfoBean();
try {
BeanUtils.copyProperties(odbean, bean);
orderBeans.add(odbean);
} catch (Exception e) {
e.printStackTrace();
}
}
}
// attach the product fields to every order record to form the joined output
for (InfoBean bean : orderBeans) {
bean.setPname(pdBean.getPname());
bean.setCategory_id(pdBean.getCategory_id());
bean.setPrice(pdBean.getPrice());
context.write(bean, NullWritable.get());
}
}
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
// conf.set("mapred.textoutputformat.separator", "\t");
Job job = Job.getInstance(conf);
// specify the jar that carries this program (alternatively: job.setJar("c:/join.jar"))
job.setJarByClass(RJoin.class);
// set the mapper and reducer classes for this job
job.setMapperClass(RJoinMapper.class);
job.setReducerClass(RJoinReducer.class);
// map output key/value types
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(InfoBean.class);
// final output key/value types
job.setOutputKeyClass(InfoBean.class);
job.setOutputValueClass(NullWritable.class);
// input directory
FileInputFormat.setInputPaths(job, new Path(args[0]));
// output directory
FileOutputFormat.setOutputPath(job, new Path(args[1]));
// submit the job configuration, together with the jar containing the job's classes, to YARN and wait for completion
/* job.submit(); */
boolean res = job.waitForCompletion(true);
System.exit(res ? 0 : 1);
}
}
----------------------------------------
package cn.feizhou.rjoin;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
public class InfoBean implements Writable {
private String date;
private String p_id;
private int amount;
private String pname;
private int category_id;
private float price;
// flag=0: this bean wraps an order record
// flag=1: this bean wraps a product record
private String flag;
public InfoBean() {
}
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
public String getP_id() {
return p_id;
}
public void setP_id(String p_id) {
this.p_id = p_id;
}
public int getAmount() {
return amount;
}
public void setAmount(int amount) {
this.amount = amount;
}
public String getPname() {
return pname;
}
public void setPname(String pname) {
this.pname = pname;
}
public int getCategory_id() {
return category_id;
}
public void setCategory_id(int category_id) {
this.category_id = category_id;
}
public float getPrice() {
return price;
}
public void setPrice(float price) {
this.price = price;
}
public String getFlag() {
return flag;
}
public void setFlag(String flag) {
this.flag = flag;
}
// Note: readFields() must consume the fields in exactly the order write() produced them.
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(date);
out.writeUTF(p_id);
out.writeInt(amount);
out.writeUTF(pname);
out.writeInt(category_id);
out.writeFloat(price);
out.writeUTF(flag);
}
@Override
public void readFields(DataInput in) throws IOException {
this.date = in.readUTF();
this.p_id = in.readUTF();
this.amount = in.readInt();
this.pname = in.readUTF();
this.category_id = in.readInt();
this.price = in.readFloat();
this.flag = in.readUTF();
}
@Override
public String toString() {
return "InfoBean [date=" + date + ", p_id=" + p_id + ", amount=" + amount + ", pname=" + pname
+ ", category_id=" + category_id + ", price=" + price + ", flag=" + flag + "]";
}
}
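----------------------------------------
As a quick sanity check of the Writable implementation, the bean can be serialized to a byte array and read back, mimicking what Hadoop does between map and reduce. This is a minimal sketch and not part of the original job; the class name InfoBeanRoundTrip is made up for illustration:

package cn.feizhou.rjoin;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
public class InfoBeanRoundTrip {
    public static void main(String[] args) throws Exception {
        InfoBean in = new InfoBean();
        in.setDate("20150710");
        in.setP_id("P0001");
        in.setAmount(2);
        in.setPname("小米5");
        in.setCategory_id(1000);
        in.setPrice(2f);
        in.setFlag("0");
        // serialize the bean exactly as the shuffle would
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        in.write(new DataOutputStream(buf));
        // deserialize into a fresh bean; it should print the same field values
        InfoBean out = new InfoBean();
        out.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(out);
    }
}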
Test Results
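To run the job, package it and submit it with hadoop jar; the jar name and HDFS paths below are hypothetical:

hadoop jar rjoin.jar cn.feizhou.rjoin.RJoin /rjoin/input /rjoin/output

Because the reducer writes InfoBean as the output key (with NullWritable as the value), each line of the result file is the bean's toString(). With the sample data above, the output should look like:

InfoBean [date=20150710, p_id=P0001, amount=2, pname=小米5, category_id=1000, price=2.0, flag=0]
InfoBean [date=20150710, p_id=P0002, amount=3, pname=锤子T1, category_id=1000, price=3.0, flag=0]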