Requirements Analysis
We have two input tables.
Order table: each record is (id, pid, amount), i.e. an order id, a product id, and a quantity.
Product table: each record is (pid, pname), i.e. a product id and a product name.
We want to merge the product name from the product table into the order table, joining on the product id (pid), so that each final record has the form (id, pname, amount).
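The original sample files are not reproduced here, but the records referenced in the reducer's comments below give a concrete picture of the join. Assuming tab-separated input files whose names contain "order" and "pd" (the mapper below keys off the file name), the data flows like this:

order file (excerpt):
1001    01    1
1004    01    4

pd file (excerpt):
01    小米

joined output:
1001    小米    1
1004    小米    4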
Code Implementation and Result Analysis
package reduceJoin;

import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

public class tableBean implements Writable {
    private String id;      // order id
    private String pid;     // product id
    private int amount;     // quantity ordered
    private String pname;   // product name
    private String flag;    // which table the record came from: "order" or "pd"

    // No-arg constructor, required by the Writable deserialization machinery
    public tableBean() {
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getPid() {
        return pid;
    }

    public void setPid(String pid) {
        this.pid = pid;
    }

    public int getAmount() {
        return amount;
    }

    public void setAmount(int amount) {
        this.amount = amount;
    }

    public String getPname() {
        return pname;
    }

    public void setPname(String pname) {
        this.pname = pname;
    }

    public String getFlag() {
        return flag;
    }

    public void setFlag(String flag) {
        this.flag = flag;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(id);
        out.writeUTF(pid);
        out.writeInt(amount);
        out.writeUTF(pname);
        out.writeUTF(flag);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // Must read the fields in exactly the order write() wrote them
        this.id = in.readUTF();
        this.pid = in.readUTF();
        this.amount = in.readInt();
        this.pname = in.readUTF();
        this.flag = in.readUTF();
    }

    @Override
    public String toString() {
        return id + "\t" + pname + "\t" + amount;
    }
}
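A quick way to convince yourself that write() and readFields() agree on field order is a local round trip through an in-memory stream. The harness below is a sketch of mine, not part of the original post; the sample values are arbitrary:

package reduceJoin;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class tableBeanRoundTrip {
    public static void main(String[] args) throws IOException {
        tableBean original = new tableBean();
        original.setId("1001");
        original.setPid("01");
        original.setAmount(1);
        original.setPname("小米");
        original.setFlag("order");

        // Serialize to an in-memory buffer, just as Hadoop would to the wire
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buf));

        // Deserialize into a fresh bean and print it via toString()
        tableBean copy = new tableBean();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(copy); // expected: 1001	小米	1
    }
}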
package reduceJoin;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class tableDriver {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Job job = Job.getInstance(new Configuration());

        job.setJarByClass(tableDriver.class);
        job.setMapperClass(tableMapper.class);
        job.setReducerClass(tableReducer.class);

        // Map output: pid as the key, so both tables meet in the same reduce call
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(tableBean.class);

        // Final output: the joined bean only, with no value
        job.setOutputKeyClass(tableBean.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path("D:\\input"));
        FileOutputFormat.setOutputPath(job, new Path("D:\\hadoop\\output"));

        boolean b = job.waitForCompletion(true);
        System.exit(b ? 0 : 1);
    }
}
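The paths above are hard-coded for a local Windows run. For a cluster submission the paths are usually taken from the command line instead; a minimal sketch of that variant (only the two path lines change, and the jar name in the comment is hypothetical):

    // Cluster-friendly variant: read the paths from the command line, e.g.
    //   hadoop jar reduceJoin.jar reduceJoin.tableDriver /input /output
    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));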
package reduceJoin;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

public class tableMapper extends Mapper<LongWritable, Text, Text, tableBean> {
    private String fileName;
    private Text outK = new Text();
    private tableBean outV = new tableBean();

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Runs once per split: remember which input file this mapper is reading
        FileSplit split = (FileSplit) context.getInputSplit();
        fileName = split.getPath().getName();
    }

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Read one line and split it on tabs
        String line = value.toString();
        String[] split = line.split("\t");

        // Decide which table the line belongs to by file name
        if (fileName.contains("order")) {
            // Order table line: id  pid  amount
            outK.set(split[1]);
            outV.setId(split[0]);
            outV.setPid(split[1]);
            outV.setAmount(Integer.parseInt(split[2]));
            outV.setPname("");
            outV.setFlag("order");
        } else {
            // Product table line: pid  pname
            outK.set(split[0]);
            outV.setId("");
            outV.setPid(split[0]);
            outV.setAmount(0);
            outV.setPname(split[1]);
            outV.setFlag("pd");
        }

        // Emit keyed by pid
        context.write(outK, outV);
    }
}
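For the sample lines shown earlier, the mapper emits pairs keyed by pid, so the shuffle brings the order and product records for the same product into one reduce call:

"1001  01  1"  →  key "01", value (id=1001, pid=01, amount=1, pname="", flag=order)
"1004  01  4"  →  key "01", value (id=1004, pid=01, amount=4, pname="", flag=order)
"01  小米"      →  key "01", value (id="", pid=01, amount=0, pname=小米, flag=pd)

Reusing the single outK/outV pair across map() calls is safe here, because context.write() serializes both objects immediately.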
package reduceJoin;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;

public class tableReducer extends Reducer<Text, tableBean, tableBean, NullWritable> {
    @Override
    protected void reduce(Text key, Iterable<tableBean> values, Context context) throws IOException, InterruptedException {
        // All records sharing one pid arrive together, e.g. for key "01":
        //   01  1001  1  order
        //   01  1004  4  order
        //   01  小米      pd
        ArrayList<tableBean> orderBeans = new ArrayList<>();
        tableBean pdBean = new tableBean();

        for (tableBean value : values) {
            if ("order".equals(value.getFlag())) {
                // Deep-copy the value into a fresh bean before storing it:
                // Hadoop reuses the same object across iterations
                tableBean tmpTableBean = new tableBean();
                try {
                    BeanUtils.copyProperties(tmpTableBean, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    e.printStackTrace();
                }
                orderBeans.add(tmpTableBean);
            } else {
                // The product table contributes one record per pid
                try {
                    BeanUtils.copyProperties(pdBean, value);
                } catch (IllegalAccessException | InvocationTargetException e) {
                    e.printStackTrace();
                }
            }
        }

        // Fill in the product name on every order record and emit it
        for (tableBean orderBean : orderBeans) {
            orderBean.setPname(pdBean.getPname());
            context.write(orderBean, NullWritable.get());
        }
    }
}
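One detail worth calling out: the deep copy through BeanUtils.copyProperties is not cosmetic. Hadoop reuses a single tableBean instance while iterating over values, so adding value to the list directly would leave orderBeans holding several references to the same, last-deserialized object. If you prefer not to depend on commons-beanutils, a hand-written field copy does the same job; a minimal sketch:

    // Equivalent to BeanUtils.copyProperties(tmpTableBean, value) for this bean,
    // without reflection or the extra dependency
    tableBean tmpTableBean = new tableBean();
    tmpTableBean.setId(value.getId());
    tmpTableBean.setPid(value.getPid());
    tmpTableBean.setAmount(value.getAmount());
    tmpTableBean.setPname(value.getPname());
    tmpTableBean.setFlag(value.getFlag());
    orderBeans.add(tmpTableBean);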
Output: one joined line per order record, in the id / pname / amount form produced by tableBean.toString(); for the sample records above that would be "1001  小米  1" and "1004  小米  4".