1. 新建order与product文件
在resource中新建order目录并创建文件
order.txt
// id pid amount
1001 01 1
1002 02 2
1003 03 3
1004 01 4
1005 03 5
1006 02 6
product.txt
//pid pname
01 苹果
02 华为
03 小米
我们预期业务输出为
//id pname amount
1001 苹果 1
1004 苹果 4
1002 华为 2
1006 华为 6
1003 小米 3
1005 小米 5
2. 新建OrderProductDto
@Data
public class OrderProductDto implements Writable {
    // Joined record used as both map output value and reduce output key.
    // Lombok @Data generates the getters/setters used by the mapper and reducer.
    private String id = ""; // order ID (empty for product records)
    private String pid = ""; // product ID — the join key
    private int amount = 0; // order quantity (0 for product records)
    private String productName = ""; // product name (empty for order records)
    private String flag = ""; // source table marker: "order" or "product"

    /**
     * Serializes the fields for Hadoop's wire format.
     * Field order here MUST match readFields exactly.
     */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(id);
        out.writeUTF(pid);
        out.writeInt(amount);
        out.writeUTF(productName);
        out.writeUTF(flag);
    }

    /**
     * Deserializes the fields in the same order they were written.
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        this.id = in.readUTF();
        this.pid = in.readUTF();
        this.amount = in.readInt();
        this.productName = in.readUTF();
        this.flag = in.readUTF();
    }

    @Override
    public String toString() {
        // Matches the expected output layout: id  pname  amount (tab-separated).
        return id + "\t" + productName + "\t" + amount;
    }
}
3. 新建OrderProductMapper类
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
/**
 * Tags each input record with its source table ("order" or "product") and
 * emits it keyed by pid so the reducer can perform the join.
 */
public class OrderProductMapper extends Mapper<LongWritable, Text, Text, OrderProductDto> {

    private String filename;
    private final Text keyOut = new Text();
    // Reused across map() calls (standard Hadoop object-reuse pattern);
    // unused fields are reset on every record to avoid stale data.
    private final OrderProductDto orderProductDto = new OrderProductDto();

    /**
     * Caches the backing file's name once per split; every record this mapper
     * instance sees comes from the same file, so setup() is sufficient.
     */
    @Override
    protected void setup(Mapper<LongWritable, Text, Text, OrderProductDto>.Context context) throws IOException, InterruptedException {
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        this.filename = fileSplit.getPath().getName();
    }

    /**
     * Parses one line. Order lines are "id pid amount"; product lines are
     * "pid pname". pid becomes the map output key (the join key). Records from
     * files matching neither name are skipped instead of emitting stale state.
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, OrderProductDto>.Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // \s+ tolerates repeated spaces/tabs; identical to split(" ") for well-formed lines.
        String[] split = line.split("\\s+");
        if (filename.contains("order")) {
            // id pid amount — join on pid, so pid is the output key.
            keyOut.set(split[1]);
            orderProductDto.setId(split[0]);
            orderProductDto.setPid(split[1]);
            orderProductDto.setAmount(Integer.parseInt(split[2]));
            orderProductDto.setProductName(""); // not known on the order side
            orderProductDto.setFlag("order");
        } else if (filename.contains("product")) {
            // pid pname
            keyOut.set(split[0]);
            orderProductDto.setId(""); // not applicable on the product side
            orderProductDto.setPid(split[0]);
            orderProductDto.setAmount(0);
            orderProductDto.setProductName(split[1]);
            orderProductDto.setFlag("product");
        } else {
            // Unknown file: previously this fell through and wrote whatever was
            // left in keyOut/orderProductDto. Skip the record instead.
            return;
        }
        context.write(keyOut, orderProductDto);
    }
}
4. 新建OrderProductReducer类
/**
 * Reduce-side join: for each pid, attaches the (single) product record's name
 * to every order record sharing that pid, then emits the enriched orders.
 */
public class OrderProductReducer extends Reducer<Text, OrderProductDto, OrderProductDto, NullWritable> {

    @Override
    protected void reduce(Text key, Iterable<OrderProductDto> values, Reducer<Text, OrderProductDto, OrderProductDto, NullWritable>.Context context) throws IOException, InterruptedException {
        // All order records for this pid. Hadoop reuses the value object across
        // the iteration, so each one must be copied before being stored.
        List<OrderProductDto> orderDtoList = new ArrayList<>();
        // The product record for this pid (one product row per pid in the input).
        OrderProductDto productDto = new OrderProductDto();
        for (OrderProductDto value : values) {
            if ("order".equals(value.getFlag())) {
                // Explicit field copy replaces the reflective BeanUtils.copyProperties
                // call whose checked exceptions were silently swallowed by empty
                // catch blocks — any copy failure would have produced corrupt output.
                OrderProductDto temp = new OrderProductDto();
                temp.setId(value.getId());
                temp.setPid(value.getPid());
                temp.setAmount(value.getAmount());
                temp.setProductName(value.getProductName());
                temp.setFlag(value.getFlag());
                orderDtoList.add(temp);
            } else if ("product".equals(value.getFlag())) {
                productDto.setPid(value.getPid());
                productDto.setProductName(value.getProductName());
            }
        }
        // Fill in the product name on every buffered order and emit it.
        for (OrderProductDto dto : orderDtoList) {
            dto.setProductName(productDto.getProductName());
            context.write(dto, NullWritable.get());
        }
    }
}
5. 新建OrderProductDriver类
/**
 * Job driver for the order/product reduce-side join.
 * Usage: OrderProductDriver &lt;input path&gt; &lt;output path&gt;
 */
public class OrderProductDriver {

    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: OrderProductDriver <input path> <output path>");
            System.exit(2);
        }
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "order product");
        job.setJarByClass(OrderProductDriver.class);
        job.setMapperClass(OrderProductMapper.class);
        job.setReducerClass(OrderProductReducer.class);
        // Map output types differ from the final output types, so both must be set.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(OrderProductDto.class);
        job.setOutputKeyClass(OrderProductDto.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Exit code 0 on success, 1 on job failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
传参并运行,NICE
E:\Java\blogCode\hadoop\src\main\resources\order E:\Java\blogCode\hadoop\src\main\resources\order_ret
欢迎关注公众号算法小生与我沟通交流