下面是一个完整的 mapJoin(Map 端连接)示例源码,建议结合示例数据仔细阅读。
所需的数据:
orders.txt
1001,20150710,p0001,2
1002,20150710,p0002,3
1002,20150710,p0003,3
product.txt
p0001,小米5,1000,2000
p0002,锤子T1,1000,3000
mapJoin 只需要两个类:一个是 Mapper 类,一个是驱动类。注意示例数据中订单里的 p0003 在 product.txt 中没有对应的商品记录,属于未匹配数据,Mapper 需要处理这种情况。
map类
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.HashMap;
/**
 * Map-side join: loads the small product table (shipped via the distributed
 * cache) into memory in setup(), then joins each incoming order line against
 * it in map(). No reducer is required.
 *
 * Input value:  one order CSV line  — orderId,date,productId,amount
 * Output key:   productId
 * Output value: productName,price1,price2,orderId,date,amount
 */
public class mapJoin extends Mapper<LongWritable, Text, Text, Text> {

    // Lookup table built from product.txt:
    // key = product id (first CSV field), value = remaining three fields joined by ','.
    private final HashMap<String, String> mapJoin_map = new HashMap<>();

    /**
     * Reads the cached product file from HDFS into {@code mapJoin_map}
     * once per mapper, before any map() call runs.
     *
     * @throws IOException if the cache file cannot be opened or read
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        URI[] cacheFiles = DistributedCache.getCacheFiles(context.getConfiguration());
        FileSystem fileSystem = FileSystem.get(cacheFiles[0], context.getConfiguration());
        // try-with-resources guarantees the reader (and the underlying HDFS
        // stream) is closed even if a read fails.
        // BUG FIX: the original also called fileSystem.close() — FileSystem.get()
        // returns a cached, shared instance, and closing it can break every other
        // component using the same filesystem in this JVM. Do not close it here.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fileSystem.open(new Path(cacheFiles[0])), "UTF-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] split = line.split(",");
                if (split.length < 4) {
                    continue; // skip blank or malformed product lines
                }
                mapJoin_map.put(split[0], split[1] + "," + split[2] + "," + split[3]);
            }
        }
    }

    /**
     * Joins one order line with its product record (inner join).
     * Orders whose product id has no entry in product.txt are dropped.
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String[] split = value.toString().split(",");
        if (split.length < 4) {
            return; // skip blank or malformed order lines
        }
        String datas = mapJoin_map.get(split[2]);
        // BUG FIX: the original concatenated the lookup result unconditionally,
        // so an unmatched product id (e.g. p0003 in the sample data) emitted a
        // record starting with the literal string "null". Skip unmatched orders
        // instead, giving proper inner-join semantics.
        if (datas == null) {
            return;
        }
        String joinDatas = datas + "," + split[0] + "," + split[1] + "," + split[3];
        context.write(new Text(split[2]), new Text(joinDatas));
    }
}
驱动类
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.net.URI;
/**
 * Driver for the map-side join job.
 *
 * Usage: JobMain &lt;input path (orders)&gt; &lt;output path&gt;
 * The product file must already exist on HDFS; it is distributed to every
 * mapper through the distributed cache.
 */
public class JobMain {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Ship the small product table to every mapper via the distributed cache.
        DistributedCache.addCacheFile(new URI("hdfs://node01:8020/product.txt"), conf);
        // BUG FIX: the job name was "Partiton.Partitions" — a misspelled
        // copy-paste leftover from a partitioner example. Name it after the job.
        Job job = Job.getInstance(conf, "mapJoin");
        job.setJarByClass(JobMain.class);
        job.setMapperClass(mapJoin.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // BUG FIX: a map-side join needs no reduce phase; without this line the
        // framework still runs a default identity reducer and a pointless shuffle.
        job.setNumReduceTasks(0);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}