这是某资讯APP公司的面试题,考察MapReduce的编程思想。
给定一个人脉关系的文件,从中找到二度人脉。比如给定如下的人脉关系,
A B C D E
B E F
C G
G H I J
应输出
A F
A G
C H
C I
C J
这里假设关系是单向的,比如通过第一行"A B C D E",我们认为B是A的好友,但A不是B的好友(有点像单相思)。
方法是map阶段输出每人的前向结点和后向结点,前向用0标记、后向用1标记;reduce阶段把每人的后向结点与前向结点两两组合,即为二度人脉。但要考虑一种特殊情况,两人之间可能既是一度人脉又是二度人脉,比如上例中的A和E,此时按就近原则认为二者是一度人脉,结果中需要把它过滤掉。代码如下
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
public class TwoDegreeConnection {
public static class ConnectionMap extends Mapper<LongWritable, Text, Text, Text> {
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
String line = value.toString();
String ele[] = line.split(" ");
int i;
for (i=1; i<ele.length; i++) {
//前向和后向都要输出
context.write(new Text(ele[0]), new Text(ele[i] + " " + String.valueOf(0)));
context.write(new Text(ele[i]), new Text(ele[0] + " " + String.valueOf(1)));
}
}
}
public static class ConnectionReduce extends Reducer<Text, Text, Text, Text> {
Set<String> oneDegree = new HashSet<String>(); //保存一度人脉关系,以便后面过滤结果
protected void setup(Reducer<Text, Text, Text, Text>.Context context) throws IOException {
Configuration conf = context.getConfiguration();
String input = conf.get("input");
FileSystem fs = FileSystem.get(conf);
FSDataInputStream dis = fs.open(new Path(input));
InputStreamReader isr = new InputStreamReader(dis, "utf-8");
BufferedReader br = new BufferedReader(isr);
String s = br.readLine(), sub[];
int i;
while (s != null) {
sub = s.split(" ");
for (i=1; i<sub.length; i++) {
oneDegree.add(sub[0] + sub[i]);
}
s = br.readLine();
}
dis.close();
}
public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
Set<String> set_start = new HashSet<String>();
Set<String> set_end = new HashSet<String>();
String s[];
for (Text val : values) {
s = val.toString().split(" ");
if (s[1].equals("0")) {
set_end.add(s[0]);
} else {
set_start.add(s[0]);
}
}
String start, end, tmp;
Iterator<String> it_start = set_start.iterator();
Iterator<String> it_end;
while (it_start.hasNext()) {
start = it_start.next();
it_end = set_end.iterator();
while (it_end.hasNext()) {
end = it_end.next();
tmp = start + end;
if (!oneDegree.contains(tmp)) {
context.write(new Text(start), new Text(end));
}
}
}
}
}
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
conf.setStrings("input", args[0]);
Job job = Job.getInstance(conf);
job.setJarByClass(TwoDegreeConnection.class);
job.setJobName("TwoDegreeConnection");
job.setMapperClass(ConnectionMap.class);
job.setReducerClass(ConnectionReduce.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(Text.class);
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
job.waitForCompletion(true);
}
}