MapReduce例子——找出QQ共同好友

///\\\\\\\\
fri.txt 如下: person: friend1, friend2, friend3, friend4, …..

A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J

\\\\\\

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Job;

public class friends {

    /**
     * Step 1 mapper. Input lines look like "person:friend1,friend2,...".
     * For every friend F listed by person P we emit (F, P), so the reducer
     * receives, per friend, all the people who have that friend.
     */
    static class FriendMapper extends Mapper<LongWritable, Text, Text, Text>{
        // Reuse Writable instances instead of allocating per record.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
                throws IOException, InterruptedException {

            String line = value.toString().trim();
            // Guard against blank or malformed lines: the original code would
            // throw ArrayIndexOutOfBoundsException on a line with no ':' or
            // nothing after it.
            int colon = line.indexOf(':');
            if (colon <= 0 || colon == line.length() - 1) {
                return;
            }
            String person = line.substring(0, colon);
            String friends = line.substring(colon + 1);

            outValue.set(person);
            for (String friend : friends.split(",")) {
                if (!friend.isEmpty()) {
                    outKey.set(friend);
                    context.write(outKey, outValue);
                }
            }
        }
    }

    /**
     * Step 1 reducer. Key is a friend F; values are every person that lists F.
     * Emits "F \t P1,P2,..." (trailing comma preserved — step 2 splits on ','
     * and Java's split drops trailing empty strings, so the format is
     * deliberately kept byte-compatible with the original output).
     */
    static class FriendsReducer extends Reducer<Text, Text, Text, Text>{
        @Override
        protected void reduce(Text friend, Iterable<Text> persons, Context context)
                throws IOException, InterruptedException {

            // StringBuilder: single-threaded use, no need for StringBuffer's locking.
            StringBuilder sb = new StringBuilder();
            for (Text person : persons) {
                sb.append(person).append(",");
            }
            context.write(friend, new Text(sb.toString()));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(friends.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setMapperClass(FriendMapper.class);
        job.setReducerClass(FriendsReducer.class);

    //  FileInputFormat.setInputPaths(job, new Path("hdfs://Master:9000/data/demon/friends/input"));
    //  FileOutputFormat.setOutputPath(job, new Path("hdfs://Master:9000//data/demon/friends/output"));
        FileInputFormat.setInputPaths(job, new Path("/home/hadoop/examples/friends/input"));
        FileOutputFormat.setOutputPath(job, new Path("/home/hadoop/examples/friends/out_1"));

        // Propagate job success/failure as the process exit code instead of
        // silently discarding the result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

//\\\\
得到结果如下: friend: person1, person2, person3, ….

A   I,K,C,B,G,F,H,O,D,
B   A,F,J,E,
C   A,E,B,H,F,G,K,
D   G,C,K,A,L,F,E,H,
E   G,M,L,H,A,F,B,D,
F   L,M,D,C,G,A,
G   M,
H   O,
I   O,C,
J   O,
K   B,
L   D,E,
M   E,F,
O   A,H,I,J,F,

/\\\\\
///\\\\

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Job;


public class FriendStepTo {

    /**
     * Step 2 mapper. Input lines are step-1 output: "friend \t P1,P2,...".
     * Sorts the persons so each unordered pair gets one canonical "A-B" key,
     * then emits (Pi-Pj, friend) for every pair that shares this friend.
     */
    static class FriendToMapper extends Mapper<LongWritable, Text, Text, Text>{
        // Reuse Writable instances instead of allocating per record.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
                throws IOException, InterruptedException {

            String line = value.toString();
            String[] friend_person = line.split("\t");
            // Skip blank/malformed lines rather than throwing.
            if (friend_person.length < 2) {
                return;
            }

            String friend = friend_person[0];
            String[] persons = friend_person[1].split(",");

            Arrays.sort(persons);
            outValue.set(friend);
            // BUG FIX: the original bounds (i < length-2, j < length-1) skipped
            // every pair involving the last sorted person (e.g. all "*-O" pairs
            // were missing from the output). Correct pair enumeration is
            // i in [0, n-1), j in (i, n).
            for (int i = 0; i < persons.length - 1; i++) {
                for (int j = i + 1; j < persons.length; j++) {
                    outKey.set(persons[i] + "-" + persons[j]);
                    context.write(outKey, outValue);
                }
            }
        }
    }

    /**
     * Step 2 reducer. Key is a person pair "A-B"; values are every friend the
     * two have in common. Emits "A-B \t f1 f2 ..." (space-separated, trailing
     * space preserved for output compatibility).
     */
    static class FriendsToReducer extends Reducer<Text, Text, Text, Text>{
        @Override
        protected void reduce(Text person_person, Iterable<Text> friends, Context context)
                throws IOException, InterruptedException {

            // StringBuilder: single-threaded use, no need for StringBuffer's locking.
            StringBuilder sb = new StringBuilder();
            for (Text friend : friends) {
                sb.append(friend).append(" ");
            }
            context.write(person_person, new Text(sb.toString()));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(FriendStepTo.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setMapperClass(FriendToMapper.class);
        job.setReducerClass(FriendsToReducer.class);

    //  FileInputFormat.setInputPaths(job, new Path("hdfs://Master:9000/data/demon/friends/input"));
    //  FileOutputFormat.setOutputPath(job, new Path("hdfs://Master:9000//data/demon/friends/output"));
        FileInputFormat.setInputPaths(job, new Path("/home/hadoop/examples/friends/out_1"));
        FileOutputFormat.setOutputPath(job, new Path("/home/hadoop/examples/friends/out_2"));

        // Propagate job success/failure as the process exit code instead of
        // silently discarding the result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

///\\\\\\\\\\\
得到的结果如下: person1-person2: friend1, friend2, …（注意：下面的示例输出不完整——原代码中两层循环的边界写成了 i < length-2 和 j < length-1，导致凡是涉及排序后最后一个人（如 O）的配对全部缺失；修正循环边界后还会得到 E-L、F-O、H-O、I-O、K-O 等配对）

A-B C E 
A-C F D 
A-D E F 
A-E B C D 
A-F C D B E O 
A-G D E F C 
A-H E O C D 
A-I O 
A-K D 
A-L F E 
B-C A 
B-D E A 
B-E C 
B-F E A C 
B-G C E A 
B-H E C A 
B-I A 
B-K A 
B-L E 
C-D F A 
C-E D 
C-F D A 
C-G F A D 
C-H A D 
C-I A 
C-K D A 
C-L F 
D-F E A 
D-G A E F 
D-H A E 
D-I A 
D-K A 
D-L F E 
E-F C D B 
E-G D C 
E-H D C 
E-K D 
F-G C E D A 
F-H C A D E O 
F-I A O 
F-K D A 
F-L E 
G-H D E C A 
G-I A 
G-K A D 
G-L F E 
H-I A O 
H-K A D 
H-L E 
I-K A 
  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值