Hadoop: finding common friends

Below is QQ friend-list data. The part before the colon is a user; the part after the colon is that user's full friend list (the friend relationships in the data are one-directional).
A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J

Question: which pairs of users have common friends, and who are those common friends?

Take A as an example. A's friends are B, C, D, F, E, O.

B's friends are A, C, E, K. A and B are each other's friends, and their common friends are C and E.
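
For a single pair the answer can be checked directly with a set intersection. Here is a minimal in-memory sketch (plain Java, separate from the MapReduce jobs; the class name CommonFriendsCheck is hypothetical):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class CommonFriendsCheck {
    public static void main(String[] args) {
        // Friend lists of A and B, taken from the data above
        Set<String> friendsOfA = new HashSet<>(Arrays.asList("B", "C", "D", "F", "E", "O"));
        Set<String> friendsOfB = new HashSet<>(Arrays.asList("A", "C", "E", "K"));
        // Keep only the friends that appear in both sets
        friendsOfA.retainAll(friendsOfB);
        System.out.println(friendsOfA);   // prints [C, E] (iteration order may vary)
    }
}

Checking every pair of users this way does not parallelize well, so the solution below splits the work into two MapReduce jobs: step 1 inverts the data into "friend -> all users who list that friend", and step 2 expands each of those groups into user pairs that share that friend.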

Step 1:

     mapper

  

package com.wxj.togetherfriend.step1;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/*
 * Created by wxj on 2019/8/19 0019 20:15
 */
public class Step1Mapper extends Mapper<LongWritable, Text, Text, Text> {

    private Text t = new Text();
    private Text t2 = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

        // Split each line on the colon: split[0] is the user, split[1] is that user's friend list
        String[] split = value.toString().split(":");
        // Split the part after the colon on commas to get the individual friends
        String[] sp = split[1].split(",");
        // Emit (friend, user) so that all users who list the same friend end up in one reduce group.
        // E.g. the line "A:B,C,D,F,E,O" produces (B,A), (C,A), (D,A), (F,A), (E,A), (O,A).
        t2.set(split[0]);
        for (String s : sp) {
            t.set(s);
            context.write(t, t2);
        }
    }
}

     reduce

package com.wxj.togetherfriend.step1;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/*
 * Created by wxj on 2019/8/19 0019 23:03
 */
public class Step1Reducer extends Reducer<Text, Text, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // key is a friend; values are all users who list that friend.
        // Concatenate those users with "-" and write them as the new key, with the friend as the value.
        StringBuffer sb = new StringBuffer();
        for (Text value : values) {
            sb.append(value.toString()).append("-");
        }

        context.write(new Text(sb.toString()), key);
    }
}

    main

 

package com.wxj.togetherfriend.step1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/*
 * Created by wxj on 2019/8/19 0019 23:20
 */
public class Step1Main  extends Configured implements Tool {
    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(super.getConf(), "step1");
        // Step 1: read the input file
        job.setInputFormatClass(TextInputFormat.class);
        TextInputFormat.addInputPath(job,new Path("file:///E:\\****\\input"));

        // Step 2: set the mapper class and its output key/value types
        job.setMapperClass(Step1Mapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);


        // Steps 3-6 (partition, sort, combine, group) are left at their defaults
        // Step 7: the reduce phase
        job.setReducerClass(Step1Reducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Step 8: write the output
        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job,new Path("file:///E:\\*****\\mystep1out"));
        boolean b = job.waitForCompletion(true);
        return b?0:1;
    }

    // Load the native Windows hadoop.dll so the job can run locally on Windows
    static {
        try {
            System.load("D:\\soft\\hadoop-2.6.0-cdh5.14.0\\bin\\hadoop.dll");
        } catch (UnsatisfiedLinkError e) {
            System.err.println("Native code library failed to load.\n" + e);
            System.exit(1);
        }
    }

    public static void main(String[] args) throws Exception {
        int run = ToolRunner.run(new Configuration(), new Step1Main(), args);
        System.exit(run);
    }

}

The intermediate data produced by step 1 is:

   

F-D-O-I-H-B-K-G-C-	A
E-A-J-F-	B
K-A-B-E-F-G-H-	C
G-K-C-A-E-L-F-H-	D
G-F-M-B-H-A-L-D-	E
M-D-L-A-C-G-	F
M-	G
O-	H
C-O-	I
O-	J
B-	K
E-D-	L
F-E-	M
J-I-H-A-F-	O
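
Each line above is "all users who share one friend" followed by that friend, so step 2 only needs to turn each line into user pairs. Here is a minimal standalone sketch of that transformation (plain Java, hypothetical class name PairSketch; the Step2Mapper below does the same thing inside the job):

import java.util.Arrays;

public class PairSketch {
    public static void main(String[] args) {
        String line = "E-A-J-F-\tB";                  // one line of the step-1 output
        String[] parts = line.split("\t");
        String[] users = parts[0].split("-");         // ["E", "A", "J", "F"]
        Arrays.sort(users);                           // ["A", "E", "F", "J"], so pairs come out in a fixed order
        for (int i = 0; i < users.length - 1; i++) {
            for (int j = i + 1; j < users.length; j++) {
                // every pair of these users has parts[1] (here "B") as a common friend
                System.out.println(users[i] + users[j] + "\t" + parts[1]);
            }
        }
    }
}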

 

Step 2:

     mapper

    

package com.wxj.togetherfriend.step2;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.util.Arrays;

/*
 * Created by wxj on 2019/8/20 0020 18:44
 */
public class Step2Mapper extends Mapper<LongWritable, Text, Text, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Each input line is step-1 output: users joined by "-", a tab, then the shared friend
        String[] split = value.toString().split("\t");

        String[] user = split[0].split("-");
        // Sort the users so each pair is always emitted in the same order (e.g. "AE", never "EA")
        Arrays.sort(user);
        // Emit every pair of users with the shared friend as the value,
        // e.g. users A, E, F, J with friend B produce (AE,B), (AF,B), (AJ,B), (EF,B), (EJ,B), (FJ,B)
        for (int i = 0; i < user.length - 1; i++) {
            for (int j = i + 1; j < user.length; j++) {
                context.write(new Text(user[i] + user[j]), new Text(split[1]));
            }
        }
    }
}

     reduce

    

package com.wxj.togetherfriend.step2;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/*
 * Created by wxj on 2019/8/20 0020 18:44
 */
public class Step2Reducer extends Reducer<Text, Text, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // key is a user pair such as "AE"; values are all of their common friends
        StringBuffer buffer = new StringBuffer();
        for (Text value : values) {
            buffer.append(value.toString()).append("-");
        }
        // Output format: the pair, a tab, then the common friends joined by "-", e.g. "AE" and "C-B-D-"
        context.write(key, new Text(buffer.toString()));
    }
}

     main

  

package com.wxj.togetherfriend.step2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/*
 * Created by wxj on 2019/8/20 0020 18:44
 */
public class Step2Main extends Configured  implements Tool{
    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(super.getConf(), "step2");

        // Read the intermediate output written by step 1
        job.setInputFormatClass(TextInputFormat.class);
        TextInputFormat.addInputPath(job,new Path("file:///E:\\*****\\mystep1out"));

        job.setMapperClass(Step2Mapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(Step2Reducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);


        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job,new Path("file:///E:\\*****\\mystep2out"));

        boolean b = job.waitForCompletion(true);
        return b?0:1;
    }

    // Load the native Windows hadoop.dll so the job can run locally on Windows
    static {
        try {
            System.load("D:\\soft\\hadoop-2.6.0-cdh5.14.0\\bin\\hadoop.dll");
        } catch (UnsatisfiedLinkError e) {
            System.err.println("Native code library failed to load.\n" + e);
            System.exit(1);
        }
    }
    public static void main(String[] args) throws Exception {
        int run = ToolRunner.run(new Configuration(), new Step2Main(), args);
        System.exit(run);


    }

}
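
The two jobs must run in order, because step 2 reads the output directory written by step 1. A minimal sketch of a combined driver that chains them, assuming the hard-coded paths in each job stay as they are (the class name FriendsDriver is hypothetical):

package com.wxj.togetherfriend;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

import com.wxj.togetherfriend.step1.Step1Main;
import com.wxj.togetherfriend.step2.Step2Main;

public class FriendsDriver {
    public static void main(String[] args) throws Exception {
        // Run step 1 first and stop if it fails, so step 2 never reads a missing or partial output
        int step1 = ToolRunner.run(new Configuration(), new Step1Main(), args);
        if (step1 != 0) {
            System.exit(step1);
        }
        int step2 = ToolRunner.run(new Configuration(), new Step2Main(), args);
        System.exit(step2);
    }
}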

Result:

AB	C-E-
AC	D-F-
AD	F-E-
AE	C-B-D-
AF	D-O-E-B-C-
AG	C-D-F-E-
AH	E-C-O-D-
AI	O-
AJ	O-B-
AK	C-D-
AL	E-D-F-
AM	F-E-
BC	A-
BD	E-A-
BE	C-
BF	E-A-C-
BG	A-E-C-
BH	E-C-A-
BI	A-
BK	A-C-
BL	E-
BM	E-
BO	A-
CD	F-A-
CE	D-
CF	A-D-
CG	F-D-A-
CH	D-A-
CI	A-
CK	A-D-
CL	D-F-
CM	F-
CO	I-A-
DE	L-
DF	A-E-
DG	F-A-E-
DH	A-E-
DI	A-
DK	A-
DL	F-E-
DM	F-E-
DO	A-
EF	M-C-B-D-
EG	C-D-
EH	C-D-
EJ	B-
EK	C-D-
EL	D-
FG	A-D-E-C-
FH	D-O-C-E-A-
FI	O-A-
FJ	B-O-
FK	A-D-C-
FL	D-E-
FM	E-
FO	A-
GH	E-A-C-D-
GI	A-
GK	C-D-A-
GL	D-E-F-
GM	E-F-
GO	A-
HI	O-A-
HJ	O-
HK	D-A-C-
HL	E-D-
HM	E-
HO	A-
IJ	O-
IK	A-
IO	A-
KL	D-
KO	A-
LM	F-E-

 
