Second-Degree Friend Recommendation (Worked Example)

- Based on a Hadoop 2.x cluster: HDFS + MapReduce

JobFriends
Map01
Reduce01
Map02
Reduce02
FoF
FriendSort
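
For context, job one expects a tab-separated adjacency list at /JF/input/qq.txt: each line starts with a user, followed by that user's friends. A minimal sample (hypothetical data, in the format Map01 splits on '\t'):

tom	hadoop	hive	cat	hello
hadoop	tom	hive	world
hive	tom	hadoop	cat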

JobFriends

package com.friend;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class JobFriends {
	public static void main(String[] args) {
		boolean flag = jobOne();
		if (flag) {
			jobTwo();
		}
	}


	static boolean jobOne() {
		Configuration config = new Configuration();
		config.set("fs.defaultFS", "hdfs://node01:8020");
		// yarn.resourcemanager.hostname takes a hostname only; 8088 is the RM web UI port
		config.set("yarn.resourcemanager.hostname", "node03");

		boolean flag = false;
		try {
			Job job = Job.getInstance(config);
			
			job.setJarByClass(JobFriends.class);
			job.setJobName("fof one job");
			
			job.setMapperClass(Map01.class);
			job.setReducerClass(Reduce01.class);
			
			job.setMapOutputKeyClass(FoF.class);
			job.setMapOutputValueClass(IntWritable.class);
						 	
			FileInputFormat.addInputPath(job, new Path("/JF/input/qq.txt"));
          
			Path output = new Path("/JF/tuijian/01/");
			FileSystem fs = FileSystem.get(config);
			
			if (fs.exists(output)) {
				fs.delete(output, true);
			}
			FileOutputFormat.setOutputPath(job, output);
			
			
			flag = job.waitForCompletion(true);
			if (flag) {
				System.out.println("job 1 success~~");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return flag;
	}
	 
	 
	static boolean jobTwo() {
		Configuration config = new Configuration();
		config.set("fs.defaultFS", "hdfs://node01:8020");
		config.set("yarn.resourcemanager.hostname", "node03");

		boolean flag = false;
		 try {
			 Job job = Job.getInstance(config);
			 
			 job.setJarByClass(JobFriends.class);
			 job.setJobName("fof two job");
			 
			 job.setMapperClass(Map02.class);
			 job.setReducerClass(Reduce02.class);
			 
			 job.setMapOutputKeyClass(FriendSort.class);
			 job.setMapOutputValueClass(IntWritable.class);
			 
			 
			 FileInputFormat.addInputPath(job, new Path("/JF/tuijian/01/"));
			 
			 Path output = new Path("/JF/tuijian/02/");
			 
			 FileSystem fs = FileSystem.get(config);
			 if (fs.exists(output)) {
				 fs.delete(output, true);
			 }
			 
			 FileOutputFormat.setOutputPath(job, output);
			 
			 flag = job.waitForCompletion(true);
			 if (flag) {
				 System.out.println("job 2 success~~");
			 }
			 
		} catch (Exception e) {
			e.printStackTrace();
		}
		return flag;
	}
}
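
Both jobs are chained from main(), so one submission runs the whole pipeline. Assuming the classes are packaged into a jar (the jar name here is hypothetical), a typical submission from a cluster node is:

hadoop jar fof.jar com.friend.JobFriends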

Map01

package com.friend;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.StringUtils;

// Input: byte offset + one line of the friends file.
// Output: normalized friend pair (FoF) + flag: 0 = direct friends, 1 = pair sharing a common friend.
public class Map01 extends Mapper<LongWritable, Text, FoF, IntWritable> {
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {
		String line = value.toString(); // one line, e.g. tom	hadoop	hive	cat	hello
		String[] friends = StringUtils.split(line, '\t');
		for (int i = 1; i < friends.length; i++) { // friends[0] is the user; the rest are friends
			String friend = friends[i];
			// direct friendship: the user paired with each friend, flag 0 (tom-hadoop, tom-hive, ...)
			context.write(new FoF(friends[0], friend), new IntWritable(0));
			// indirect relation: any two of the user's friends share at least this user, flag 1
			for (int j = i + 1; j < friends.length; j++) {
				String friend2 = friends[j];
				context.write(new FoF(friend, friend2), new IntWritable(1));
			}
		}
	}
}
//tom hadoop 0
//hadoop hive 1
//hadoop cat 1
//hadoop hello 1
//tom hive 0
//hive cat 1
//hive hello 1
//tom cat 0
//cat hello 1
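
After the shuffle, Reduce01 receives each pair with all of its flags grouped together. Note that FoF stores every pair in lexicographic order, so tom-hadoop and hadoop-tom fall into the same group. Continuing the sample above, the groups look roughly like this (a sketch of the shuffled input, not actual job output):

hadoop	tom	{0}
hadoop	hive	{1}
cat	hive	{1}
cat	hello	{1}
hello	hive	{1}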

Reduce01

package com.friend;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.StringUtils;

// key, values after grouping: e.g. tom	hadoop	{1,1,1,1}
public class Reduce01 extends Reducer<FoF, IntWritable, Text, NullWritable> {
	@Override
	protected void reduce(FoF key, Iterable<IntWritable> value,
			Context context) throws IOException, InterruptedException {
		int sum = 0;
		boolean f = true;
		for (IntWritable i : value) {
			if (i.get() == 0) { // a 0 flag means the two are already direct friends: drop the pair
				f = false;
				break;
			}
			sum += i.get(); // otherwise accumulate the common-friend count: 1+1+1+1 = 4
		}
		if (f) { // only indirect pairs survive
			// replace the tab with a space and append the count, e.g. "tom hadoop 4"
			String msg = StringUtils.split(key.toString(), '\t')[0] + " "
					+ StringUtils.split(key.toString(), '\t')[1] + " " + sum;
			context.write(new Text(msg), NullWritable.get());
		}
	}
}
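
Job one therefore writes one space-separated line per surviving pair, which is exactly the format Map02 parses. With a richer input file the output would contain lines such as these (counts are illustrative, reusing the example pair from the comments above):

tom hadoop 4
tom hive 2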

Map02

package com.friend;

import java.io.IOException;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class Map02 extends Mapper<LongWritable, Text, FriendSort, IntWritable>{
	@Override
	protected void map(LongWritable key, Text value, Context context)
			throws IOException, InterruptedException {
		String line = value.toString(); // one line of job-1 output, e.g. "tom hadoop 4"
		String friend1 = StringUtils.split(line, ' ')[0];
		String friend2 = StringUtils.split(line, ' ')[1];
		int hot = Integer.parseInt(StringUtils.split(line, ' ')[2]);

		System.out.println(friend1 + " " + friend2 + " " + hot);
		System.out.println(friend2 + " " + friend1 + " " + hot);
		// sort by user name first, then by common-friend count (e.g. tom hadoop 4 / tom hive 2)
		context.write(new FriendSort(friend1, friend2, hot), new IntWritable(hot));
		System.out.println(friend1 + "-" + friend2 + "-" + hot);
		context.write(new FriendSort(friend2, friend1, hot), new IntWritable(hot));
	}
}
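
Because Map02 emits every pair in both orientations, each of the two users receives the other as a candidate. For the illustrative input line tom hadoop 4, the two records written are:

FriendSort(tom, hadoop, 4)   -> 4
FriendSort(hadoop, tom, 4)   -> 4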

Reduce02

package com.friend;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class Reduce02 extends Reducer<FriendSort, IntWritable, Text, NullWritable>{
	@Override
	protected void reduce(FriendSort friend, Iterable<IntWritable> hot, Context context)
			throws IOException, InterruptedException {
		int sum = 0;
		for (IntWritable i : hot) {
			sum = i.get(); // every value in this group carries the same count, so keeping the last is fine
		}
		System.out.println("==============");
		String msg = friend.getFriend01()+" "+friend.getFriend02()+" "+sum;
		System.out.println(msg);
		context.write(new Text(msg), NullWritable.get());
	}
}
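
The files under /JF/tuijian/02/ then list, for each user, candidate friends in descending order of common-friend count, one recommendation per line. An illustrative fragment:

hadoop tom 4
tom hadoop 4
tom hive 2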

FoF

package com.friend;

import org.apache.hadoop.io.Text;

// Normalize the pair key: a-b and b-a are stored identically, smaller name first, tab separated
public class FoF extends Text{
	public FoF(){
		super();
	}
	
	public FoF(String friend01,String friend02){
		set(getof(friend01,friend02));
	}
	
	public String getof(String friend01,String friend02){
		int c = friend01.compareTo(friend02);
		if(c > 0){
			return friend02+"\t"+friend01;
		}
		return friend01+"\t"+friend02;
	}
}
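
A quick standalone check of the normalization (a sketch outside the MapReduce jobs; it assumes only that FoF is on the classpath):

package com.friend;

public class FoFDemo {
	public static void main(String[] args) {
		// both orderings print the same key: "cat	tom"
		System.out.println(new FoF("tom", "cat"));
		System.out.println(new FoF("cat", "tom"));
	}
}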

FriendSort

package com.friend;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

// Implements WritableComparable so the framework can serialize the key and sort by it
public class FriendSort implements WritableComparable<FriendSort>{
	private String friend01;
	private String friend02;
	private int hot;
	public FriendSort() {
		super();
	}

	public FriendSort(String friend01, String friend02, int hot) {
		super();
		this.friend01 = friend01;
		this.friend02 = friend02;
		this.hot = hot;
	}

	public String getFriend01() {
		return friend01;
	}

	public void setFriend01(String friend01) {
		this.friend01 = friend01;
	}

	public String getFriend02() {
		return friend02;
	}

	public void setFriend02(String friend02) {
		this.friend02 = friend02;
	}

	public int getHot() {
		return hot;
	}

	public void setHot(int hot) {
		this.hot = hot;
	}


	public void readFields(DataInput in) throws IOException {
		this.friend01=in.readUTF();
		this.friend02=in.readUTF();
		this.hot=in.readInt();
	}

	public void write(DataOutput out) throws IOException {
		out.writeUTF(friend01);
		out.writeUTF(friend02);
		out.writeInt(hot);
	}

	public int compareTo(FriendSort friend) {
		int c = friend01.compareTo(friend.getFriend01()); // first compare user names: same user?
		if (c == 0) {
			// same user: order by common-friend count, descending (tom hadoop 4 before tom hive 2)
			return -Integer.compare(hot, friend.getHot());
		}
		return c;
	}

}
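
A similar standalone sketch confirms the ordering compareTo defines, ascending by user name and descending by common-friend count within a user:

package com.friend;

import java.util.Arrays;

public class FriendSortDemo {
	public static void main(String[] args) {
		FriendSort[] pairs = {
				new FriendSort("tom", "hive", 2),
				new FriendSort("tom", "hadoop", 4),
				new FriendSort("cat", "hello", 1),
		};
		Arrays.sort(pairs);
		// prints: cat hello 1 / tom hadoop 4 / tom hive 2
		for (FriendSort f : pairs) {
			System.out.println(f.getFriend01() + " " + f.getFriend02() + " " + f.getHot());
		}
	}
}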
