MapReduce Programming Examples (Part 3)
- How to pass a custom object type as a key or value
  - Implement the WritableComparable interface
  - Implement the Writable interface
- The difference between the two (contrasted in the sketch below)
  - If the object type is passed as the key, the MapReduce framework sorts keys during the shuffle, so you must implement WritableComparable to tell the framework how to compare your objects
  - If the object type is only passed as the value, implementing the Writable interface is enough
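A minimal sketch of the contrast (the class names ValueBean and KeyBean are hypothetical, for illustration only):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;

// Used only as a value: serialization alone is enough.
class ValueBean implements Writable {
    private long n;
    @Override
    public void write(DataOutput out) throws IOException { out.writeLong(n); }
    @Override
    public void readFields(DataInput in) throws IOException { n = in.readLong(); }
}

// Used as a key: compareTo() additionally tells the framework how to sort.
class KeyBean implements WritableComparable<KeyBean> {
    private long n;
    @Override
    public void write(DataOutput out) throws IOException { out.writeLong(n); }
    @Override
    public void readFields(DataInput in) throws IOException { n = in.readLong(); }
    @Override
    public int compareTo(KeyBean o) { return Long.compare(n, o.n); }
}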
Example: compute each phone number's total upstream and downstream traffic. Sample input (tab-separated; some records omit the visited URL and site-category fields, which is why the Mapper below indexes the traffic fields from the end of the line):
1363157985066 13726230503 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 24681 200
1363157995052 13826544101 5C-0E-8B-C7-F1-E0:CMCC 120.197.40.4 4 0 264 0 200
1363157991076 13926435656 20-10-7A-28-CC-0A:CMCC 120.196.100.99 2 4 132 1512 200
1363154400022 13926251106 5C-0E-8B-8B-B1-50:CMCC 120.197.40.4 4 0 240 0 200
1363157993044 18211575961 94-71-AC-CD-E6-18:CMCC-EASY 120.196.100.99 iface.qiyi.com 视频网站 15 12 1527 2106 200
1363157995074 84138413 5C-0E-8B-8C-E8-20:7DaysInn 120.197.40.4 122.72.52.12 20 16 4116 1432 200
1363157993055 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200
1363157995033 15920133257 5C-0E-8B-C7-BA-20:CMCC 120.197.40.4 sug.so.360.cn 信息安全 20 20 3156 2936 200
1363157983019 13719199419 68-A1-B7-03-07-B1:CMCC-EASY 120.196.100.82 4 0 240 0 200
1363157984041 13660577991 5C-0E-8B-92-5C-20:CMCC-EASY 120.197.40.4 s19.cnzz.com 站点统计 24 9 6960 690 200
1363157973098 15013685858 5C-0E-8B-C7-F7-90:CMCC 120.197.40.4 rank.ie.sogou.com 搜索引擎 28 27 3659 3538 200
1363157986029 15989002119 E8-99-C4-4E-93-E0:CMCC-EASY 120.196.100.99 www.umeng.com 站点统计 3 3 1938 180 200
1363157992093 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 15 9 918 4938 200
1363157986041 13480253104 5C-0E-8B-C7-FC-80:CMCC-EASY 120.197.40.4 3 3 180 180 200
1363157984040 13602846565 5C-0E-8B-8B-B6-00:CMCC 120.197.40.4 2052.flash2-http.qq.com 综合门户 15 12 1938 2910 200
1363157995093 13922314466 00-FD-07-A2-EC-BA:CMCC 120.196.100.82 img.qfc.cn 12 12 3008 3720 200
1363157982040 13502468823 5C-0A-5B-6A-0B-D4:CMCC-EASY 120.196.100.99 y0.ifengimg.com 综合门户 57 102 7335 110349 200
1363157986072 18320173382 84-25-DB-4F-10-1A:CMCC-EASY 120.196.100.99 input.shouji.sogou.com 搜索引擎 21 18 9531 2412 200
1363157990043 13925057413 00-1F-64-E1-E6-9A:CMCC 120.196.100.55 t3.baidu.com 搜索引擎 69 63 11058 48243 200
1363157988072 13760778710 00-FD-07-A4-7B-08:CMCC 120.196.100.82 2 2 120 120 200
1363157985066 13726238888 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 24681 200
1363157993055 13560436666 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200
1 Write the JavaBean
package TopN;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
/**
 * Demonstrates how a custom data type implements Hadoop's serialization interfaces.
 * 1. The class must keep a no-arg constructor (the framework instantiates it via reflection).
 * 2. The field order written in write() must match the order read in readFields().
 *
 * @author ThinkPad
 */
public class FlowBean implements WritableComparable<FlowBean> {

    private long upFlow;
    private long dFlow;
    private String phone;
    private long amountFlow;

    public FlowBean() {
    }

    public FlowBean(long upFlow, long dFlow) {
        this.upFlow = upFlow;
        this.dFlow = dFlow;
        this.amountFlow = upFlow + dFlow;
    }

    public String getPhone() {
        return phone;
    }

    public void setPhone(String phone) {
        this.phone = phone;
    }

    public long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }

    public long getdFlow() {
        return dFlow;
    }

    public void setdFlow(long dFlow) {
        this.dFlow = dFlow;
    }

    public long getAmountFlow() {
        return amountFlow;
    }

    public void setAmountFlow(long amountFlow) {
        this.amountFlow = amountFlow;
    }

    /**
     * Called by the Hadoop framework when serializing an object of this class.
     */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(upFlow);
        //out.writeUTF(phone);
        out.writeLong(dFlow);
        out.writeLong(amountFlow);
    }

    /**
     * Called by the Hadoop framework when deserializing an object of this class.
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readLong();
        //this.phone = in.readUTF();
        this.dFlow = in.readLong();
        this.amountFlow = in.readLong();
    }

    @Override
    public String toString() {
        return this.upFlow + "," + this.dFlow + "," + this.amountFlow;
    }

    /**
     * Sort by total flow. Long.compare() returns -1/0/1 as required;
     * the "a > b ? 1 : 0" form never reports "less than" and therefore
     * breaks the Comparable contract.
     */
    @Override
    public int compareTo(FlowBean o) {
        return Long.compare(this.amountFlow, o.amountFlow);
    }
}
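Since write() and readFields() must agree on field order, a quick local round-trip check can catch a mismatch before the job is ever submitted. A minimal sketch (FlowBeanRoundTrip is a hypothetical helper, not part of the job):

package TopN;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class FlowBeanRoundTrip {
    public static void main(String[] args) throws IOException {
        FlowBean original = new FlowBean(100, 200);
        // Serialize the bean the same way Hadoop would.
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buf));
        // Deserialize into a fresh instance.
        FlowBean copy = new FlowBean();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(copy); // expect: 100,200,300
    }
}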
2 Write the Mapper class
package TopN;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
public class TopNMapper extends Mapper<LongWritable, Text, Text, FlowBean> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();
        String[] fields = line.split("\t");
        // Field 1 is the phone number.
        String phone = fields[1];
        // Records have a variable number of fields (URL and category may be
        // missing), so count the traffic fields from the end of the line.
        long upFlow = Long.parseLong(fields[fields.length - 3]);
        long dFlow = Long.parseLong(fields[fields.length - 2]);
        context.write(new Text(phone), new FlowBean(upFlow, dFlow));
    }
}
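Real log files often contain malformed lines. An optional hardening sketch (my assumption, not part of the original case) that can replace the map() method inside TopNMapper above; it skips and counts bad records instead of failing the whole task:

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] fields = value.toString().split("\t");
        if (fields.length < 5) {
            // Too few fields to hold the phone and traffic columns: count and skip.
            context.getCounter("TopN", "MALFORMED_LINES").increment(1);
            return;
        }
        try {
            long upFlow = Long.parseLong(fields[fields.length - 3]);
            long dFlow = Long.parseLong(fields[fields.length - 2]);
            context.write(new Text(fields[1]), new FlowBean(upFlow, dFlow));
        } catch (NumberFormatException e) {
            // Non-numeric traffic field: count and skip.
            context.getCounter("TopN", "MALFORMED_LINES").increment(1);
        }
    }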
3 Write the Reducer class
package TopN;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;

public class TopNReduce extends Reducer<Text, FlowBean, Text, FlowBean> {

    @Override
    protected void reduce(Text key, Iterable<FlowBean> values, Context context)
            throws IOException, InterruptedException {
        long upSum = 0;
        long dSum = 0;
        // Accumulate all traffic recorded for this phone number.
        for (FlowBean value : values) {
            upSum += value.getUpFlow();
            dSum += value.getdFlow();
        }
        context.write(key, new FlowBean(upSum, dSum));
    }
}
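To make the aggregation concrete: phone 13560439658 appears in two records of the sample input, with upstream/downstream traffic of 1116/954 and 918/4938. The reducer therefore emits one output line for it, the key followed by FlowBean.toString():

13560439658	2034,5892,7926

That is upSum = 1116 + 918 = 2034, dSum = 954 + 4938 = 5892, and amountFlow = 2034 + 5892 = 7926.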
4 Write the Job driver (main method) to run on the YARN cluster
package TopN;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.net.URI;

public class TopNFlow {

    public static void main(String[] args) throws Exception {
        // Submit as the HDFS superuser so the job can write to /TopN.
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration conf = new Configuration();
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.hostname", "master");
        conf.set("fs.defaultFS", "hdfs://master:9000");
        // Required when submitting from Windows to a Linux cluster.
        conf.set("mapreduce.app-submission.cross-platform", "true");

        Job job = Job.getInstance(conf);
        // Ship the pre-built jar to the cluster.
        job.setJar("E:\\Hadoop\\HDFSDemo\\target\\HDFSDemo-1.0-SNAPSHOT.jar");
        job.setMapperClass(TopNMapper.class);
        job.setReducerClass(TopNReduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        // Delete the output directory if it already exists, or the job will fail.
        FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
        Path output = new Path("/TopN/output");
        if (fs.exists(output)) {
            fs.delete(output, true);
        }

        // Set the input and output paths.
        FileInputFormat.setInputPaths(job, new Path("/TopN/input"));
        FileOutputFormat.setOutputPath(job, output);

        // Submit the job and wait for it to finish.
        boolean b = job.waitForCompletion(true);
        if (b) {
            System.out.println("Job completed!");
        } else {
            System.out.println("Job failed!");
        }
    }
}
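For quick debugging without a cluster, Hadoop's built-in local runner can execute the same job in-process. A sketch of the alternative configuration (the local paths shown are hypothetical):

        // Local-mode variant of the configuration block above.
        Configuration conf = new Configuration();
        conf.set("mapreduce.framework.name", "local"); // in-process runner, no YARN
        conf.set("fs.defaultFS", "file:///");          // use the local filesystem
        // ...then set the same Mapper/Reducer/output classes as above
        // (job.setJar() is not needed locally), and point the job at local paths:
        // FileInputFormat.setInputPaths(job, new Path("E:\\Hadoop\\TopN\\input"));
        // FileOutputFormat.setOutputPath(job, new Path("E:\\Hadoop\\TopN\\output"));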