MapReduce Traffic Statistics

Source data

1363157985066	13726230503	00-FD-07-A4-72-B8:CMCC	120.196.100.82	i02.c.aliimg.com		24	27	2481	24681	200
1363157995052 	13826544101	5C-0E-8B-C7-F1-E0:CMCC	120.197.40.4			4	0	264	0	200
1363157991076 	13926435656	20-10-7A-28-CC-0A:CMCC	120.196.100.99			2	4	132	1512	200
1363154400022 	13926251106	5C-0E-8B-8B-B1-50:CMCC	120.197.40.4			4	0	240	0	200
1363157993044 	18211575961	94-71-AC-CD-E6-18:CMCC-EASY	120.196.100.99	iface.qiyi.com	视频网站	15	12	1527	2106	200
1363157995074 	84138413	5C-0E-8B-8C-E8-20:7DaysInn	120.197.40.4	122.72.52.12		20	16	4116	1432	200
1363157993055 	13560439658	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			18	15	1116	954	200
1363157995033 	15920133257	5C-0E-8B-C7-BA-20:CMCC	120.197.40.4	sug.so.360.cn	信息安全	20	20	3156	2936	200
1363157983019 	13719199419	68-A1-B7-03-07-B1:CMCC-EASY	120.196.100.82			4	0	240	0	200
1363157984041 	13660577991	5C-0E-8B-92-5C-20:CMCC-EASY	120.197.40.4	s19.cnzz.com	站点统计	24	9	6960	690	200
1363157973098 	15013685858	5C-0E-8B-C7-F7-90:CMCC	120.197.40.4	rank.ie.sogou.com	搜索引擎	28	27	3659	3538	200
1363157986029 	15989002119	E8-99-C4-4E-93-E0:CMCC-EASY	120.196.100.99	www.umeng.com	站点统计	3	3	1938	180	200
1363157992093 	13560439658	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			15	9	918	4938	200
1363157986041 	13480253104	5C-0E-8B-C7-FC-80:CMCC-EASY	120.197.40.4			3	3	180	180	200
1363157984040 	13602846565	5C-0E-8B-8B-B6-00:CMCC	120.197.40.4	2052.flash2-http.qq.com	综合门户	15	12	1938	2910	200
1363157995093 	13922314466	00-FD-07-A2-EC-BA:CMCC	120.196.100.82	img.qfc.cn		12	12	3008	3720	200
1363157982040 	13502468823	5C-0A-5B-6A-0B-D4:CMCC-EASY	120.196.100.99	y0.ifengimg.com	综合门户	57	102	7335	110349	200
1363157986072 	18320173382	84-25-DB-4F-10-1A:CMCC-EASY	120.196.100.99	input.shouji.sogou.com	搜索引擎	21	18	9531	2412	200
1363157990043 	13925057413	00-1F-64-E1-E6-9A:CMCC	120.196.100.55	t3.baidu.com	搜索引擎	69	63	11058	48243	200
1363157988072 	13760778710	00-FD-07-A4-7B-08:CMCC	120.196.100.82			2	2	120	120	200
1363157985066 	13726238888	00-FD-07-A4-72-B8:CMCC	120.196.100.82	i02.c.aliimg.com		24	27	2481	24681	200
1363157993055 	13560436666	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			18	15	1116	954	200
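
Each line is tab-separated. The columns are, roughly: timestamp, phone number, MAC address:operator, server IP, visited host (optional), site category (optional), upstream packets, downstream packets, upstream bytes, downstream bytes, HTTP status code. Since the host and category columns can be empty, the safest way to pick out the byte counters is from the end of the split array, which is exactly what the basic mapper below does. A minimal standalone sketch of that parsing step (the class name is made up for illustration; the sample line is the first record above):

public class ParseDemo {
    public static void main(String[] args) {
        String line = "1363157985066\t13726230503\t00-FD-07-A4-72-B8:CMCC\t120.196.100.82\ti02.c.aliimg.com\t\t24\t27\t2481\t24681\t200";
        String[] split = line.split("\t");
        String phone = split[1];                             // phone number is always the second column
        long up = Long.parseLong(split[split.length - 3]);   // upstream bytes, counted from the end
        long down = Long.parseLong(split[split.length - 2]); // downstream bytes
        System.out.println(phone + "\t" + up + "\t" + down + "\t" + (up + down));
        // prints: 13726230503	2481	24681	27162
    }
}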

Basic version (per-phone upload, download, and total traffic)

package com.hrh.base;

import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Custom Hadoop value type carrying the upload, download, and total traffic
 * of one phone number. It is only used as a map output value in this job,
 * so plain Writable (serialization without ordering) is sufficient.
 */
public class FlowBean implements Writable {

    private long upFlow;
    private long downFlow;
    private long sumFlow;

    public FlowBean() {
    }

    public FlowBean(long upFlow, long downFlow, long sumFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = sumFlow;
    }

    public void set(long upFlow, long downFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = upFlow + downFlow;
    }

    public long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }

    public long getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(long downFlow) {
        this.downFlow = downFlow;
    }

    public long getSumFlow() {
        return sumFlow;
    }

    public void setSumFlow(long sumFlow) {
        this.sumFlow = sumFlow;
    }

    @Override
    public String toString() {
        return upFlow +"\t" + downFlow +"\t" + sumFlow;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(upFlow);
        out.writeLong(downFlow);
        out.writeLong(sumFlow);
    }


    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readLong();
        this.downFlow = in.readLong();
        this.sumFlow = in.readLong();
    }
}
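
The field order in write and readFields must match exactly: between map and reduce, Hadoop moves a FlowBean around only as the raw bytes these two methods produce and consume. A quick round-trip check with plain java.io streams (no cluster needed; the demo class is illustrative and sits in the same package as FlowBean):

package com.hrh.base;

import java.io.*;

public class RoundTripDemo {
    public static void main(String[] args) throws IOException {
        FlowBean in = new FlowBean();
        in.set(2481, 24681);                      // sumFlow becomes 27162

        // serialize the bean the same way Hadoop would
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        in.write(new DataOutputStream(buf));

        // deserialize into a fresh bean and check that the fields survived
        FlowBean out = new FlowBean();
        out.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(out);                  // prints: 2481	24681	27162
    }
}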

package com.hrh.base;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Extracts the phone number and the byte counters from each log line and
 * emits <phone, FlowBean>. The Text and FlowBean instances are reused
 * across map() calls, which is safe because context.write serializes them
 * immediately.
 */
public class FlowMapper extends Mapper<LongWritable,Text,Text,FlowBean> {

    FlowBean flowBean=new FlowBean();
    Text text=new Text();
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] split = line.split("\t");
        String phoneNum = split[1];

        text.set(phoneNum);
        // the host/category columns may be empty, so index the byte fields from the end
        flowBean.set(Long.parseLong(split[split.length - 3]),Long.parseLong(split[split.length - 2]));

        context.write(text,flowBean);
    }
}

package com.hrh.base;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Sums the upload and download traffic of all records sharing one phone
 * number and emits <phone, aggregated FlowBean>.
 */
public class FlowReducer extends Reducer<Text,FlowBean,Text,FlowBean> {

    FlowBean result = new FlowBean();

    @Override
    protected void reduce(Text key, Iterable<FlowBean> values, Context context) throws IOException, InterruptedException {
        long upSum = 0;
        long downSum = 0;
        // a phone number can appear in several log lines (e.g. 13560439658),
        // so the flows must be accumulated rather than taking the first record
        for (FlowBean value : values) {
            upSum += value.getUpFlow();
            downSum += value.getDownFlow();
        }
        result.set(upSum, downSum);
        context.write(key, result);
    }
}

package com.hrh.base;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Configures and launches the basic traffic-statistics job. The job runs
 * in local mode against paths on the Windows file system.
 */
public class FlowDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf=new Configuration();
        conf.set("mapreduce.framework.name","local");

        Job job = Job.getInstance(conf);

        job.setJarByClass(FlowDriver.class);

        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        FileInputFormat.setInputPaths(job,"E:\\test\\input\\data.txt");
        FileOutputFormat.setOutputPath(job,new Path("E:\\test\\output"));

        boolean b = job.waitForCompletion(true);

        System.exit(b?0:1);

    }
}
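
The framework is forced into local mode and the paths are hard-coded for a Windows test run; note that the output directory must not exist yet, otherwise the job aborts at startup. For a real submission you would typically take the paths from the command line instead, e.g.:

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));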

part-r-00000

13480253104	180	180	360
13502468823	7335	110349	117684
13560436666	1116	954	2070
13560439658	2034	5892	7926
13602846565	1938	2910	4848
13660577991	6960	690	7650
13719199419	240	0	240
13726230503	2481	24681	27162
13726238888	2481	24681	27162
13760778710	120	120	240
13826544101	264	0	264
13922314466	3008	3720	6728
13925057413	11058	48243	59301
13926251106	240	0	240
13926435656	132	1512	1644
15013685858	3659	3538	7197
15920133257	3156	2936	6092
15989002119	1938	180	2118
18211575961	1527	2106	3633
18320173382	9531	2412	11943
84138413	4116	1432	5548

Sorted version (descending by total traffic)

package com.hrh.sort;

import org.apache.hadoop.io.WritableComparable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Composite key for the sort job: orders records by total traffic
 * (descending), breaking ties by upload traffic (descending).
 */
public class FlowBean implements WritableComparable<FlowBean>{

    private long upFlow;
    private long downFlow;
    private long sumFlow;

    public FlowBean() {
    }

    public FlowBean(long upFlow, long downFlow, long sumFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = sumFlow;
    }

    public void set(long upFlow, long downFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = upFlow + downFlow;
    }

    public long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }

    public long getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(long downFlow) {
        this.downFlow = downFlow;
    }

    public long getSumFlow() {
        return sumFlow;
    }

    public void setSumFlow(long sumFlow) {
        this.sumFlow = sumFlow;
    }

    @Override
    public String toString() {
        return upFlow +"\t" + downFlow +"\t" + sumFlow;
    }

    @Override
    public int compareTo(FlowBean o) {
        // primary order: total traffic, descending
        if (this.sumFlow != o.getSumFlow()) {
            return this.sumFlow > o.getSumFlow() ? -1 : 1;
        }
        // tie-break on upload traffic, descending; return 0 for fully equal
        // keys so the comparator contract holds and equal keys share a group
        return Long.compare(o.getUpFlow(), this.upFlow);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(upFlow);
        out.writeLong(downFlow);
        out.writeLong(sumFlow);
    }


    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readLong();
        this.downFlow = in.readLong();
        this.sumFlow = in.readLong();
    }
}
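
Since the bean is now the map output key, the shuffle phase sorts all records with exactly this compareTo; no explicit sorting code appears anywhere else in the job. A standalone sanity check of the ordering (class name and values are made up for illustration):

package com.hrh.sort;

public class CompareDemo {
    public static void main(String[] args) {
        FlowBean a = new FlowBean();
        a.set(100, 900);                     // sumFlow 1000
        FlowBean b = new FlowBean();
        b.set(500, 100);                     // sumFlow 600

        System.out.println(a.compareTo(b));  // -1: the larger total sorts first
        System.out.println(b.compareTo(a));  //  1
        b.set(100, 900);                     // now identical to a
        System.out.println(a.compareTo(b));  //  0: equal keys share a reduce group
    }
}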

package com.hrh.sort;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Re-reads the aggregated output of the basic job and swaps key and value:
 * the FlowBean becomes the map output key so the shuffle sorts by traffic,
 * and the phone number travels along as the value.
 */
public class FlowMapper extends Mapper<LongWritable,Text,FlowBean,Text> {

    FlowBean flowBean=new FlowBean();
    Text text=new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // input line format (output of the basic job): phone \t up \t down \t sum
        String line = value.toString();
        String[] split = line.split("\t");
        String phoneNum = split[0];

        text.set(phoneNum);
        flowBean.set(Long.parseLong(split[1]),Long.parseLong(split[2]));

        context.write(flowBean,text);
    }
}

package com.hrh.sort;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Writes the sorted stream back as <phone, FlowBean>. Keys that compare
 * equal (same total and upload traffic) arrive as one reduce group, so
 * every phone number in the group must be emitted.
 */
public class FlowReducer extends Reducer<FlowBean,Text,Text,FlowBean> {

    @Override
    protected void reduce(FlowBean key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // taking only the first value would silently drop phones whose
        // traffic figures are identical (e.g. 13726238888 and 13726230503)
        for (Text phoneNum : values) {
            context.write(phoneNum, key);
        }
    }
}

package com.hrh.sort;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Launches the sort job; it consumes the basic job's output directory and
 * writes one globally sorted result file (a single reduce task by default).
 */
public class FlowDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf=new Configuration();
        conf.set("mapreduce.framework.name","local");

        Job job = Job.getInstance(conf);

        job.setJarByClass(FlowDriver.class);

        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);

        job.setMapOutputKeyClass(FlowBean.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        FileInputFormat.setInputPaths(job,"E:\\test\\output");
        FileOutputFormat.setOutputPath(job,new Path("E:\\test\\sortoutput"));

        boolean b = job.waitForCompletion(true);

        System.exit(b?0:1);

    }
}

part-r-00000

13502468823	7335	110349	117684
13925057413	11058	48243	59301
13726238888	2481	24681	27162
13726230503	2481	24681	27162
18320173382	9531	2412	11943
13560439658	2034	5892	7926
13660577991	6960	690	7650
15013685858	3659	3538	7197
13922314466	3008	3720	6728
15920133257	3156	2936	6092
84138413	4116	1432	5548
13602846565	1938	2910	4848
18211575961	1527	2106	3633
15989002119	1938	180	2118
13560436666	1116	954	2070
13926435656	132	1512	1644
13480253104	180	180	360
13826544101	264	0	264
13926251106	240	0	240
13719199419	240	0	240
13760778710	120	120	240

Partitioned and sorted version (partitioned by phone-number prefix)

package com.hrh.province;

import org.apache.hadoop.io.WritableComparable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Composite key shared with the sort job: orders by total traffic
 * (descending), then upload traffic (descending).
 */
public class FlowBean implements WritableComparable<FlowBean>{

    private long upFlow;
    private long downFlow;
    private long sumFlow;

    public FlowBean() {
    }

    public FlowBean(long upFlow, long downFlow, long sumFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = sumFlow;
    }

    public void set(long upFlow, long downFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = upFlow + downFlow;
    }

    public long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }

    public long getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(long downFlow) {
        this.downFlow = downFlow;
    }

    public long getSumFlow() {
        return sumFlow;
    }

    public void setSumFlow(long sumFlow) {
        this.sumFlow = sumFlow;
    }

    @Override
    public String toString() {
        return upFlow +"\t" + downFlow +"\t" + sumFlow;
    }

    @Override
    public int compareTo(FlowBean o) {
        // primary order: total traffic, descending
        if (this.sumFlow != o.getSumFlow()) {
            return this.sumFlow > o.getSumFlow() ? -1 : 1;
        }
        // tie-break on upload traffic, descending; return 0 for fully equal keys
        return Long.compare(o.getUpFlow(), this.upFlow);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(upFlow);
        out.writeLong(downFlow);
        out.writeLong(sumFlow);
    }


    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readLong();
        this.downFlow = in.readLong();
        this.sumFlow = in.readLong();
    }
}

package com.hrh.province;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Identical to the sort mapper: swaps key and value so the shuffle sorts
 * records by traffic within each partition.
 */
public class FlowMapper extends Mapper<LongWritable,Text,FlowBean,Text> {

    FlowBean flowBean=new FlowBean();
    Text text=new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] split = line.split("\t");
        String phoneNum = split[0];

        text.set(phoneNum);
        flowBean.set(Long.parseLong(split[1]),Long.parseLong(split[2]));

        context.write(flowBean,text);
    }
}

package com.hrh.province;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

import java.util.HashMap;
import java.util.Map;

/**
 * Routes each record to a reduce task by the first three digits of the
 * phone number: prefixes 134-138 get partitions 0-4, everything else lands
 * in partition 5. The driver must configure a matching number of reducers.
 */
public class ProvincePartition extends Partitioner<FlowBean,Text> {

    static Map<String,Integer> map = new HashMap<>();

    static{
        map.put("134",0);
        map.put("135",1);
        map.put("136",2);
        map.put("137",3);
        map.put("138",4);
    }

    @Override
    public int getPartition(FlowBean flowBean, Text text, int numPartitions) {
        String phoneNum = text.toString();
        // look up the three-digit prefix; unknown prefixes fall into the last partition
        Integer code = map.get(phoneNum.substring(0, 3));
        if(code!=null){
            return code;
        }
        return 5;
    }
}
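
getPartition runs on every map output record, and each partition number maps to one reduce task, so each part file ends up holding one prefix bucket, internally sorted by total traffic. A standalone check of the routing (the demo class is illustrative; the bean argument is ignored by this partitioner):

package com.hrh.province;

import org.apache.hadoop.io.Text;

public class PartitionDemo {
    public static void main(String[] args) {
        ProvincePartition p = new ProvincePartition();
        FlowBean dummy = new FlowBean();

        System.out.println(p.getPartition(dummy, new Text("13480253104"), 6)); // 0 (prefix 134)
        System.out.println(p.getPartition(dummy, new Text("13726230503"), 6)); // 3 (prefix 137)
        System.out.println(p.getPartition(dummy, new Text("84138413"), 6));    // 5 (unknown prefix 841)
    }
}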

package com.hrh.province;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Writes the sorted records of one partition back as <phone, FlowBean>,
 * emitting every phone number in a group of equal keys.
 */
public class FlowReducer extends Reducer<FlowBean,Text,Text,FlowBean> {

    @Override
    protected void reduce(FlowBean key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        for (Text phoneNum : values) {
            context.write(phoneNum, key);
        }
    }
}

package com.hrh.province;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Launches the partitioned sort job: six reduce tasks, one per phone-prefix
 * bucket defined in ProvincePartition.
 */
public class FlowDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf=new Configuration();
        conf.set("mapreduce.framework.name","local");

        Job job = Job.getInstance(conf);

        job.setJarByClass(FlowDriver.class);

        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);

        job.setMapOutputKeyClass(FlowBean.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        // six reduce tasks, one per partition defined in ProvincePartition
        job.setNumReduceTasks(6);
        job.setPartitionerClass(ProvincePartition.class);

        FileInputFormat.setInputPaths(job,"E:\\test\\output");
        FileOutputFormat.setOutputPath(job,new Path("E:\\test\\provinceoutput"));

        boolean b = job.waitForCompletion(true);

        System.exit(b?0:1);

    }
}

part-r-00000
13480253104	180	180	360

part-r-00001
13502468823	7335	110349	117684
13560439658	2034	5892	7926
13560436666	1116	954	2070

part-r-00002
13660577991	6960	690	7650
13602846565	1938	2910	4848

part-r-00003
13726238888	2481	24681	27162
13726230503	2481	24681	27162
13719199419	240	0	240
13760778710	120	120	240

part-r-00004
13826544101	264	0	264

part-r-00005
13925057413	11058	48243	59301
18320173382	9531	2412	11943
15013685858	3659	3538	7197
13922314466	3008	3720	6728
15920133257	3156	2936	6092
84138413	4116	1432	5548
18211575961	1527	2106	3633
15989002119	1938	180	2118
13926435656	132	1512	1644
13926251106	240	0	240

Grouped version (records with the same upload traffic form one group)

package com.hrh.group;

import org.apache.hadoop.io.WritableComparable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Same composite key as in the sort job; the grouping comparator below
 * decides separately which of these keys share a reduce group.
 */
public class FlowBean implements WritableComparable<FlowBean>{

    private long upFlow;
    private long downFlow;
    private long sumFlow;

    public FlowBean() {
    }

    public FlowBean(long upFlow, long downFlow, long sumFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = sumFlow;
    }

    public void set(long upFlow, long downFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = upFlow + downFlow;
    }

    public long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }

    public long getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(long downFlow) {
        this.downFlow = downFlow;
    }

    public long getSumFlow() {
        return sumFlow;
    }

    public void setSumFlow(long sumFlow) {
        this.sumFlow = sumFlow;
    }

    @Override
    public String toString() {
        return upFlow +"\t" + downFlow +"\t" + sumFlow;
    }

    @Override
    public int compareTo(FlowBean o) {
        // primary order: total traffic, descending
        if (this.sumFlow != o.getSumFlow()) {
            return this.sumFlow > o.getSumFlow() ? -1 : 1;
        }
        // tie-break on upload traffic, descending; return 0 for fully equal keys
        return Long.compare(o.getUpFlow(), this.upFlow);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(upFlow);
        out.writeLong(downFlow);
        out.writeLong(sumFlow);
    }


    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readLong();
        this.downFlow = in.readLong();
        this.sumFlow = in.readLong();
    }
}

package com.hrh.group;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Swaps key and value exactly like the sort mapper; the interesting part of
 * this job is the grouping comparator applied on the reduce side.
 */
public class FlowMapper extends Mapper<LongWritable,Text,FlowBean,Text> {

    FlowBean flowBean=new FlowBean();
    Text text=new Text();
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] split = line.split("\t");
        String phoneNum = split[0];

        text.set(phoneNum);
        flowBean.set(Long.parseLong(split[1]),Long.parseLong(split[2]));
        context.write(flowBean,text);
    }
}

package com.hrh.group;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

/**
 * Grouping comparator for the reduce phase: keys with equal upFlow are
 * treated as one group, so all of their phone numbers arrive in a single
 * reduce() call. Grouping only merges keys that end up adjacent in the
 * sort order defined by FlowBean.compareTo.
 */
public class GroupComparator extends WritableComparator {

    // required: pass the key class and createInstances=true so the framework
    // can instantiate FlowBean objects via reflection for comparison
    public GroupComparator() {
        super(FlowBean.class,true);
    }

    @Override
    public int compare(WritableComparable a, WritableComparable b) {
        FlowBean aa= (FlowBean) a;
        FlowBean bb= (FlowBean) b;

        // must honor the comparator contract: negative/zero/positive, never
        // 1 for both "less than" and "greater than"
        return Long.compare(aa.getUpFlow(), bb.getUpFlow());
    }
}
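
With createInstances=true, WritableComparator first deserializes both keys into FlowBean objects and then calls this compare method; a return value of 0 means the two keys belong to the same reduce group. The consequence, visible in the output below, is that phone numbers whose records are adjacent in sort order and share the same upload traffic get concatenated onto one line. A standalone sketch (class name and values are illustrative):

package com.hrh.group;

public class GroupDemo {
    public static void main(String[] args) {
        GroupComparator comparator = new GroupComparator();

        FlowBean a = new FlowBean();
        a.set(1116, 954);
        FlowBean b = new FlowBean();
        b.set(1116, 954);                              // same upFlow as a

        System.out.println(comparator.compare(a, b));  // 0: one group, both phones in one reduce() call
        b.set(2481, 24681);
        System.out.println(comparator.compare(a, b));  // negative: separate groups
    }
}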

package com.hrh.group;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Concatenates every phone number of one group (separated by "..") and
 * writes the result together with the group's key bean.
 */
public class FlowReducer extends Reducer<FlowBean,Text,Text,FlowBean> {

    @Override
    protected void reduce(FlowBean key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // collect all phone numbers the grouping comparator put into this group
        StringBuilder phones = new StringBuilder();
        for (Text value : values) {
            phones.append(value.toString()).append("..");
        }
        context.write(new Text(phones.toString()), key);
    }
}

package com.hrh.group;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Launches the grouping job: keys are sorted by FlowBean.compareTo and then
 * grouped by GroupComparator before reaching the reducer.
 */
public class FlowDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf=new Configuration();
        conf.set("mapreduce.framework.name","local");

        Job job = Job.getInstance(conf);

        job.setJarByClass(FlowDriver.class);

        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);

        job.setMapOutputKeyClass(FlowBean.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);


        // group keys with equal upFlow into a single reduce() call
        job.setGroupingComparatorClass(GroupComparator.class);

        FileInputFormat.setInputPaths(job,"E:\\test\\output");
        FileOutputFormat.setOutputPath(job,new Path("E:\\test\\groupoutput"));

        boolean b = job.waitForCompletion(true);

        System.exit(b?0:1);

    }
}

13502468823..	7335	110349	117684
13925057413..	11058	48243	59301
13726238888..13726230503..	2481	24681	27162
18320173382..	9531	2412	11943
13560439658..	2034	5892	7926
13660577991..	6960	690	7650
15013685858..	3659	3538	7197
13922314466..	3008	3720	6728
15920133257..	3156	2936	6092
84138413..	4116	1432	5548
13602846565..	1938	2910	4848
18211575961..	1527	2106	3633
15989002119..	1938	180	2118
13560436666..	1116	954	2070
13926435656..	132	1512	1644
13480253104..	180	180	360
13826544101..	264	0	264
13926251106..13719199419..	240	0	240
13760778710..	120	120	240