Encapsulating MapReduce data in a class

The core idea is to treat the information a job needs as a class of its own. This example computes per-phone-number traffic totals: upstream flow, downstream flow, and their sum are bundled into a custom bean (BeanFlow) that travels between the mapper, reducer, and partitioner.
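The mapper below indexes fields by position: splStr[1] is the phone number, and the up/down flow counters are the third- and second-to-last fields. So the input is assumed to be tab-separated log records shaped roughly like this (a hypothetical sample line; adjust the indices if your log layout differs):

1363157985066	13726230503	00-FD-07-A4-72-B8:CMCC	120.196.100.82	24	27	2481	24681	200

Here 13726230503 is the phone number, 2481 is the upstream flow, and 24681 is the downstream flow. The bean below wraps those two counters plus their sum, and implements WritableComparable so Hadoop can serialize it across the shuffle.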

package ProvinceMR;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.WritableComparable;

// A custom value type: WritableComparable gives Hadoop both serialization
// hooks (write/readFields) and a sort order (compareTo) if the bean is
// ever used as a key.
public class BeanFlow implements WritableComparable<BeanFlow> {
    private long upFlow;
    private long downFlow;
    private long sumFlow;

    // The no-arg constructor is required: Hadoop instantiates Writables
    // reflectively during deserialization.
    public BeanFlow() {}

    public BeanFlow(long upFlow, long downFlow) {
        this.upFlow = upFlow;
        this.downFlow = downFlow;
        this.sumFlow = upFlow + downFlow;
    }

    public long getUpFlow() {
        return upFlow;
    }
    public void setUpFlow(long upFlow) {
        this.upFlow = upFlow;
    }
    public long getDownFlow() {
        return downFlow;
    }
    public void setDownFlow(long downFlow) {
        this.downFlow = downFlow;
    }
    public long getSumFlow() {
        return sumFlow;
    }
    public void setSumFlow(long sumFlow) {
        this.sumFlow = sumFlow;
    }

    // Deserialization: must read fields in exactly the order write() emits them.
    @Override
    public void readFields(DataInput in) throws IOException {
        this.upFlow = in.readLong();
        this.downFlow = in.readLong();
        this.sumFlow = in.readLong();
    }

    // Serialization for the shuffle: fixed field order, mirrored by readFields().
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(upFlow);
        out.writeLong(downFlow);
        out.writeLong(sumFlow);
    }

    @Override
    public String toString() {
        return upFlow + "\t" + downFlow + "\t" + sumFlow;
    }   

    // Order by total flow. Long.compare returns 0 for equal values, as the
    // compareTo contract requires.
    @Override
    public int compareTo(BeanFlow o) {
        return Long.compare(this.sumFlow, o.sumFlow);
    }
}
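A quick way to sanity-check a custom Writable is a round trip through Java's data streams, which mirrors what Hadoop does during the shuffle. A minimal sketch (the test class and its values are illustrative, not part of the original code):

package ProvinceMR;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class BeanFlowRoundTrip {
    public static void main(String[] args) throws IOException {
        BeanFlow original = new BeanFlow(2481, 24681);

        // Serialize, exactly as the shuffle would.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance created via the no-arg constructor.
        BeanFlow copy = new BeanFlow();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        // Both lines print: 2481	24681	27162
        System.out.println(original);
        System.out.println(copy);
    }
}

Next, the mapper parses each log line and emits (phone number, BeanFlow) pairs.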
package ProvinceMR;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class FlowMapper extends Mapper<LongWritable, Text, Text, BeanFlow> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // One tab-separated log line per call.
        String[] splStr = value.toString().split("\t");
        // Field 1 is the phone number; the flow counters sit at fixed
        // offsets from the end of the line.
        String phoneNum = splStr[1];
        String upFlow = splStr[splStr.length - 3];
        String downFlow = splStr[splStr.length - 2];
        context.write(new Text(phoneNum),
                new BeanFlow(Long.parseLong(upFlow), Long.parseLong(downFlow)));
    }
}
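One allocation detail: map() runs once per input record, so creating a fresh Text for every line generates avoidable garbage on large inputs. A common Hadoop idiom is to reuse the output key across calls; a sketch of that variant (FlowMapperReuse is a hypothetical name, not part of the original):

package ProvinceMR;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class FlowMapperReuse extends Mapper<LongWritable, Text, Text, BeanFlow> {
    // Safe to reuse: context.write() serializes the key into the map output
    // buffer immediately, so mutating outKey on the next call is harmless.
    private final Text outKey = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] splStr = value.toString().split("\t");
        outKey.set(splStr[1]);
        context.write(outKey, new BeanFlow(
                Long.parseLong(splStr[splStr.length - 3]),
                Long.parseLong(splStr[splStr.length - 2])));
    }
}

The reducer then sums the per-record flows for each phone number.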
package ProvinceMR;

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class FlowReducer extends Reducer<Text, BeanFlow, Text, BeanFlow> {
    @Override
    protected void reduce(Text phoneNum, Iterable<BeanFlow> beanFlows, Context context)
            throws IOException, InterruptedException {
        long upFlow = 0;
        long downFlow = 0;
        // Hadoop reuses one BeanFlow instance while iterating, so copy the
        // primitive fields out rather than holding references to the bean.
        for (BeanFlow beanFlow : beanFlows) {
            upFlow += beanFlow.getUpFlow();
            downFlow += beanFlow.getDownFlow();
        }
        context.write(phoneNum, new BeanFlow(upFlow, downFlow));
    }
}
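Because the reduce step is a plain associative sum, the same class can double as a combiner, pre-aggregating map output before the shuffle to cut network traffic. This is an optional one-liner for the driver (the driver below does not set it):

// In ProvinceDriver, after setReducerClass(). Legal here because the
// combiner's output types (Text, BeanFlow) match the map output types.
job.setCombinerClass(FlowReducer.class);

The partitioner below is what splits results by province: it maps a phone-number prefix to a reduce-task index.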
package ProvinceMR;

import java.util.HashMap;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class PartionTest extends Partitioner<Text, BeanFlow> {

    // Phone-number prefix -> reduce-task index.
    private static final HashMap<String, Integer> provinceMap = new HashMap<String, Integer>();
    static {
        provinceMap.put("135", 0);
        provinceMap.put("136", 1);
        provinceMap.put("137", 2);
        provinceMap.put("138", 3);
        provinceMap.put("139", 4);
    }

    @Override
    public int getPartition(Text key, BeanFlow value, int numPartitions) {
        // Route by the first three digits of the phone number; anything
        // unmapped falls into the catch-all partition 5, so the driver must
        // run at least six reduce tasks.
        Integer code = provinceMap.get(key.toString().substring(0, 3));
        return code == null ? 5 : code;
    }
}
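Since the partitioner is plain Java, its routing logic can be exercised without a cluster (an illustrative snippet; getPartition ignores the value argument, so null is fine here):

PartionTest p = new PartionTest();
System.out.println(p.getPartition(new Text("13512345678"), null, 6)); // 0
System.out.println(p.getPartition(new Text("13912345678"), null, 6)); // 4
System.out.println(p.getPartition(new Text("15012345678"), null, 6)); // 5 (catch-all)

Finally, the driver wires the pieces together and submits the job.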
package ProvinceMR;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ProvinceDriver {

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

        // Expect exactly two arguments: the input path and the output path.
        if (args.length != 2) {
            System.err.println("Usage: ProvinceDriver <input path> <output path>");
            System.exit(2);
        }
        String path1 = args[0];
        String path2 = args[1];

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        job.setJarByClass(ProvinceDriver.class);

        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(BeanFlow.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(BeanFlow.class);

        // Route keys to reducers by phone prefix: six reduce tasks cover
        // partitions 0-4 plus the catch-all partition 5.
        job.setPartitionerClass(PartionTest.class);
        job.setNumReduceTasks(6);

        FileInputFormat.setInputPaths(job, new Path(path1));
        FileOutputFormat.setOutputPath(job, new Path(path2));
        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
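To run the job, package the classes into a jar and submit it with the hadoop launcher. The jar name and HDFS paths below are placeholders; note that FileOutputFormat fails the job if the output directory already exists:

hadoop jar flow-stats.jar ProvinceMR.ProvinceDriver /flow/input /flow/output

Each of the six reduce tasks writes its own part-r-0000N file, so the per-province totals land in separate output files.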