PhoneNumber: counting mobile numbers by prefix
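
This post walks through a small MapReduce job: it reads a CSV file of phone numbers, counts how many numbers share each three-digit prefix (130–139), and routes every prefix to its own reduce partition. The mapper only looks at the first CSV column, so a minimal input file might look like this (records are hypothetical; any further columns are ignored):

13512345678,zhangsan
13287654321,lisi
13011112222,wangwu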

 

PhoneMapper



import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class PhoneMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Output key/value objects are reused across map() calls to avoid per-record allocation
    Text text = new Text();
    IntWritable intWritable = new IntWritable();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        System.out.println("PhoneMapper key:" + key.get() + " value:" + value.toString());
        // The phone number is the first CSV column; its first three digits form the output key
        String phone = value.toString().split(",")[0].substring(0, 3);
        text.set(phone);
        intWritable.set(1);
        context.write(text, intWritable);
    }
}
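
As a quick worked example of what map() does with one record, the prefix extraction can be exercised on its own (class name and record are hypothetical):

public class PrefixExtractionDemo {
    public static void main(String[] args) {
        String line = "13512345678,zhangsan";               // hypothetical input record
        String prefix = line.split(",")[0].substring(0, 3); // first three digits of the first column
        System.out.println(prefix);                         // prints 135
    }
}

For this record the mapper emits the pair (135, 1); every phone number contributes a count of 1 under its prefix.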

PhonePartitioner (partitioner)



import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class PhonePartitioner extends Partitioner<Text, IntWritable> {
    @Override
    public int getPartition(Text text, IntWritable intWritable, int numPartitions) {
        System.out.println("After PhoneMapper, entering PhonePartitioner, key: " + text);
        // Route each prefix 130-139 to its own reduce task; the partition index is the last digit of the prefix
        int pnum = 0;
        switch (text.toString().substring(0, 3)) {
            case "130":
                pnum = 0;
                break;
            case "131":
                pnum = 1;
                break;
            case "132":
                pnum = 2;
                break;
            case "133":
                pnum = 3;
                break;
            case "134":
                pnum = 4;
                break;
            case "135":
                pnum = 5;
                break;
            case "136":
                pnum = 6;
                break;
            case "137":
                pnum = 7;
                break;
            case "138":
                pnum = 8;
                break;
            case "139":
                pnum = 9;
                break;
        }
        return pnum;
    }
}
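
Since the partition index is just the last digit of the prefix, the mapping is easy to sanity-check outside a running job. A minimal sketch, assuming PhonePartitioner is on the classpath (the class name PartitionCheck is hypothetical):

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class PartitionCheck {
    public static void main(String[] args) {
        PhonePartitioner partitioner = new PhonePartitioner();
        // "135" should land in partition 5, "130" in partition 0
        System.out.println(partitioner.getPartition(new Text("135"), new IntWritable(1), 10)); // 5
        System.out.println(partitioner.getPartition(new Text("130"), new IntWritable(1), 10)); // 0
    }
}

The driver below sets job.setNumReduceTasks(10) so that every index this method can return maps to a real reduce task; with fewer reducers the map tasks would fail with an illegal-partition error.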

PhoneCombiner (map-side aggregation)


import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

// Pre-aggregates counts per prefix on the map side. Its output types must match the reducer's
// input types (Text, IntWritable), which is why it emits IntWritable rather than LongWritable.
public class PhoneCombiner extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        System.out.println("Entering PhoneCombiner, key: " + key);
        int count = 0;
        for (IntWritable intWritable : values) {
            count += intWritable.get();
        }
        context.write(key, new IntWritable(count));
        System.out.println(" value:" + count);
    }
}

PhoneReducer


import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class PhoneReducer extends Reducer<Text, IntWritable, Text, LongWritable> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        System.out.print("Entering PhoneReducer, key: " + key);
        // Sum the (possibly combiner-aggregated) counts for this prefix into a long total
        long count = 0;
        for (IntWritable intWritable : values) {
            count += intWritable.get();
        }
        context.write(key, new LongWritable(count));
        System.out.println(" value:" + count);
    }
}

PhoneDriver

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class PhoneDriver {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration configuration = new Configuration();
        Job job = Job.getInstance(configuration);
        // Driver (main) class
        job.setJarByClass(PhoneDriver.class);
        // Mapper class and its output key/value types
        job.setMapperClass(PhoneMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // Partitioner: one reduce task per prefix 130-139, so ten reducers are needed
        job.setPartitionerClass(PhonePartitioner.class);
        job.setNumReduceTasks(10);
        // Map-side (in-partition) aggregation
        job.setCombinerClass(PhoneCombiner.class);
        // Reducer class and the job's final output key/value types
        job.setReducerClass(PhoneReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        FileInputFormat.setInputPaths(job, new Path("E:\\hadoopstu\\in\\demo2\\phone.csv"));
//        Path outPath = new Path("E:\\hadoopstu\\in\\out2");
        Path outPath = new Path("hdfs://linux01:9000/output1");
        // Delete the output directory if it already exists, otherwise the job would fail
        FileSystem fs = FileSystem.get(outPath.toUri(), configuration);
        if (fs.exists(outPath)) {
            fs.delete(outPath, true);
        }
        FileOutputFormat.setOutputPath(job, outPath);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
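
After a successful run, the output directory hdfs://linux01:9000/output1 holds one file per reduce task, part-r-00000 through part-r-00009. Because the partitioner routes each prefix 130–139 to its own task (any other prefix would fall into partition 0 together with 130), each file normally contains a single line: the prefix and its total, tab-separated by the default TextOutputFormat. For example, part-r-00005 might contain (the count is hypothetical):

135	42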
