手机自定义分区

本文详细介绍了手机号码自定义分区的PhionPartition、Mapper、Reducer和Driver四个关键部分,揭示了自定义分区的实现流程和技术细节。
摘要由CSDN通过智能技术生成

PhionPartition

java
package phion;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Partitioner;

public class PhionPartition extends Partitioner<Text,NullWritable> {
    public static final String[]YD={
            "134","135","136",
            "137","138","139",
            "150","151","152",
            "157","158","159",
            "188","187","182",
            "183","184","178",
            "147","172","198"
    };
    //联通前三位
    public  static final String[]LT={
            "130","131","132",
            "145","155","156",
            "166","171","175",
            "176","185","186","166"
    };
    //电信前三位
    public  static final String[]DX={
            "133","149","153",
            "173","177","180",
            "181","189","199"};

    public int getPartition(Text text, NullWritable nullWritable, int i) {
        String phone=text.toString();
        String phone1=phone.substring(0,3);
        if(Arrays.asList(YD).contains(phone1)){
            return 0;
        }else if(Arrays.asList(LT).contains(phone1)){
            return 1;
        }else{
            return 2;
        }
    }
};

Mapper

java
package phion;

import org.apache.commons.lang.ObjectUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

class PartitionMapper extends Mapper<LongWritable, Text,Text, NullWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        //获取数据
        String line = value.toString();
        //切分“\t”
        String[] phions = line.split("\t");

        for (String phion : phions){
            context.write(new Text(phion), NullWritable.get());
        }
    }
}

Reduce

java
package phion;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Deduplicating reducer: all values grouped under one phone-number key are
 * NullWritable, so writing the key exactly once yields the distinct set of
 * numbers. Input and output types match the map output types, which is what
 * also makes this class usable as a combiner.
 */
public class PhionReduce extends Reducer<Text, NullWritable, Text, NullWritable> {

    @Override
    protected void reduce(Text key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        // The values iterable is intentionally ignored; one key, one line out.
        context.write(key, NullWritable.get());
    }
}

Driver

java
package phion;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * Driver for the phone-number partitioning job: reads tab-delimited phone
 * numbers, deduplicates them, and writes them into three output files split
 * by carrier (China Mobile / China Unicom / other) via PhionPartition.
 */
public class PhionDriver {
    public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {
        // Job configuration (defaults; no cluster-specific overrides here).
        Configuration configuration = new Configuration();

        Job job = Job.getInstance(configuration);
        // BUG FIX: was setJarByClass(Partitioner.class), which points at the
        // Hadoop framework jar instead of this project's jar, so the job
        // classes would not be shipped to the cluster.
        job.setJarByClass(PhionDriver.class);

        // Mapper and its output key/value types.
        job.setMapperClass(PartitionMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        // Map-side pre-aggregation (dedup); safe as a combiner because the
        // reducer's input and output types are both (Text, NullWritable).
        job.setCombinerClass(PhionReduce.class);

        // Reducer and the job's final output types.
        job.setReducerClass(PhionReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // BUG FIX: the custom partitioner was never registered, so the
        // default HashPartitioner would have been used and the three reduce
        // tasks would NOT have corresponded to the three carriers.
        job.setPartitionerClass(PhionPartition.class);
        // One reduce task per carrier partition (0=Mobile, 1=Unicom, 2=other).
        job.setNumReduceTasks(3);

        // Input file and output path. NOTE: despite the ".txt" suffix the
        // output path is created as a directory and must not already exist.
        FileInputFormat.setInputPaths(job, new Path("G:\\a\\phone.txt"));
        FileOutputFormat.setOutputPath(job, new Path("G:\\a\\phone1.txt"));

        // Submit, block until completion, and exit nonzero on failure so
        // scripts invoking this driver can detect a failed job.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值