二次排序原理

二次排序原理

二次排序原理
自定义key,组合key

二次排序

MapReduce 框架只会对 key 排序,value 本身是无序的。
二次排序的思路:把需要排序的 value(气温)并入 key,构造组合 key,借助框架的 key 排序能力间接实现对 value 的排序。
1.自定义key
	package com.it18zhang.hdfs.maxtemp.allsort.secondarysort;

	import org.apache.hadoop.io.WritableComparable;

	import java.io.DataInput;
	import java.io.DataOutput;
	import java.io.IOException;

	/**
	 * 自定义组合key
	 */
	public class ComboKey implements WritableComparable<ComboKey> {
		private int year ;
		private int temp ;

		public int getYear() {
			return year;
		}

		public void setYear(int year) {
			this.year = year;
		}

		public int getTemp() {
			return temp;
		}

		public void setTemp(int temp) {
			this.temp = temp;
		}

		/**
		 * 对key进行比较实现
		 */
		public int compareTo(ComboKey o) {
			int y0 = o.getYear();
			int t0 = o.getTemp() ;
			//年份相同(升序)
			if(year == y0){
				//气温降序
				return -(temp - t0) ;
			}
			else{
				return year - y0 ;
			}
		}

		/**
		 * 串行化过程
		 */
		public void write(DataOutput out) throws IOException {
			//年份
			out.writeInt(year);
			//气温
			out.writeInt(temp);
		}

		public void readFields(DataInput in) throws IOException {
			year = in.readInt();
			temp = in.readInt();
		}
	}

2.自定义分区类,按照年份分区
	/**
	 * 自定义分区类
	 */
	public class YearPartitioner extends Partitioner<ComboKey,NullWritable> {

		public int getPartition(ComboKey key, NullWritable nullWritable, int numPartitions) {
			int year = key.getYear();
			return year % numPartitions;
		}
	}

3.定义分组对比器
	/**
	 * Grouping comparator: treats all ComboKeys with the same year as one
	 * group, so a single reduce() call receives every temperature of a year.
	 */
	public class YearGroupComparator extends WritableComparator {

		protected YearGroupComparator() {
			//true: let the parent instantiate ComboKey objects for comparison
			super(ComboKey.class, true);
		}

		/**
		 * Compares only the year; Integer.compare avoids the sign-overflow
		 * bug of plain int subtraction.
		 */
		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			ComboKey k1 = (ComboKey)a ;
			ComboKey k2 = (ComboKey)b ;
			return Integer.compare(k1.getYear(), k2.getYear());
		}
	}

4.定义Key排序对比器
	package com.it18zhang.hdfs.maxtemp.allsort.secondarysort;

	import org.apache.hadoop.io.WritableComparable;
	import org.apache.hadoop.io.WritableComparator;

	/**
	 *ComboKeyComparator
	 */
	public class ComboKeyComparator extends WritableComparator {

		protected ComboKeyComparator() {
			super(ComboKey.class, true);
		}

		public int compare(WritableComparable a, WritableComparable b) {
			ComboKey k1 = (ComboKey) a;
			ComboKey k2 = (ComboKey) b;
			return k1.compareTo(k2);
		}
	}

5.编写Mapper
6.编写Reducer
	package com.it18zhang.hdfs.maxtemp.allsort.secondarysort;

	import org.apache.commons.lang.ObjectUtils;
	import org.apache.hadoop.io.IntWritable;
	import org.apache.hadoop.io.NullWritable;
	import org.apache.hadoop.mapreduce.Reducer;

	import java.io.IOException;

	/**
	 * Reducer
	 */
	public class MaxTempReducer extends Reducer<ComboKey, NullWritable, IntWritable, IntWritable>{

		/**
		 */
		protected void reduce(ComboKey key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
			int year = key.getYear();
			int temp = key.getTemp();
			System.out.println("==============>reduce");
			for(NullWritable v : values){
				System.out.println(key.getYear() + " : " + key.getTemp());
			}
			context.write(new IntWritable(year),new IntWritable(temp));
		}
	}

7.App
	package com.it18zhang.hdfs.maxtemp.allsort.secondarysort;

	import org.apache.hadoop.conf.Configuration;
	import org.apache.hadoop.fs.Path;
	import org.apache.hadoop.io.IntWritable;
	import org.apache.hadoop.io.NullWritable;
	import org.apache.hadoop.mapreduce.Job;
	import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
	import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
	import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

	/**
	 *
	 */
	/**
	 * Driver: wires the secondary-sort job together (composite key,
	 * year partitioner, year grouping comparator, key sort comparator).
	 */
	public class MaxTempApp {
		public static void main(String[] args) throws Exception {

			Configuration conf = new Configuration();
			//run against the local file system for easy IDE testing
			conf.set("fs.defaultFS","file:///");

			Job job = Job.getInstance(conf);

			//basic job attributes
			job.setJobName("SecondarySortApp");                  //job name
			job.setJarByClass(MaxTempApp.class);                 //jar locator class
			job.setInputFormatClass(TextInputFormat.class);      //input format

			//input path
			FileInputFormat.addInputPath(job,new Path(args[0]));
			//output path (must not already exist)
			FileOutputFormat.setOutputPath(job,new Path(args[1]));

			job.setMapperClass(MaxTempMapper.class);             //mapper class
			job.setReducerClass(MaxTempReducer.class);           //reducer class

			//map output types
			job.setMapOutputKeyClass(ComboKey.class);
			job.setMapOutputValueClass(NullWritable.class);

			//reduce output types
			job.setOutputKeyClass(IntWritable.class);
			job.setOutputValueClass(IntWritable.class);

			//partition by year
			job.setPartitionerClass(YearPartitioner.class);
			//group all keys of the same year into one reduce call
			job.setGroupingComparatorClass(YearGroupComparator.class);
			//sort by year asc, temperature desc
			job.setSortComparatorClass(ComboKeyComparator.class);

			job.setNumReduceTasks(3);                           //number of reducers

			//propagate success/failure to the shell exit code instead of
			//discarding the boolean and always exiting 0
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		}
	}

之后IDEA或者ECLIPSE下运行就可以了。
IDEA需要导入依赖或者相关包。
感兴趣的可以关注微信公众号:IT编程学习栈
扫描下方二维码关注
在这里插入图片描述

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

IT编程学习栈

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值