MapReduce 二次排序示例(字符串类型的组合键:先按 name 排序,name 相同时再按 addr 排序)



import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

public class StringPair implements WritableComparable {
	private String name;
	private String addr;

	public void write(DataOutput out) throws IOException {
		out.writeUTF(name);
		out.writeUTF(addr);
	}

	public void readFields(DataInput in) throws IOException {
		this.name = in.readUTF();
		this.addr = in.readUTF();
	}

	public int compareTo(StringPair o) {
		if (!this.name.equals(o.name)) {
			return (int) this.name.compareTo(o.name);
		} else {
			return (int) this.addr.compareTo(o.addr);
		}
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getAddr() {
		return addr;
	}

	public void setAddr(String addr) {
		this.addr = addr;
	}

}





package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for a MapReduce "secondary sort" over String data.
 * Input lines are "name,addr"; the composite key {@link StringPair} sorts by
 * name then addr, while partitioning and grouping use only the name, so each
 * reduce() call sees one name with its addresses already in sorted order.
 */
public class SecondSort {

	/** Routes records by name only, so all rows for one name meet in one reducer. */
	public static class MyPatitioner extends Partitioner<StringPair, Text> {

		@Override
		public int getPartition(StringPair key, Text value, int numPartitions) {
			// Mask the sign bit so a negative hashCode cannot produce a negative partition.
			return (key.getName().hashCode() & Integer.MAX_VALUE) % numPartitions;
		}
	}

	/** Groups reduce input by name only, ignoring the addr half of the key. */
	public static class MyGroup extends WritableComparator {
		public MyGroup() {
			// Register the key type with the parent so it can instantiate
			// StringPair objects for comparison (second argument = true).
			super(StringPair.class, true);
		}

		@SuppressWarnings("rawtypes")
		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			// Compare only the first component of the composite key.
			return ((StringPair) a).getName().compareTo(((StringPair) b).getName());
		}
	}

	/** Parses "name,addr" lines into a composite key plus the addr as the value. */
	public static class SSmap extends Mapper<Object, Text, StringPair, Text> {
		@Override
		protected void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split(",");
			if (fields.length < 2) {
				// Skip malformed lines instead of failing the whole task
				// with an ArrayIndexOutOfBoundsException.
				return;
			}
			StringPair pair = new StringPair();
			pair.setName(fields[0]);
			pair.setAddr(fields[1]);
			context.write(pair, new Text(fields[1]));
		}
	}

	/** Emits each name with its (already sorted) addresses, then a separator row. */
	public static class SSreduce extends Reducer<StringPair, Text, Text, Text> {

		@Override
		protected void reduce(StringPair key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			for (Text addr : values) {
				context.write(new Text(key.getName()), addr);
			}
			context.write(new Text("----------------------------------"), new Text(""));
		}
	}

	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);

		Path in = new Path("/input4");
		Path out = new Path("/output5");
		// Remove a stale output directory so the job does not abort at submit time.
		FileSystem hdfs = FileSystem.get(conf);
		if (hdfs.exists(out)) {
			hdfs.delete(out, true);
		}

		job.setJarByClass(SecondSort.class);
		job.setMapperClass(SSmap.class);
		job.setReducerClass(SSreduce.class);

		job.setMapOutputKeyClass(StringPair.class);
		job.setMapOutputValueClass(Text.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		job.setPartitionerClass(MyPatitioner.class);
		job.setGroupingComparatorClass(MyGroup.class);

		job.setNumReduceTasks(2);

		FileInputFormat.addInputPath(job, in);
		FileOutputFormat.setOutputPath(job, out);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}











 {
	private String name;
	private String addr;

	public void write(DataOutput out) throws IOException {
		out.writeUTF(name);
		out.writeUTF(addr);
	}

	public void readFields(DataInput in) throws IOException {
		this.name = in.readUTF();
		this.addr = in.readUTF();
	}

	public int compareTo(StringPair o) {
		if (!this.name.equals(o.name)) {
			return (int) this.name.compareTo(o.name);
		} else {
			return (int) this.addr.compareTo(o.addr);
		}
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getAddr() {
		return addr;
	}

	public void setAddr(String addr) {
		this.addr = addr;
	}

}





package test;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for a MapReduce "secondary sort" over String data (duplicate paste
 * of the same class earlier in this scrape).
 * Input lines are "name,addr"; the composite key {@link StringPair} sorts by
 * name then addr, while partitioning and grouping use only the name, so each
 * reduce() call sees one name with its addresses already in sorted order.
 */
public class SecondSort {

	/** Routes records by name only, so all rows for one name meet in one reducer. */
	public static class MyPatitioner extends Partitioner<StringPair, Text> {

		@Override
		public int getPartition(StringPair key, Text value, int numPartitions) {
			// Mask the sign bit so a negative hashCode cannot produce a negative partition.
			return (key.getName().hashCode() & Integer.MAX_VALUE) % numPartitions;
		}
	}

	/** Groups reduce input by name only, ignoring the addr half of the key. */
	public static class MyGroup extends WritableComparator {
		public MyGroup() {
			// Register the key type with the parent so it can instantiate
			// StringPair objects for comparison (second argument = true).
			super(StringPair.class, true);
		}

		@SuppressWarnings("rawtypes")
		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			// Compare only the first component of the composite key.
			return ((StringPair) a).getName().compareTo(((StringPair) b).getName());
		}
	}

	/** Parses "name,addr" lines into a composite key plus the addr as the value. */
	public static class SSmap extends Mapper<Object, Text, StringPair, Text> {
		@Override
		protected void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split(",");
			if (fields.length < 2) {
				// Skip malformed lines instead of failing the whole task
				// with an ArrayIndexOutOfBoundsException.
				return;
			}
			StringPair pair = new StringPair();
			pair.setName(fields[0]);
			pair.setAddr(fields[1]);
			context.write(pair, new Text(fields[1]));
		}
	}

	/** Emits each name with its (already sorted) addresses, then a separator row. */
	public static class SSreduce extends Reducer<StringPair, Text, Text, Text> {

		@Override
		protected void reduce(StringPair key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			for (Text addr : values) {
				context.write(new Text(key.getName()), addr);
			}
			context.write(new Text("----------------------------------"), new Text(""));
		}
	}

	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);

		Path in = new Path("/input4");
		Path out = new Path("/output5");
		// Remove a stale output directory so the job does not abort at submit time.
		FileSystem hdfs = FileSystem.get(conf);
		if (hdfs.exists(out)) {
			hdfs.delete(out, true);
		}

		job.setJarByClass(SecondSort.class);
		job.setMapperClass(SSmap.class);
		job.setReducerClass(SSreduce.class);

		job.setMapOutputKeyClass(StringPair.class);
		job.setMapOutputValueClass(Text.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		job.setPartitionerClass(MyPatitioner.class);
		job.setGroupingComparatorClass(MyGroup.class);

		job.setNumReduceTasks(2);

		FileInputFormat.addInputPath(job, in);
		FileOutputFormat.setOutputPath(job, out);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}











  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值