package first.first_maven;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.WritableComparable;
/**
 * Composite key of two ints used for MapReduce secondary sort:
 * records are ordered by {@code first} ascending, then {@code second} ascending.
 *
 * <p>{@code equals}/{@code hashCode} are overridden consistently with
 * {@link #compareTo} so that equal keys hash to the same partition under
 * Hadoop's default HashPartitioner.
 */
public class SecondSortWritable implements WritableComparable<SecondSortWritable> {
	/** Primary sort field. */
	public int first;
	/** Secondary sort field (tie-breaker). */
	public int second;

	/** No-arg constructor required by Hadoop's reflective Writable instantiation. */
	public SecondSortWritable() {}

	public SecondSortWritable(int first, int second) {
		this.first = first;
		this.second = second;
	}

	public int getFirst() {
		return first;
	}

	public void setFirst(int first) {
		this.first = first;
	}

	public int getSecond() {
		return second;
	}

	public void setSecond(int second) {
		this.second = second;
	}

	/** Deserializes both fields in the same order {@link #write} emits them. */
	@Override
	public void readFields(DataInput in) throws IOException {
		this.first = in.readInt();
		this.second = in.readInt();
	}

	/** Serializes {@code first} then {@code second}. */
	@Override
	public void write(DataOutput out) throws IOException {
		out.writeInt(this.first);
		out.writeInt(this.second);
	}

	/**
	 * Orders by {@code first} ascending, breaking ties by {@code second} ascending.
	 * Uses {@link Integer#compare} instead of subtraction, which can overflow
	 * (e.g. {@code Integer.MIN_VALUE - 1}) and violate the compareTo contract.
	 */
	@Override
	public int compareTo(SecondSortWritable o) {
		int cmp = Integer.compare(this.first, o.first);
		if (cmp == 0) {
			cmp = Integer.compare(this.second, o.second);
		}
		return cmp;
	}

	/** Consistent with {@link #compareTo}: equal iff both fields match. */
	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof SecondSortWritable)) {
			return false;
		}
		SecondSortWritable other = (SecondSortWritable) obj;
		return first == other.first && second == other.second;
	}

	/** Required alongside equals so HashPartitioner sends equal keys to one reducer. */
	@Override
	public int hashCode() {
		return 31 * first + second;
	}

	@Override
	public String toString() {
		return "first=" + first + ", second=" + second;
	}
}
package first.first_maven;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * MapReduce driver for secondary sort: reads lines of "first second" int pairs,
 * emits them as {@link SecondSortWritable} keys, and relies on the key's
 * compareTo to produce output sorted by first then second.
 */
public class SecondSort {

	/**
	 * Parses each input line into a (first, second) int pair and emits it as the
	 * map output key with a NullWritable value.
	 */
	public static class MyMapper extends Mapper<LongWritable, Text, SecondSortWritable, NullWritable> {
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] words = value.toString().split(" ");
			// Skip blank/malformed lines instead of throwing ArrayIndexOutOfBoundsException.
			if (words.length < 2) {
				return;
			}
			SecondSortWritable sw =
					new SecondSortWritable(Integer.parseInt(words[0]), Integer.parseInt(words[1]));
			context.write(sw, NullWritable.get());
		}
	}

	/**
	 * Writes each key once per associated value, so duplicate (first, second)
	 * pairs in the input are preserved in the output rather than collapsed.
	 */
	public static class MyReducer extends Reducer<SecondSortWritable, NullWritable, SecondSortWritable, NullWritable> {
		@Override
		protected void reduce(SecondSortWritable key, Iterable<NullWritable> value, Context context)
				throws IOException, InterruptedException {
			// Iterate the grouped values: emitting the key only once would silently
			// drop duplicate input pairs.
			for (NullWritable v : value) {
				context.write(key, v);
			}
		}
	}

	/**
	 * Configures and submits the job.
	 *
	 * @param args args[0] = input path, args[1] = output path
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf, "myjob");
		// Must reference this driver class (was WordCount.class, a copy-paste bug)
		// so Hadoop can locate the correct job jar.
		job.setJarByClass(SecondSort.class);
		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(SecondSortWritable.class);
		job.setMapOutputValueClass(NullWritable.class);
		FileInputFormat.addInputPath(job, new Path(args[0]));
		job.setReducerClass(MyReducer.class);
		job.setOutputKeyClass(SecondSortWritable.class);
		job.setOutputValueClass(NullWritable.class);
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		int isok = job.waitForCompletion(true) ? 0 : 1;
		System.exit(isok);
	}
}