Spark Series 1: DIY Secondary Sort

Create the secondarysort key class: it implements Comparable (order by the first field, then by the second when the first fields are equal) and Serializable so Spark can shuffle it.
package cn.spark.study.core;
import java.io.Serializable;
public class secondarysort implements Comparable<secondarysort>,Serializable{

 private static final long serialVersionUID = 2757892230477242825L;
 private int first;
 private int second;
 
 public secondarysort(int first, int second) {
  super();
  this.first = first;
  this.second = second;
 }
 public secondarysort() {
  super();
 }
 public int getFirst() {
  return first;
 }
 public void setFirst(int first) {
  this.first = first;
 }
 public int getSecond() {
  return second;
 }
 public void setSecond(int second) {
  this.second = second;
 }
   
 @Override
 public String toString() {
  return "secondarysort [first=" + first + ", second=" + second + "]";
 }
   
 @Override
 public int hashCode() {
  final int prime = 31;
  int result = 1;
  result = prime * result + first;
  result = prime * result + second;
  return result;
 }
 @Override
 public boolean equals(Object obj) {
  if (this == obj)
   return true;
  if (obj == null)
   return false;
  if (getClass() != obj.getClass())
   return false;
  secondarysort other = (secondarysort) obj;
  if (first != other.first)
   return false;
  if (second != other.second)
   return false;
  return true;
 }
 @Override
 public int compareTo(secondarysort other) {
  // ascending by first; fall back to second when the first fields are equal
  // Integer.compare avoids the overflow risk of plain subtraction
  int res = Integer.compare(this.getFirst(), other.getFirst());
  if (res == 0) {
   res = Integer.compare(this.getSecond(), other.getSecond());
  }
  return res;
 }
}
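
Before wiring the key class into Spark, its ordering can be sanity-checked with plain Java. A minimal sketch, assuming the class above is on the classpath; the class name comparetotest and the sample values are made up for illustration:

package cn.spark.study.core;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class comparetotest {
 public static void main(String[] args) {
  // a few keys in arbitrary order
  List<secondarysort> keys = new ArrayList<secondarysort>();
  keys.add(new secondarysort(3, 5));
  keys.add(new secondarysort(1, 9));
  keys.add(new secondarysort(3, 1));
  keys.add(new secondarysort(1, 2));
  // Collections.sort uses compareTo: ascending by first, then by second
  Collections.sort(keys);
  for (secondarysort key : keys) {
   System.out.println(key);
  }
  // expected order: (1,2), (1,9), (3,1), (3,5)
 }
}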






Create the secondsortdemo driver class: map each input line to a (secondarysort, line) pair, sort with sortByKey (which uses the compareTo defined above), then drop the key to get the lines back in sorted order.

package cn.spark.study.core;
import java.util.Comparator;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class secondsortdemo {
 public static void main(String[] args) {
  SparkConf conf = new SparkConf()
    .setAppName("collectionparallelize")
    .setMaster("local");
  JavaSparkContext sc = new JavaSparkContext(conf);
  JavaRDD<String> lines = sc.textFile("C://Users//hlz//Desktop/sort.txt");
  
  JavaPairRDD<secondarysort,String> comparableRDD = lines.mapToPair(new PairFunction<String,secondarysort,String>(){
   
   private static final long serialVersionUID = 1L;
   @Override
   public Tuple2<secondarysort, String> call(String line) throws Exception {
    String[] split = line.split(" ");
    secondarysort ss = new secondarysort(Integer.valueOf(split[0]),Integer.valueOf(split[1]));
    return new Tuple2<secondarysort, String>(ss,line);
   }
   
  });
  
  JavaPairRDD<secondarysort,String> sortedRDD = comparableRDD.sortByKey();
  
  JavaRDD<String> sortrdd = sortedRDD.map(new Function<Tuple2<secondarysort,String>,String>(){
   
   private static final long serialVersionUID = 1L;
   @Override
   public String call(Tuple2<secondarysort, String> v1) throws Exception {
     // drop the composite key and keep only the original line
    return v1._2;
   }
   
  });
  
  sortrdd.foreach(new VoidFunction<String>(){
   private static final long serialVersionUID = 1L;
   @Override
   public void call(String t) throws Exception {
     // print each line of the sorted result
    System.out.println(t);
   }
   
   
  });
  
  sc.close();
 }
}
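
If sort.txt is not available, the same pipeline can be exercised against an in-memory collection via parallelize. A minimal sketch, assuming the two classes above compile together; the class name secondsortparallelizetest and the sample lines are made up for illustration:

package cn.spark.study.core;
import java.util.Arrays;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;
public class secondsortparallelizetest {
 public static void main(String[] args) {
  SparkConf conf = new SparkConf()
    .setAppName("secondsortparallelizetest")
    .setMaster("local");
  JavaSparkContext sc = new JavaSparkContext(conf);
  // hypothetical sample input, same "first second" format as sort.txt
  JavaRDD<String> lines = sc.parallelize(Arrays.asList("3 5", "1 9", "3 1", "1 2"));
  JavaPairRDD<secondarysort, String> comparableRDD = lines.mapToPair(new PairFunction<String, secondarysort, String>() {
   private static final long serialVersionUID = 1L;
   @Override
   public Tuple2<secondarysort, String> call(String line) throws Exception {
    String[] split = line.split(" ");
    secondarysort ss = new secondarysort(Integer.valueOf(split[0]), Integer.valueOf(split[1]));
    return new Tuple2<secondarysort, String>(ss, line);
   }
  });
  // sort by the composite key, strip it off, and collect the lines back to the driver
  List<String> sorted = comparableRDD.sortByKey().map(new Function<Tuple2<secondarysort, String>, String>() {
   private static final long serialVersionUID = 1L;
   @Override
   public String call(Tuple2<secondarysort, String> v1) throws Exception {
    return v1._2;
   }
  }).collect();
  for (String line : sorted) {
   System.out.println(line);
  }
  // expected order: 1 2, 1 9, 3 1, 3 5
  sc.close();
 }
}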


From the ITPUB blog: http://blog.itpub.net/30541278/viewspace-2153602/ (please credit the source when reposting).
