import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Secondary-sort example: sorts (Int, Int) pairs ascending by the first
 * element and descending by the second, via a custom composite key class.
 * Created by MC on 2018/6/6.
 */
object SecondSort {
  /**
   * Runs the secondary sort on a small hard-coded data set.
   *
   * Each (Int, Int) pair is re-keyed by a [[SecondSortEntity]], whose
   * `Ordered.compare` drives `sortByKey`: ascending on the first element,
   * descending on the second.
   *
   * @param sc active SparkContext used to parallelize the sample data
   * @return the sorted (key, pair) tuples rendered as a List string
   */
  def mySecondSort(sc: SparkContext): String = {
    val array: Array[(Int, Int)] = Array((1, 1), (1, 2), (333, 2), (1, 4), (1, 5),
      (2, 1), (2, 2), (2, 3), (2, 4), (4, 5), (45, 1), (87, 2), (333, 3), (333, 4), (32, 5))
    val rdd1: RDD[(Int, Int)] = sc.parallelize(array)
    // Pair every tuple with a composite key carrying the custom ordering.
    val keyed: RDD[(SecondSortEntity, (Int, Int))] =
      rdd1.map(x => (new SecondSortEntity(x._1, x._2), x))
    // sortByKey picks up SecondSortEntity's Ordered[SecondSortEntity] instance.
    // NOTE: the original collapsed these two statements onto one line with no
    // separator, which does not compile; they are now distinct statements.
    val sorted = keyed.sortByKey()
    sorted.collect().toList.toString()
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("SecondSort")
    val sc = new SparkContext(conf)
    // Ensure the context is stopped even if the job throws.
    try {
      println(mySecondSort(sc))
    } finally {
      sc.stop()
    }
  }
}
// Entity class: composite sort key used by the secondary sort above.
/**
 * Created by MC on 2018/6/6.
 */
/**
 * Composite sort key: orders ascending by `num1`, then descending by `num2`.
 * Serializable so Spark can ship instances to executors.
 *
 * @param num1 primary sort field (ascending)
 * @param num2 secondary sort field (descending)
 */
class SecondSortEntity(val num1: Int, val num2: Int) extends Ordered[SecondSortEntity] with Serializable {
  /**
   * Compares by `num1` ascending, breaking ties by `num2` descending.
   *
   * Uses `Integer.compare` instead of subtraction: `this.num1 - that.num1`
   * overflows Int when the operands are far apart (e.g. Int.MinValue vs 1),
   * producing a wrong-signed result and a corrupted ordering.
   */
  override def compare(that: SecondSortEntity): Int = {
    val primary = Integer.compare(this.num1, that.num1)
    if (primary != 0) primary
    else Integer.compare(that.num2, this.num2) // reversed operands => descending num2
  }
}