import org.apache.spark.{SparkConf, SparkContext}
object CustomSort {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("CustomSort").setMaster("local[2]")
    val sc = new SparkContext(conf)

    // Sample records: (name, openSize, age, id).
    val people = sc.parallelize(
      List(("xiaoli", 91, 28, 1), ("xiaoming", 89, 25, 6), ("xiaohong", 86, 21, 1))
    )

    // Bring the implicit Ordering[Gril] into scope; sortBy resolves it
    // for the key type K = Gril.
    import OrderContext._

    // Spark's sortBy signature, for reference:
    //   def sortBy[K](f: T => K,
    //                 ascending: Boolean = true,
    //                 numPartitions: Int = ...)
    //                (implicit ord: Ordering[K], ctag: ClassTag[K]): RDD[T]
    // The sort key is Gril(openSize, age); ascending = false reverses
    // the imported ordering.
    val sorted = people.sortBy(rec => Gril(rec._2, rec._3), ascending = false)

    println(sorted.collect().toBuffer)
    sc.stop()
  }
}
//case class Gril(openSize:Int,age:Int) extends Serializable
/* Scala library source, for reference: object Ordered {
/** Lens from `Ordering[T]` to `Ordered[T]` */
implicit def orderingToOrdered[T](x: T)(implicit ord: Ordering[T]): Ordered[T] =
new Ordered[T] { def compare(that: T): Int = ord.compare(x, that) }
}*/
/**
 * Sort key for the custom RDD sort.
 *
 * Natural ordering: ascending by `openSize`; when `openSize` ties,
 * descending by `age` (an older Gril compares as "smaller").
 *
 * @param openSize primary sort key (ascending)
 * @param age      secondary sort key (descending on ties)
 */
case class Gril(openSize: Int, age: Int) extends Ordered[Gril] with Serializable {
  override def compare(that: Gril): Int = {
    if (this.openSize == that.openSize) {
      // Tie on openSize: larger age sorts first (descending by age).
      // Integer.compare avoids the sign-flip that raw subtraction
      // (that.age - this.age) produces on Int overflow.
      Integer.compare(that.age, this.age)
    } else {
      // Primary key: openSize ascending.
      Integer.compare(this.openSize, that.openSize)
    }
  }
}
/**
 * Custom sort context: supplies the implicit Ordering[Gril] that
 * `sortBy` picks up — ascending by `openSize`, then descending by
 * `age` when `openSize` ties.
 */
object OrderContext {
  implicit val grilOrdering: Ordering[Gril] = new Ordering[Gril] {
    override def compare(x: Gril, y: Gril): Int = {
      // Primary key: openSize ascending. Integer.compare avoids
      // subtraction overflow and — unlike the previous if-chain,
      // which never returned 0 — satisfies the Ordering contract
      // (compare(a, a) must be 0; TimSort may throw
      // "Comparison method violates its general contract" otherwise).
      val byOpenSize = Integer.compare(x.openSize, y.openSize)
      if (byOpenSize != 0) byOpenSize
      else Integer.compare(y.age, x.age) // tie-break: age descending
    }
  }
}