没有排序的数据
1 5 2
2 2 4
2 2 5
1 3 6
1 2 1
排序后的数据 (用后面的代码实现)
2 2 5
2 2 4
1 2 1
1 3 6
1 5 2
package bigdata1707.day08

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by wj on 2018/5/9.
  *
  * Secondary sort of whitespace-separated integer triples:
  * first column descending, second column ascending, third column descending.
  */
class SecondSort(val firstNum: Int, val secondNum: Int, val thirdNum: Int)
    extends Ordered[SecondSort] with Serializable {

  /**
    * Natural (ascending) order of keys: first column asc, second desc, third asc.
    * The driver calls `sortByKey(false)`, which reverses this natural order,
    * producing the documented output order: first desc, second asc, third desc.
    *
    * Uses `Integer.compare` instead of subtraction: `a - b` overflows for
    * large-magnitude ints (e.g. Int.MinValue - 1 wraps positive) and would
    * silently flip the comparison result.
    */
  override def compare(that: SecondSort): Int = {
    val byFirst = Integer.compare(this.firstNum, that.firstNum)
    if (byFirst != 0) byFirst
    else {
      // Operands reversed on purpose: the second column sorts opposite
      // to the other two columns.
      val bySecond = Integer.compare(that.secondNum, this.secondNum)
      if (bySecond != 0) bySecond
      else Integer.compare(this.thirdNum, that.thirdNum)
    }
  }
}

object SecondSort {

  /**
    * Reads integer triples from a local text file, sorts them with the
    * composite key above, and prints the original lines in sorted order.
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local").setAppName("SecondSort")
    val sc: SparkContext = new SparkContext(conf)

    val textFileRDD: RDD[String] = sc.textFile("D:\\acheshi\\3.txt")

    // Split each line once (the original re-split the same line three
    // times, once per column) and pair it with its composite sort key.
    val mapRDD: RDD[(SecondSort, String)] = textFileRDD.map { line =>
      val cols = line.split(" ")
      (new SecondSort(cols(0).toInt, cols(1).toInt, cols(2).toInt), line)
    }

    // false => sort by the key's reversed natural order (see compare()).
    val sortByKeyRDD: RDD[(SecondSort, String)] = mapRDD.sortByKey(false)
    val mapValueRDD: RDD[String] = sortByKeyRDD.map(_._2)

    mapValueRDD.foreach(println)
    sc.stop()
  }
}