Understanding the aggregate function in Spark RDD

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Created by EA on 2016/8/24.
 */
object Test3 {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("ScalaTest").setMaster("local")
    val sc = new SparkContext(conf)
    // Define an RDD[Int] from a List with initial values 1 to 5, spread across 3 partitions
    val rddInt: RDD[Int] = sc.parallelize(List(1, 2, 3, 4, 5), 3)
    // (The source text breaks off at "val rddAggr1:"; given the article's topic,
    // a plausible continuation is a simple aggregate that sums the elements.)
    val rddAggr1: Int = rddInt.aggregate(0)((acc, v) => acc + v, (a, b) => a + b)
    println(rddAggr1) // 15
    sc.stop()
  }
}
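To make aggregate's two-function structure concrete, here is a minimal sketch (my addition, not from the original post; it reuses rddInt from the snippet above, and the name sumCount is mine). seqOp folds each element into a per-partition accumulator, and combOp merges the per-partition accumulators, so the sum and the element count can be computed in a single pass:

    // Compute (sum, count) in one pass over rddInt.
    // seqOp runs inside each partition; combOp merges partition results.
    val sumCount: (Int, Int) = rddInt.aggregate((0, 0))(
      (acc, v) => (acc._1 + v, acc._2 + 1), // seqOp: add the element, bump the count
      (a, b) => (a._1 + b._1, a._2 + b._2)  // combOp: merge two partial accumulators
    )
    println(sumCount)                           // (15, 5)
    println(sumCount._1.toDouble / sumCount._2) // average: 3.0

One caveat worth knowing: zeroValue is used once per partition as the seed for seqOp and once more as the seed for combOp, so a non-neutral zero inflates the result. With the 3-partition rddInt above, rddInt.aggregate(1)(_ + _, _ + _) returns 19 (15 + 1 per partition + 1 for the final merge), not 16.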