Setting the Number of RDD Partitions
package com.shujia.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo2Partition {
  def main(args: Array[String]): Unit = {
    /**
     * Create the context object
     */
    val conf: SparkConf = new SparkConf()
      .setAppName("partition")
      .setMaster("local")

    // Set Spark's default number of partitions (spark.default.parallelism, the default parallelism).
    // This setting only takes effect for RDDs produced after a shuffle.
    conf.set("spark.default.parallelism", "4") // the original line was truncated here; "4" is an illustrative value

    val sc = new SparkContext(conf)
  }
}