// Entry point: Spark word-count demo. Builds two in-memory RDDs of strings and
// runs a flatMap / map(_, 1) / reduceByKey pipeline over each to count words.
// NOTE(review): this method is truncated in the visible chunk — the final
// expression is cut off mid-call and the closing brace is not visible, so the
// code below is left byte-identical and only comments were added/translated.
def main(args: Array[String]): Unit = {
// NOTE(review): the quotes around "test10", "local[2]" and "WARN" below are
// Unicode smart quotes (U+201C/U+201D), not ASCII '"' — these lines will NOT
// compile as written; they need plain double quotes (likely a copy/paste
// through a word processor).
val conf = new SparkConf().setAppName(“test10”).setMaster(“local[2]”)
val sc = new SparkContext(conf)
sc.setLogLevel(“WARN”)
val rdd1 = sc.parallelize(List("hello","word","aa","hello","bb","cc","aa","cc","word"))
// First split each element, then map every token to (token, 1), then sum the
// values of identical keys. (translated from Chinese)
// NOTE(review): `reduceByKey(+)` is not valid Scala — it was almost certainly
// `reduceByKey(_+_)` before the paste mangled it. Also, these elements are
// single words with no commas, so the split here is effectively a no-op.
val res1 = rdd1.flatMap(x=>x.split(",")).map(x=>(x,1)).reduceByKey(+)
println(res1.collect().toBuffer)
println("*****************************")
// val rdd = sc.textFile(“D://aaa.txt”)
val rdd2 = sc.parallelize(List("hello,word,aa,hello","bb,cc,aa,cc,word"))
// Flatten the two comma-joined strings into individual words, give each word
// an initial count of 1, then add the counts of identical keys. (translated
// from Chinese)
// NOTE(review): the expression below is cut off at the end of this chunk; the
// second flatMap also looks redundant — the first split already tokenizes, so
// splitting again on "," cannot produce further elements. Verify against the
// full file.
val res2 = rdd2.flatMap(x=>x.split(",")).flatMap(x=>x.split(","