// 马克-to-win @ 马克java社区:
package com
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
object TestRDD {

  /**
   * Demonstrates two basic RDD transformations on small in-memory
   * collections, running against a local Spark master:
   *
   *  - `map`: squares each element of a `List(1, 4, 3, 7, 5)`.
   *  - `filter`: keeps elements strictly less than 5 from the same values.
   *
   * Results are collected to the driver and printed.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TestRDD").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // map: square each element; collect() pulls results to the driver,
      // which is fine here because the dataset is tiny.
      val rdd1 = sc.makeRDD(List(1, 4, 3, 7, 5))
      val rdd1_1 = rdd1.map { y => y * y }
      val aList = rdd1_1.collect()
      println("map 用法 is " + aList.mkString(","))

      // filter: retain only elements < 5.
      val rdd2 = sc.makeRDD(Array(1, 4, 3, 7, 5))
      val rdd2_1 = rdd2.filter { x => x < 5 }
      println("filter 用法 " + rdd2_1.collect().mkString(","))
    } finally {
      // Always release the SparkContext, even if a job throws; the original
      // never stopped it (and was also missing the closing braces below).
      sc.stop()
    }
  }
}
// 版权保护,尊重原创,原文出处:http://www.mark-to-win.com/tutorial/mydb_SparkScala_ScalaRDDExample.html