flatMap 会将传入的数组或字符串进行扁平化（展开）操作——本例中没有调用 split，每个字符串会被直接展开为单个字符
package sparkcore
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
/** Demo of RDD.flatMap: flattening each String element into its characters. */
object flatMapDemo {
  def main(args: Array[String]): Unit = {
    // Point Hadoop's native-library lookup at the local winutils directory (Windows workaround).
    System.setProperty("hadoop.home.dir", "D:\\spark")

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("UserLogin")
    val context = new SparkContext(sparkConf)

    val input = Array("hadoop spark")
    val lines: RDD[String] = context.parallelize(input)

    // A String is an IterableOnce[Char], so flatMap(line => line) flattens
    // each string element into its individual characters.
    val flattened: Array[Char] = lines.flatMap(line => line).collect()
    flattened.foreach(println)

    context.stop()
  }
}
运行结果如下