import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by liupeng on 2017/6/16.
*/
/**
 * Example: save an RDD with Java serialization via `saveAsObjectFile`
 * and read it back with `SparkContext.objectFile`.
 *
 * Running it produces an output directory ("str_test") containing the
 * serialized partition files. Note: Spark fails if the output path
 * already exists — delete "str_test" before re-running.
 */
object A_saveAsObjectFile {
  // Windows-only workaround: point Hadoop at a local winutils install.
  // NOTE(review): hard-coded path; harmless on other OSes but confirm it
  // matches the local environment.
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("saveAsObjectFile_test").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Prepare some sample data.
      val nameList: List[String] = List("c", "d", "a", "e", "f", "k")
      val numbers = sc.parallelize(nameList)
      // Persist the RDD using Java serialization.
      numbers.saveAsObjectFile("str_test")
      // Read it back. The element type must be the RDD's element type
      // (String), NOT Array[String]: objectFile internally deserializes
      // the Array[T] batches written by saveAsObjectFile and flattens them.
      val data = sc.objectFile[String]("str_test")
        .collect()
      // Show the round-tripped elements so the example is observable.
      data.foreach(println)
    } finally {
      // Always release Spark resources, even if the job fails.
      sc.stop()
    }
  }
}
// Running this program generates an output directory ("str_test") with the serialized files.