import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by liupeng on 2017/6/16.
*/
/**
 * Demo of `RDD.saveAsTextFile`: writes a small RDD of strings to two output
 * directories, the second one gzip-compressed.
 *
 * Note: `saveAsTextFile` fails if the target directory already exists, so the
 * output directories must be removed between runs.
 */
object A_saveAsTextFile {
  // Windows-only workaround: point Hadoop at a local install so winutils.exe can be located.
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("saveAsTextFile_test").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Prepare some sample data.
      val nameList: List[String] = List("c", "d", "a", "e", "f", "k")
      val numbers = sc.parallelize(nameList)
      // Save the dataset as text files to the local file system, HDFS,
      // or any other Hadoop-supported file system.
      numbers.saveAsTextFile("test_data_save")
      // Same data again, but compressed with gzip.
      numbers.saveAsTextFile("test_data_save2", classOf[GzipCodec])
    } finally {
      // Fix: the original never stopped the context, leaking it and preventing
      // a clean shutdown. Always release the SparkContext.
      sc.stop()
    }
  }
}
// Running this produces two output directories: test_data_save and test_data_save2.