"""Minimal PySpark demo: build a local SparkContext, map an RDD, print the result."""
from pyspark import SparkConf, SparkContext
import os

# Point PySpark worker processes at the venv interpreter (Windows path).
# NOTE(review): hard-coded machine-specific path — adjust per environment.
os.environ['PYSPARK_PYTHON'] = "D:/JAVA文档/venv/Scripts/python.exe"

# Chained builder calls: local mode using all available cores.
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)

rdd = sc.parallelize([1, 2, 5])
print("版本", sc.version)  # print the Spark version string


def func(data):
    """Multiply a single element by 10 (mapper for the RDD transformation)."""
    return data * 10


rdd = rdd.map(func)
print(rdd.collect())  # expected output: [10, 20, 50]

# Release the SparkContext and its resources.
sc.stop()
05-15
3026
06-08
1万+
08-01
1790
09-28
2466