A Brief Spark Example
from pyspark import SparkConf, SparkContext
import os
# Tell PySpark which Python interpreter to use for its worker processes
os.environ['PYSPARK_PYTHON'] = "D:/Soft/anaconda/python.exe"
# Build the entry-point object for the execution environment
conf = SparkConf().setMaster("local[*]").setAppName("test_spark")
sc = SparkContext(conf=conf)
# Read the input file as an RDD of lines
rdd = sc.textFile("E:/Document/mybatis/helloWorld/hello.txt")
# Split each line on spaces and flatten the results into a single RDD of words
word_rdd = rdd.flatMap(lambda x: x.split(" "))
# Turn each word into a (word, 1) pair: the word is the key, 1 is the value
word_with_one_rdd = word_rdd.map(lambda word: (word, 1))
# Group by key and sum the counts for each word
result = word_with_one_rdd.reduceByKey(lambda a, b: a + b)
print(result.collect())
# [('itcase', 2), ('itheima', 8), ('pyspark', 2)]
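
The key step above is flatMap. A minimal sketch of how it differs from map, using hypothetical in-memory sample data (built with parallelize) instead of the input file:

# Hypothetical sample data, created in memory rather than read from disk
sample = sc.parallelize(["hello world", "hello spark"])
# map produces exactly one output element per input line, so the lists stay nested
print(sample.map(lambda x: x.split(" ")).collect())
# [['hello', 'world'], ['hello', 'spark']]
# flatMap flattens the per-line lists into one RDD of individual words
print(sample.flatMap(lambda x: x.split(" ")).collect())
# ['hello', 'world', 'hello', 'spark']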
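
If you want the counts in descending order, an optional extra step (not part of the original example) is to chain sortBy onto the result before collecting, then stop the context when the job is done:

# Optional: sort the (word, count) pairs by count, largest first
top_rdd = result.sortBy(lambda t: t[1], ascending=False)
print(top_rdd.collect())
# Release the SparkContext once the job is finished
sc.stop()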