from pyspark import SparkConf, SparkContext
# Specify the Python interpreter path if needed
# import os
# os.environ["PYSPARK_PYTHON"] = "/usr/local/bin/python3"
conf = SparkConf().setMaster("local[*]").setAppName("test")
sc = SparkContext(conf=conf)
rdd = sc.textFile("/Users/mac/Desktop/表.txt")

# Traverse a string and collect its characters into a list
def bianli(s):
    a = []
    for i in s:
        a.append(i)
    return a

# Inspect the raw lines read from the file
print(rdd.collect())
# Split each line into characters, pair each character with 1,
# then sum the counts per character
rdd1 = rdd.flatMap(bianli).map(lambda a: (a, 1))
rdd2 = rdd1.reduceByKey(lambda a, b: a + b)
print(rdd2.collect())
sc.stop()
Output:
['sdfsdfsdmsnklfnklsnddsfdfs']
[('d', 6), ('l', 2), ('s', 7), ('f', 5), ('m', 1), ('n', 3), ('k', 2)]
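
As a side note, a Python string is already an iterable of characters, so the hand-written bianli helper can be replaced by the built-in list, and the map/reduceByKey pair can even be collapsed into a single countByValue action. A minimal sketch of both variants, assuming the same local setup and input file as above (the results should match the output shown):

from pyspark import SparkConf, SparkContext

conf = SparkConf().setMaster("local[*]").setAppName("test")
sc = SparkContext(conf=conf)
rdd = sc.textFile("/Users/mac/Desktop/表.txt")

# list(s) yields the characters of s, so flatMap(list) replaces bianli
counts = rdd.flatMap(list).map(lambda c: (c, 1)).reduceByKey(lambda x, y: x + y)
print(counts.collect())

# countByValue() is an action that returns character -> count as a dict
# on the driver, skipping the explicit map/reduceByKey steps
print(dict(rdd.flatMap(list).countByValue()))

sc.stop()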