# Beginner Spark exercise: Python code that counts words in Spark's README.md.
# It mainly counts occurrences of the words "Spark" and "spark".
# -*- coding:UTF-8 -*-
from pyspark import SparkContext, SparkConf
# Read the input file and build a (word, count) RDD.
def load_and_deal(file_path, spark_context=None):
    """Read *file_path* and count words on lines mentioning Spark.

    Keeps every line containing the substring "Spark" or "spark"
    (case-sensitive), splits those lines on single spaces, and reduces
    to (word, count) pairs.

    Args:
        file_path: path of the text file to read (e.g. ``README.md``).
        spark_context: SparkContext to use; defaults to the module-level
            ``sc`` created under the ``__main__`` guard.

    Returns:
        An RDD of ``(word, count)`` tuples.
    """
    ctx = sc if spark_context is None else spark_context
    input_rdd = ctx.textFile(file_path)
    # One combined filter replaces the original filter/union/distinct
    # chain. Bug fix: distinct() was only needed to dedupe lines matched
    # by both filters, but it also collapsed genuinely duplicated lines
    # in the file, silently under-counting their words.
    matching_lines = input_rdd.filter(lambda s: "Spark" in s or "spark" in s)
    words = matching_lines.flatMap(lambda line: line.split(" "))
    return words.map(lambda w: (w, 1)).reduceByKey(lambda a, b: a + b)
# Tally occurrences of "Spark" and "spark" in the word-count pairs.
def count_words(rdd):
    """Sum counts of words containing "Spark" and of words containing "spark".

    Args:
        rdd: an RDD (or any object with ``collect()``) yielding
            ``(word, count)`` tuples, as produced by ``load_and_deal``.

    Returns:
        Tuple ``(Sparknum, sparknum)``: total count of words containing
        the substring "Spark", and of words containing "spark". Substring
        matching means e.g. "PySpark" is counted under "Spark".
    """
    Sparknum = 0
    sparknum = 0
    for word, count in rdd.collect():
        print((word, count))  # word counts from every line mentioning Spark/spark
        # Bug fix: this was a Python 2 print statement ("print item[0]..."),
        # a SyntaxError in Python 3 even though the rest of the file uses
        # print() calls.
        print(word.count("spark"))
        if "Spark" in word:
            Sparknum += count  # "Spark" tally
        if "spark" in word:
            sparknum += count  # "spark" tally
    return Sparknum, sparknum
def main():
    """Run the README word-count pipeline and print both totals."""
    # Step 1: build (word, count) pairs from the README.
    readme_path = 'README.md'
    word_counts = load_and_deal(readme_path)
    # Step 2: tally the two capitalizations.
    cap_total, low_total = count_words(word_counts)
    # Step 3: report the results.
    print("the nums of Spark:" + str(cap_total))  # 21
    print("the nums of spark:" + str(low_total))  # 13
if __name__ == "__main__":
    # Create the module-global SparkContext that load_and_deal defaults to.
    conf = SparkConf().setMaster("local").setAppName("words count")
    sc = SparkContext(conf=conf)
    try:
        main()
    finally:
        # Fix: the original never shut the context down; stop() releases
        # the driver's resources even when main() raises.
        sc.stop()