[root@node00 sbin]# spark-shell --master local[2]
// Read the input file from the local filesystem as an RDD of lines.
val file = spark.sparkContext.textFile("file:///usr/local/wc.txt")
// Split each line on commas, pair every word with a count of 1,
// then sum the counts per word across all partitions.
val wordCounts = file.flatMap(_.split(",")).map((_, 1)).reduceByKey(_ + _)
// Bring the (word, count) pairs back to the driver for display.
wordCounts.collect
[root@node00 sbin]# spark-shell --master local[2]
// Load the source text as an RDD of lines from the local filesystem.
val file = spark.sparkContext.textFile("file:///usr/local/wc.txt")
// Word count in three explicit steps: tokenize, tag with 1, aggregate by key.
val words = file.flatMap(line => line.split(","))
val pairs = words.map(word => (word, 1))
val wordCounts = pairs.reduceByKey(_ + _)
// Collect the final (word, count) pairs onto the driver.
wordCounts.collect