import org.apache.spark.sql.SparkSession
/*
 * SparkSQL word count: reads a text file, splits each line into words,
 * and counts occurrences of each word via a SQL query over a temp view.
 */
object SparksqlWordCount {

  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .appName("SparkSqlWordCount")
      .master("local[*]")
      .getOrCreate()
    session.sparkContext.setLogLevel("WARN")

    // Forward-slash path: the original "data\\words.txt" only resolves on
    // Windows — elsewhere the backslash is a literal filename character and
    // the read fails. "/" works on every platform (including Windows).
    val lines = session.read.textFile("data/words.txt")

    import session.implicits._

    // Split each line on single spaces; the Dataset[String] column produced
    // by flatMap is named "value", so rename it to "word" for the SQL below.
    val words = lines.flatMap(_.split(" "))
    val df = words.withColumnRenamed("value", "word")

    // Register the DataFrame as a session-scoped view and aggregate with SQL,
    // sorted by descending count.
    df.createTempView("v_words")
    val result = session.sql(
      "select word,count(*) as num from v_words group by word order by num desc")
    result.show()

    // Stops the underlying SparkContext and releases resources.
    session.close()
  }
}
import org.apache.spark.{SparkConf, SparkContext}/* sparkrdd单词计数 */object WordCount { def main(args: Array[String]): Unit = { val conf = new SparkConf().setAppName("SparkRDDWordCount") .setMaster("local[*]") val sc = new SparkContext