实践1:wordcount
1.1本地跑wordcount代码(无状态):
package com.badou.streaming
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.storage.StorageLevel
/**
 * Stateless Spark Streaming word count.
 *
 * Reads lines from a TCP socket (host/port given as args), splits them on
 * spaces, counts words within each 5-second micro-batch, prints the counts
 * and also saves each batch to HDFS. No state is carried across batches.
 */
object wordCount {
  def main(args: Array[String]) {
    // Require both <hostname> and <port>; exit with usage otherwise.
    if (args.length < 2) {
      System.err.println("Usage: wordCount <hostname> <port>")
      System.exit(1)
    }
    // Fixed: the original chained `setAppName` via operator (infix) notation
    // with no dot ( ...setMaster("local[2]")setAppName(...) ), which is
    // fragile/deprecated style. Use explicit dot chaining, consistent with
    // the stateful example below.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("wordCount")
    // Micro-batch interval of 5 seconds.
    val ssc = new StreamingContext(sparkConf, Seconds(5))
    // Serialized storage to reduce memory pressure on the receiver.
    val lines = ssc.socketTextStream(args(0), args(1).toInt, StorageLevel.MEMORY_AND_DISK_SER)
    val words = lines.flatMap(_.split(" "))
    // Per-batch counts only (no updateStateByKey), hence "stateless".
    val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)
    wordCounts.print()
    // Each batch is written under a directory named stream_out-<timestamp>.doc
    wordCounts.saveAsTextFiles("hdfs://master:9000/stream_out", "doc")
    ssc.start()
    ssc.awaitTermination()
  }
}
测试及结果:
1.2打jar包在本地spark上跑:
去掉setMaster参数:
在脚本上指定master,run.sh脚本:
运行脚本:
1>代表标准输出
2>代表错误输出
实践2:wordcount(有状态)
有状态代码:
完整代码:
WordCountState.scala:
package com.badou.streaming
import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.storage.StorageLevel
object WordCountState {
/**
 * State-update function for updateStateByKey: merges the values that
 * arrived for a key in the current batch with the key's previous running
 * total.
 *
 * @param currentValues values seen for the key in this batch
 * @param preValues     running total from earlier batches, if any
 * @return the new running total, always defined (missing state counts as 0)
 */
def updateFunction(currentValues: Seq[Int], preValues: Option[Int]): Option[Int] =
  Some(currentValues.sum + preValues.getOrElse(0))
def main(args: Array[String]) {
if (args.length < 2) {
System.err.println("Usage: WordCountState <hostname> <port>")
System.exit(1)
}
val sparkConf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
val ssc = new StreamingContext(sparkConf, Seconds(5))
ssc.checkpoint("hdfs://master:9000/hdfs_checkpoint")
val lines = ssc.socketTextStream(args(0), args(1).toI