Input streams
File input streams
// Spark Streaming file input stream: word count over new files in a monitored directory
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

// A local path could be monitored instead:
// val inputFile = "file:///usr/local/spark/mycode/wordcount/word.txt"
val inputFile = "hdfs://192.168.126.130:9000/usr/local"
val conf = new SparkConf().setAppName("streamingApp").setMaster("local")
// Alternatively, build the StreamingContext from an existing SparkContext:
// val sc = new SparkContext(conf)
// val ssc = new StreamingContext(sc, Seconds(20))
val ssc = new StreamingContext(conf, Seconds(20)) // batch interval of 20 seconds
val lines = ssc.textFileStream(inputFile)
val wordCounts = lines.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey((a, b) => a + b)
wordCounts.print()
ssc.start()            // start monitoring the directory
ssc.awaitTermination()
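The snippet above is written in script style, so the simplest way to run it is to paste it into spark-shell; it can also be wrapped in an object with a main method, packaged, and launched with spark-submit. A minimal sketch follows, assuming Spark is installed under /usr/local/spark; the class name StreamingApp and the jar path are placeholders, not names from the original code.
# Option 1: run interactively in spark-shell (use :paste, insert the code above, then Ctrl+D)
/usr/local/spark/bin/spark-shell
# Option 2: wrap the code in an object with a main method, package it, and submit it;
# the class name and jar path below are hypothetical
/usr/local/spark/bin/spark-submit --class StreamingApp --master local /usr/local/spark/mycode/wordcount/streamingApp.jar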
Upload a local file to the monitored HDFS path:
hadoop fs -copyFromLocal /usr/local/spark/mycode/wordcount/word.txt /usr/local
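Note that textFileStream only picks up files that appear in the monitored directory after ssc.start(); data appended to existing files is ignored, and files should be created atomically, for example by writing them elsewhere first and then moving them in. A small sketch of that pattern, using an assumed staging directory /usr/local/tmp:
# Upload to a staging directory first, then move the file into the monitored path
# so that it appears there atomically; /usr/local/tmp is only an example staging path.
hadoop fs -mkdir -p /usr/local/tmp
hadoop fs -put /usr/local/spark/mycode/wordcount/word.txt /usr/local/tmp/word.txt
hadoop fs -mv /usr/local/tmp/word.txt /usr/local/word.txt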
Result screenshot