Spark Real-Time Word Count (WordCount)
Preface:
Here we use netcat as the data source: netcat is started from cmd to send data to a fixed port on the local machine, and the word counts are printed to the console in real time.
Dependencies
<dependencies>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.12</artifactId>
        <version>3.1.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.12</artifactId>
        <!-- Keep this version aligned with spark-core to avoid classpath conflicts -->
        <version>3.1.2</version>
        <!-- No <scope>provided</scope> here: the streaming classes must be on the
             runtime classpath for a local run. Restore provided when submitting
             to a cluster that already ships Spark. -->
    </dependency>
</dependencies>
Java code
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

public class WordCount {
    public static void main(String[] args) throws InterruptedException {
        // At least two local threads are required: one to receive data, one to process it
        SparkConf conf = new SparkConf().setAppName("xiaobai").setMaster("local[2]");
        // Batch interval of 3 seconds: each micro-batch covers 3s of input
        JavaStreamingContext context = new JavaStreamingContext(conf, Durations.seconds(3));
        // Suppress INFO/WARN logging so the results are easy to read
        context.sparkContext().setLogLevel("ERROR");
        // Receive lines of text from the netcat listener on localhost:8888
        JavaReceiverInputDStream<String> data = context.socketTextStream("localhost", 8888);
        // Split each line into words, emit (word, 1) pairs, then sum the counts per word
        data.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) throws Exception {
                return Arrays.asList(s.split(" ")).iterator();
            }
        }).mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String s) throws Exception {
                return new Tuple2<>(s, 1);
            }
        }).reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer integer, Integer integer2) throws Exception {
                return integer + integer2;
            }
        }).print();
        context.start();
        // Block until the streaming context is stopped (e.g. by Ctrl+C)
        context.awaitTermination();
        context.stop();
    }
}
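On Java 8 and later, the three anonymous inner classes above can be collapsed into lambdas. This is a behavior-preserving sketch of the same transformation chain, for readers who prefer the compact form:

// Same flatMap -> mapToPair -> reduceByKey pipeline, written with lambdas
data.flatMap(s -> Arrays.asList(s.split(" ")).iterator())
    .mapToPair(s -> new Tuple2<>(s, 1))
    .reduceByKey(Integer::sum)
    .print();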
cmd command:
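The exact flags depend on your netcat build; with the classic Windows netcat port, the following starts a listener on port 8888. Run it before starting the Spark job, then type words into the netcat window:

nc -l -p 8888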
Console output:
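print() uses a fixed format. Assuming the line hello world hello is typed into the netcat window, the output for that batch looks roughly like this (the timestamp will differ on every run):

-------------------------------------------
Time: 1633689600000 ms
-------------------------------------------
(hello,2)
(world,1)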