// 1. First wrote a simple example; it runs successfully locally.
package com.coder.flink.core.a_kafkaStream;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.kstream.*;
public class WordCountApplication {
public static void main(final String[] args) throws Exception {
Properties props = new Properties();
//todo 设置名称
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application");
//todo 设置节点
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "node2.hadoop:9092,node3.hadoop:9092");
//todo 设置线程数
props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "4");
//todo 设置序列化
props.put(StreamsConfig.DEFA