Dependencies
<!-- Flink core -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-java</artifactId>
    <version>1.7.2</version>
</dependency>
<!-- Flink streaming (the Scala suffix must match the Kafka connector below) -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-java_2.11</artifactId>
    <version>1.7.2</version>
    <!-- provided: supplied by the Flink runtime on the cluster; drop this scope when running from the IDE -->
    <scope>provided</scope>
</dependency>
<!-- Flink Kafka connector (universal connector, Flink 1.7+) -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-kafka_2.11</artifactId>
    <version>1.7.2</version>
</dependency>
Sending data
Download and install netcat: https://blog.csdn.net/qq_38762390/article/details/115789281
Start a netcat listener on port 7777; whatever you type into this session becomes the socket stream that Flink reads:
nc -lk 7777
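For a quick test you can type a few lines into the netcat session. The content is arbitrary; these lines are only illustrative and are reused in the consumer check further down:

hello flink
hello kafka
hello world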
Code
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

public class StreamToKafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Read the text stream served by netcat on port 7777
        DataStreamSource<String> data = env.socketTextStream("192.168.25.129", 7777);
        String brokerList = "192.168.25.129:9092";
        String topic = "test";
        // The records written to Kafka are plain Strings, serialized with SimpleStringSchema
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(brokerList, topic, new SimpleStringSchema());
        data.addSink(producer);
        env.execute();
    }
}
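FlinkKafkaProducer can also be constructed from a java.util.Properties object instead of a plain broker list, which is useful when you need more producer settings. A minimal sketch against the same Flink 1.7.2 universal connector (requires import java.util.Properties; the "retries" key is a standard Kafka producer setting shown only as an example of extra configuration):

// Alternative producer construction inside main(), replacing the brokerList variant above
Properties props = new Properties();
props.setProperty("bootstrap.servers", "192.168.25.129:9092"); // same broker as above
props.setProperty("retries", "3");                             // example of an extra producer setting

FlinkKafkaProducer<String> producer =
        new FlinkKafkaProducer<>("test", new SimpleStringSchema(), props);
data.addSink(producer);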
Start a Kafka console consumer
bin/kafka-console-consumer.sh --bootstrap-server 192.168.25.129:9092 --from-beginning --topic test
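If you typed the illustrative lines above into the netcat session while the Flink job was running, the console consumer should print them back (the exact output depends on what you actually typed):

hello flink
hello kafka
hello world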
Flink: using Kafka as a data source for Flink
https://blog.csdn.net/weixin_45427648/article/details/130181653
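For the reverse direction, the same connector dependency provides FlinkKafkaConsumer. A minimal sketch under the same Flink 1.7.2 setup, reading the test topic back as Strings (the class name and the group.id value are arbitrary examples):

import java.util.Properties;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

public class KafkaToStream {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "192.168.25.129:9092");
        props.setProperty("group.id", "flink-demo"); // arbitrary consumer group name

        // Read the "test" topic as a stream of Strings
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("test", new SimpleStringSchema(), props);
        consumer.setStartFromEarliest(); // mirrors --from-beginning of the console consumer

        env.addSource(consumer).print();
        env.execute("KafkaToStream");
    }
}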