1. 创建maven项目,添加依赖
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>1.10.1</version>
<scope>${project.build.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.10_2.12</artifactId>
<version>1.10.1</version>
</dependency>
2. 编写程序代码
package com.demo;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import java.util.Properties;
/**
 * Minimal Flink streaming job: consumes String records from the Kafka topic
 * "flink-topic" on a local broker and prints every record to stdout.
 */
public class KafkaStringCosumer {

    public static void main(String[] args) throws Exception {
        // Obtain the streaming execution environment for this job.
        final StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration: broker endpoint, consumer group, offset policy.
        final Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "localhost:9092");
        kafkaProps.setProperty("group.id", "flink-group");
        kafkaProps.setProperty("auto.offset.reset", "latest");

        // Source that deserializes each Kafka record value as a String.
        final FlinkKafkaConsumer010<String> kafkaSource =
                new FlinkKafkaConsumer010<>("flink-topic", new SimpleStringSchema(), kafkaProps);

        // Wire the source into the dataflow and print every element.
        final DataStream<String> messages = environment.addSource(kafkaSource);
        messages.print();

        // Submit the job; blocks until the (unbounded) stream is cancelled.
        environment.execute();
    }
}
3. 启动kafka测试环境
1. 启动zookeeper。
.\bin\windows\zookeeper-server-start.bat .\config\zookeeper.properties
2. 启动kafka服务。
.\bin\windows\kafka-server-start.bat .\config\server.properties
3. 创建主题
.\bin\windows\kafka-topics.bat --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic flink-topic
4. 启动生产者
.\bin\windows\kafka-console-producer.bat --broker-list localhost:9092 --topic flink-topic
5. 运行程序,生产者输入信息,在控制台查看接收到的信息输出。