创建Maven项目
- 配置pom.xml文件
<!-- 根据自己使用的版本情况进行修改版本号 -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.11</artifactId>
<version>1.7.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.11</artifactId>
<version>1.7.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.11</artifactId>
<version>1.7.2</version>
</dependency>
创建scala Object实例
import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig
/**
 * Minimal Flink streaming job that consumes the Kafka topic "sensor"
 * as raw strings and prints each record to stdout.
 */
object FlinkToKafka {
  def main(args: Array[String]): Unit = {
    // Create the streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Kafka consumer configuration.
    val prop = new Properties()
    prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.**.**:9092")
    prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink-kafka-demo")
    // BUG FIX: the key deserializer was previously set to StringSerializer.
    // A consumer needs a *Deserializer* for both key and value; with a
    // Serializer class here the consumer fails to initialize at runtime.
    prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    // Start from the latest offset when no committed offset exists.
    prop.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")
    // Source: topic "sensor" decoded with SimpleStringSchema.
    // NOTE(review): make sure the topic created on the broker uses the
    // same spelling ("sensor"), otherwise no data will arrive.
    val kafkaStream: DataStream[String] =
      env.addSource(new FlinkKafkaConsumer[String]("sensor", new SimpleStringSchema(), prop))
    // Sink: print each consumed record.
    kafkaStream.print()
    // Submit the job; blocks until the streaming job terminates.
    env.execute("kafkademo")
  }
}
创建Topic
- 创建命令:
kafka-topics.sh --create --zookeeper 192.168.**.**:2181 --topic sensor --partitions 1 --replication-factor 1
创建生产者
- 创建命令:
kafka-console-producer.sh --topic sensor --broker-list 192.168.**.**:9092
启动Flink Stream
生产者输入数据
hello world
hello flink
查看Flink Stream输出内容