基础内容,但是很多细节经常忘记,比如要导入哪些包之类的。索性就记录下来,方便以后查阅。
导包
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.11</artifactId>
<version>1.11.1</version>
</dependency>
<!--缺这个依赖会报错:java.lang.IllegalStateException: No ExecutorFactory found to execute the application.-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>1.11.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.10_2.11</artifactId>
<version>1.11.1</version>
</dependency>
代码
package it.aspirin.flinkx.demo
import java.util.Properties
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.datastream.DataStreamSource
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer
/**
 * Minimal Flink streaming job that reads string records from the Kafka 0.10
 * topic "metric-topic" and prints each record to stdout.
 *
 * Requires flink-streaming-scala, flink-clients and
 * flink-connector-kafka-0.10 on the classpath (versions must match the
 * Flink distribution — missing flink-clients yields
 * "No ExecutorFactory found to execute the application").
 */
object FlinkDemo {

  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(4)

    val username = ""
    val password = ""

    val props = new Properties()
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
    // group.id is required for the consumer to track/commit offsets; without
    // it, offset commits fail at runtime.
    props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink-demo-consumer")
    // NOTE: key/value deserializer properties are intentionally NOT set here.
    // The Flink Kafka connector always consumes raw bytes and applies the
    // DeserializationSchema given to the consumer (SimpleStringSchema below),
    // so those two properties would be ignored.

    // NOTE(review): the original comment labeled this "Kerberos", but
    // PlainLoginModule is SASL/PLAIN (username/password auth), not Kerberos
    // (Kerberos = GSSAPI via Krb5LoginModule). SASL/PLAIN also needs
    // security.protocol and sasl.mechanism to take effect, so it is only
    // configured when credentials are actually provided.
    if (username.nonEmpty) {
      props.setProperty("security.protocol", "SASL_PLAINTEXT")
      props.setProperty("sasl.mechanism", "PLAIN")
      props.setProperty("sasl.jaas.config",
        s"""org.apache.kafka.common.security.plain.PlainLoginModule required username="$username" password="$password";""")
    }

    // Source: Kafka 0.10 consumer decoding each record as a UTF-8 string.
    val stream: DataStreamSource[String] =
      env.addSource(new FlinkKafkaConsumer010[String]("metric-topic", new SimpleStringSchema(), props))
    stream.print("kafka")

    // Named execution makes the job easier to find in the Flink web UI.
    env.execute("FlinkDemo")
  }
}