1 webui的依赖
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-runtime-web_2.11</artifactId>
<version>${flink.version}</version>
<scope>${scope.type}</scope>
</dependency>
---
2 本地运行代码
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.{Configuration, RestOptions}
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, createTypeInformation}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
object DataStreamReadKafka {

  /** Entry point: starts a local Flink job with the embedded WebUI enabled
    * and prints every record read from the configured Kafka topics.
    */
  def main(args: Array[String]): Unit = {
    // Local execution environment with the WebUI; the REST port is set
    // explicitly so the UI is reachable at a known address.
    val flinkConf = new Configuration()
    flinkConf.setString(RestOptions.BIND_PORT, "8081") // WebUI access port
    val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(flinkConf)
    env.setParallelism(1)

    // NOTE(review): the table environment is created but not used further in
    // this sample; kept to mirror the original tutorial code.
    val tableEnv = StreamTableEnvironment.create(env)

    // Kafka source reading two topics from the earliest offsets, with record
    // values deserialized as plain strings.
    val topicList = "test,test_2".split(",").toList
    val kafkaSource = KafkaSource
      .builder[String]
      .setBootstrapServers("localhost:9092")
      .setTopics(topicList: _*)
      .setGroupId("shy_test")
      .setStartingOffsets(OffsetsInitializer.earliest)
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build

    // No event-time processing here, so no watermarks are generated.
    val stream = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source")
    stream.print()

    // Submit and run the job on the local environment.
    env.execute()
  }
}
3 访问webui
注意: 每个本地任务需指定各自不同的端口来访问 WebUI,否则多个任务同时运行时会发生端口冲突。