// Flume collects streaming data and forwards it to Kafka. Start the Flume agent with:
// bin/flume-ng agent --conf-file ./job/flume-kafka.conf -c conf/ --name a1 -Dflume.root.logger=DEBUG,console
// Implementation based on the Flink DataStream API
case class Tel(iphone:Long,timestamp:Long)
// Flink streaming job skeleton: reads raw strings from a Kafka topic into a
// DataStream[Tel].
// NOTE(review): the string literals below use curly “smart quotes”
// (U+201C/U+201D) instead of ASCII double quotes — this will NOT compile;
// replace them with straight quotes.
object zuoye1 {
// Entry point: sets up the streaming environment and wires the Kafka source.
def main(args: Array[String]): Unit = {
val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
// Run with a single parallel task (simplifies local debugging/ordering).
env.setParallelism(1)
// Use event-time semantics. NOTE(review): setStreamTimeCharacteristic is
// deprecated since Flink 1.12, where event time is the default — confirm
// the Flink version in use.
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val properties = new Properties()
// Kafka consumer configuration: broker list plus string deserializers for
// both record key and value.
properties.setProperty(“bootstrap.servers”, “hdp001:9092,hdp002:9092,hdp003:9092”)
properties.setProperty(“key.deserializer”, “org.apache.kafka.common.serialization.StringDeserializer”)
properties.setProperty(“value.deserializer”, “org.apache.kafka.common.serialization.StringDeserializer”)
// Source: consume the “tel” topic as plain strings.
// NOTE(review): this line is truncated in this chunk (ends at “prop”); the
// remainder — presumably `properties))` plus a transformation from String to
// Tel — is not visible here, so it is left untouched.
val datastream: DataStream[Tel] = env.addSource(new FlinkKafkaConsumer[String](“tel”, new SimpleStringSchema(), prop