package flinkdemo.sinkDemo;
import flinkdemo.sourceDemo.deserialization.ConsumerRecordKafkaDeserializationSchema;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import java.nio.charset.StandardCharsets;
import java.util.*;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.kafka.clients.consumer.ConsumerRecord;
/**
* @author zhangkai
* @create 2019/12/11
*/
public class SinkToKafka {
public static void main(String[] args) throws Exception {
String topicList = "otter_sms_0";
System.out.println(topicList);
StreamExecutionEnvironment see = Stre
// NOTE(review): the lines below are web-page residue from the article this snippet was copied from,
// and the preceding statement is truncated mid-token ("Stre") — the original file is incomplete.
// Original text (translated): "5 ways for Flink to consume a kafka-topic from a fixed offset"
// Original text (translated): "Latest recommended article published 2023-03-20 18:09:34"