Several Ways for Flink to Consume Kafka Data


First, a summary of three common approaches: simple mode, explicitly setting the start position, and subscribing to multiple topics. The utility class below demonstrates each.

import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.streaming.connectors.kafka.config.StartupMode;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaDeserializationSchemaWrapper;

import java.io.IOException;
import java.util.LinkedList;
import java.util.Properties;

/**
 * @author daqu
 * @date 2022/6/13 15:22
 * @description: Utility methods for building FlinkKafkaConsumer instances
 */
@Slf4j
public class KafkaConsumerUtils {

    // Simple mode: consume a single topic as plain strings
    public static FlinkKafkaConsumer<String> createKafkaConsumer(StreamExecutionEnvironment env, String topic, String groupID) throws Exception {

        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "192.168.1.178:9092,192.168.1.179:9092,192.168.1.180:9092");
        // The groupID parameter was previously unused; set it so Kafka tracks offsets per consumer group
        prop.setProperty("group.id", groupID);
        prop.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty("auto.offset.reset", "latest");


        SimpleStringSchema schema = new SimpleStringSchema();
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(topic, schema, prop);
        // Commit offsets back to Kafka when checkpoints complete
        consumer.setCommitOffsetsOnCheckpoints(true);
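        // This flag only takes effect when checkpointing is enabled on the environment;
        // otherwise the Kafka property enable.auto.commit governs offset commits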
        return  consumer;
    }

    // Specify the start position: pass a deserialization schema and a StartupMode
    public static FlinkKafkaConsumer flinkKafkaConsumerSerDe(StreamExecutionEnvironment env, String topic, String groupID, KafkaDeserializationSchema schema, StartupMode sm) throws IOException {

        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "192.168.1.178:9092,192.168.1.179:9092,192.168.1.180:9092");
        prop.setProperty("group.id", groupID);
        prop.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty("auto.offset.reset", "latest");



        FlinkKafkaConsumer consumer = new FlinkKafkaConsumer(topic, schema, prop);
        // Set the Kafka start position
        if (sm.equals(StartupMode.EARLIEST)) {
            consumer.setStartFromEarliest();
        } else if (sm.equals(StartupMode.LATEST)) {
            consumer.setStartFromLatest();
        } else {
            // Any other mode (e.g. GROUP_OFFSETS) keeps the default: resume from committed group offsets
            consumer.setStartFromGroupOffsets();
        }
        // Commit offsets back to Kafka when checkpoints complete (see note in createKafkaConsumer)
        consumer.setCommitOffsetsOnCheckpoints(true);
        return  consumer;
    }
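
    // Illustrative sketch (not in the original post): a custom KafkaDeserializationSchema
    // that prefixes each value with its partition and offset, showing why you might pass
    // something other than a plain KafkaDeserializationSchemaWrapper to flinkKafkaConsumerSerDe.
    // Usage: flinkKafkaConsumerSerDe(env, topic, groupID, new RecordWithMetadataSchema(), StartupMode.EARLIEST)
    public static class RecordWithMetadataSchema implements KafkaDeserializationSchema<String> {

        @Override
        public boolean isEndOfStream(String nextElement) {
            return false; // the Kafka stream is unbounded
        }

        @Override
        public String deserialize(org.apache.kafka.clients.consumer.ConsumerRecord<byte[], byte[]> record) {
            // Expose record metadata alongside the payload as "partition-offset:value"
            String value = record.value() == null ? "" : new String(record.value(), java.nio.charset.StandardCharsets.UTF_8);
            return record.partition() + "-" + record.offset() + ":" + value;
        }

        @Override
        public org.apache.flink.api.common.typeinfo.TypeInformation<String> getProducedType() {
            return org.apache.flink.api.common.typeinfo.Types.STRING;
        }
    }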

    // Multiple topics: subscribe to a list of topics with one consumer
    public static FlinkKafkaConsumer<String> multiTopicKafkaConsumer(StreamExecutionEnvironment env, String groupID) throws IOException {

        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "192.168.1.178:9092,192.168.1.179:9092,192.168.1.180:9092");
        prop.setProperty("group.id", groupID);
        prop.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty("auto.offset.reset", "latest");
        LinkedList<String> topics = new LinkedList<>();
        topics.add("test01");
        topics.add("test02");

        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(topics, new SimpleStringSchema(), prop);
        return  consumer;
    }
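
    // Note: besides a topic list, FlinkKafkaConsumer also offers a constructor that takes
    // a java.util.regex.Pattern and subscribes to every topic whose name matches it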

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        String topic ="test01";
        String groupId="kafka-group";
        FlinkKafkaConsumer consumer = KafkaConsumerUtils.createKafkaConsumer(env, topic, groupId);
        env.addSource(consumer).print();

        KafkaDeserializationSchemaWrapper<String> schemaWrapper = new KafkaDeserializationSchemaWrapper<>(new SimpleStringSchema());
        FlinkKafkaConsumer consumer1 = KafkaConsumerUtils.flinkKafkaConsumerSerDe(env, topic, groupId, schemaWrapper, StartupMode.LATEST);
        env.addSource(consumer1).print();


        FlinkKafkaConsumer consumer2 = KafkaConsumerUtils.multiTopicKafkaConsumer(env, groupId);
        env.addSource(consumer2).print();


        env.execute();
    }
}
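
One caveat worth calling out: as the comments above note, setCommitOffsetsOnCheckpoints(true) commits offsets only when checkpointing is enabled, and the main method above never enables it, so offset tracking there falls back to the Kafka client's enable.auto.commit setting. Below is a minimal sketch of wiring checkpointing in; the class name and the 5-second interval are illustrative choices, not from the original code.

public class CheckpointedConsumerDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 5 seconds; the consumer commits its offsets to Kafka
        // each time a checkpoint completes successfully
        env.enableCheckpointing(5000);
        FlinkKafkaConsumer consumer = KafkaConsumerUtils.createKafkaConsumer(env, "test01", "kafka-group");
        env.addSource(consumer).print();
        env.execute();
    }
}

When the job restarts from a checkpoint, the consumer resumes from the offsets stored in that checkpoint rather than from the position implied by auto.offset.reset.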
