黑猴子的家: Kafka Consumer API

Code -> GitHub

https://github.com/liufengji/kafka_api.git

1、Create a producer in the console

[victor@node3 kafka]$ bin/kafka-console-producer.sh --broker-list node1:9092 \
--topic first

>hello world
>hello 
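
For reference, the same messages can also be sent from Java instead of the console. A minimal producer sketch, assuming the same broker (node1:9092) and topic (first) as the console example; the class name SimpleProducer is made up for illustration:

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class SimpleProducer {

    public static void main(String[] args) {

        Properties props = new Properties();
        // Same broker as the console example; not every broker needs to be listed
        props.put("bootstrap.servers", "node1:9092");
        // Serializer classes for the message key and value
        props.put("key.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");

        KafkaProducer<String, String> producer = new KafkaProducer<>(props);

        // Send the same two messages typed into the console producer above
        producer.send(new ProducerRecord<>("first", "hello world"));
        producer.send(new ProducerRecord<>("first", "hello"));

        // Flush pending sends and release network resources
        producer.close();
    }
}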

2、Create a consumer (deprecated API)

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class CustomConsumer {

    @SuppressWarnings("deprecation")
    public static void main(String[] args) {

        Properties properties = new Properties();
        
        properties.put("zookeeper.connect", "node1:2181");
        properties.put("group.id", "g1");
        properties.put("zookeeper.session.timeout.ms", "500");
        properties.put("zookeeper.sync.time.ms", "250");
        properties.put("auto.commit.interval.ms", "1000");
        
        // Create the consumer connector
        ConsumerConnector consumer =
               Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
        
        // Map of topic name -> number of streams (threads) to create for it
        HashMap<String, Integer> topicCount = new HashMap<>();
        topicCount.put("first", 1);
        
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                                          consumer.createMessageStreams(topicCount);
        
        KafkaStream<byte[], byte[]> stream = consumerMap.get("first").get(0);
        
        ConsumerIterator<byte[], byte[]> it = stream.iterator();
        
        // hasNext() blocks until a new message arrives, so this loop runs forever
        while (it.hasNext()) {
            System.out.println(new String(it.next().message()));
        }
    }
}
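
The loop above blocks in hasNext() and reads a single stream. Below is a sketch of parallel consumption with the same deprecated API, assuming three streams for topic first and one thread per stream; the class name MultiStreamConsumer and the pool size are illustrative, not part of the repo above:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class MultiStreamConsumer {

    public static void main(String[] args) {

        Properties properties = new Properties();
        properties.put("zookeeper.connect", "node1:2181");
        properties.put("group.id", "g1");

        ConsumerConnector consumer =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));

        // Ask for 3 streams for topic "first"; each stream is drained by its own thread
        HashMap<String, Integer> topicCount = new HashMap<>();
        topicCount.put("first", 3);

        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                consumer.createMessageStreams(topicCount);

        ExecutorService pool = Executors.newFixedThreadPool(3);
        for (final KafkaStream<byte[], byte[]> stream : consumerMap.get("first")) {
            pool.submit(() -> {
                ConsumerIterator<byte[], byte[]> it = stream.iterator();
                while (it.hasNext()) {
                    System.out.println(Thread.currentThread().getName()
                            + " -> " + new String(it.next().message()));
                }
            });
        }

        // On JVM exit, shutdown() makes the blocking iterators return and
        // releases the ZooKeeper connection
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            consumer.shutdown();
            pool.shutdown();
        }));
    }
}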

3、Official example (offsets committed automatically) (new API)

import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class CustomNewConsumer {

    public static void main(String[] args) {

        Properties props = new Properties();
        // Kafka broker address(es); you do not need to list every broker
        props.put("bootstrap.servers", "node1:9092");
        // Specify the consumer group
        props.put("group.id", "test");
        // Whether offsets are committed automatically
        props.put("enable.auto.commit", "true");
        // Interval between automatic offset commits
        props.put("auto.commit.interval.ms", "1000");
        // Deserializer class for the key
        props.put("key.deserializer", 
        "org.apache.kafka.common.serialization.StringDeserializer");
        // Deserializer class for the value
        props.put("value.deserializer",
        "org.apache.kafka.common.serialization.StringDeserializer");
        // Create the consumer
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        
        // Topics the consumer subscribes to; several can be subscribed at once
        consumer.subscribe(Arrays.asList("first", "second","third"));

        while (true) {
            // Poll for records with a 100 ms timeout
            ConsumerRecords<String, String> records = consumer.poll(100);
            
            for (ConsumerRecord<String, String> record : records)
                System.out.printf("offset = %d, key = %s, value = %s%n",
                      record.offset(), record.key(), record.value());

        }
    }
}
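
The example above relies on automatic offset commits. Below is a minimal sketch of committing offsets manually with the same new consumer API, assuming the same broker and topic; the class name ManualCommitConsumer and the group id test-manual are made up for illustration. Offsets are committed with commitSync() only after the records returned by each poll have been processed:

import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ManualCommitConsumer {

    public static void main(String[] args) {

        Properties props = new Properties();
        props.put("bootstrap.servers", "node1:9092");
        // Hypothetical group id, chosen for this sketch
        props.put("group.id", "test-manual");
        // Turn off automatic offset commits
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("first"));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                // Process the record first ...
                System.out.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
            // ... then commit the offsets returned by this poll, so a record
            // is only marked consumed after it has been processed
            consumer.commitSync();
        }
    }
}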