同步提交offset:提交调用会阻塞当前消费线程,直到broker确认;提交失败时(可重试的异常)会自动重试,可靠性高但延时较高。
package com.ln.kafka.custom;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.util.Arrays;
import java.util.Properties;
/**
* @ProjectName: kafka
* @Package: com.ln.kafka.custom
* @Name:SyncSubmitOffset
* @Author:linianest
* @CreateTime:2021/1/8 10:56
* @version:1.0
* @Description TODO:手动提交offset:同步提交offset
*/
/**
* 1:关闭自动提交offset的功能
*/
/**
 * Demonstrates manual synchronous offset committing: auto-commit is disabled and
 * {@link KafkaConsumer#commitSync()} is called once per polled batch. commitSync
 * blocks until the broker acknowledges the commit (retrying retriable failures),
 * trading latency for at-least-once delivery of the committed position.
 */
public class SyncSubmitOffset {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop101:9092");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "1234");
        // Disable auto-commit (default is true) so the only offset advancement
        // is the explicit commitSync() after each processed batch.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        // Create the consumer and subscribe to the topic to consume.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("first"));
        while (true) {
            // Pull the next batch of records (NOTE(review): poll(long) is the
            // pre-2.0 API; newer clients prefer poll(Duration) — confirm client version).
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                // Bug fix: the original format string used %d for topic() (a String),
                // which throws IllegalFormatConversionException, and printed offset()
                // under the "key" label while never printing the key at all.
                System.out.printf("topic=%s,offset=%d,key=%s,value=%s%n",
                        record.topic(), record.offset(), record.key(), record.value());
            }
            // After each consumed batch, synchronously commit the offsets;
            // blocks the consuming thread until the commit succeeds.
            consumer.commitSync();
        }
    }
}
异步提交offset
特点:延时低,提交调用不阻塞消费线程,消费能力提升;但提交失败不会重试,若提交失败后发生再均衡,可能导致重复消费,若先提交了较大的offset而消息尚未处理完,则可能丢失数据。
package com.ln.kafka.custom;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.util.Arrays;
import java.util.Properties;
/**
* @ProjectName: kafka
* @Package: com.ln.kafka.custom
* @Name:AsyncSubmitOffset
* @Author:linianest
* @CreateTime:2021/1/8 11:04
* @version:1.0
* @Description TODO:手动提交offset:异步提交offset
*/
/**
 * Demonstrates manual asynchronous offset committing: auto-commit is disabled and
 * {@link KafkaConsumer#commitAsync()} is called once per polled batch. commitAsync
 * returns immediately and does not retry failed commits, so it offers lower latency
 * than commitSync at the cost of weaker delivery guarantees (possible duplicates,
 * or lost progress if a later rebalance hits before a successful commit).
 */
public class AsyncSubmitOffset {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop101:9092");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "1234");
        // Disable auto-commit (default is true) so the only offset advancement
        // is the explicit commitAsync() after each processed batch.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        // Create the consumer and subscribe to the topic to consume.
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("first"));
        while (true) {
            // Pull the next batch of records (NOTE(review): poll(long) is the
            // pre-2.0 API; newer clients prefer poll(Duration) — confirm client version).
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                // Bug fix: the original format string used %d for topic() (a String),
                // which throws IllegalFormatConversionException, and printed offset()
                // under the "key" label while never printing the key at all.
                System.out.printf("topic=%s,offset=%d,key=%s,value=%s%n",
                        record.topic(), record.offset(), record.key(), record.value());
            }
            // After each consumed batch, commit offsets asynchronously; the consuming
            // thread is not blocked. Consider the commitAsync(OffsetCommitCallback)
            // overload in production to at least log commit failures.
            consumer.commitAsync();
        }
    }
}