问题:hello() 通过 seek 将各分区定位到指定 offset 后开始读取数据,但此时与 Kafka 服务器断开连接;重连之后,该如何从中断处的 offset 继续读取?

package com.example.demo.Consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;

/**
 * Demo Kafka consumer showing two ways to choose the starting offset:
 *
 * <ul>
 *   <li>{@link #hello()} — look up, per partition, the offset whose record
 *       timestamp is at/after a fixed wall-clock time and {@code seek} there.</li>
 *   <li>{@link #hi()} — do NOT seek. With a {@code group.id} and auto-commit
 *       enabled, {@code poll()} starts from the group's last committed offset,
 *       which is exactly how consumption resumes from the interruption point
 *       after the broker connection drops and comes back.</li>
 * </ul>
 */
public class Consumer {

    private static final String BOOTSTRAP_SERVERS = "localhost:9092";
    private static final String GROUP_ID = "tstest";
    private static final String TOPIC = "tstest";
    // Thread-safe, cached formatter for "yyyy-MM-dd HH:mm:ss" in the system zone.
    private static final DateTimeFormatter TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    public static void main(String[] args) throws ParseException {
        // hello() seeks to an offset derived from a timestamp; hi() resumes from
        // the consumer group's committed offsets (answers the "how do I continue
        // after a disconnect" question).
        // hello();
        hi();
    }

    /**
     * Parses a {@code yyyy-MM-dd HH:mm:ss} string (interpreted in the system
     * default time zone) into epoch milliseconds.
     *
     * @param time wall-clock time string, e.g. {@code "2021-03-16 17:16:00"}
     * @return milliseconds since the epoch
     * @throws ParseException kept for signature compatibility; java.time throws
     *         an unchecked DateTimeParseException on malformed input instead
     */
    private static long timeToTimestamp(String time) throws ParseException {
        return LocalDateTime.parse(time, TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    /**
     * Builds a String/String consumer for the demo group.
     *
     * <p>Auto-commit every 100 ms means the broker remembers this group's
     * progress; after a disconnect/restart, {@code poll()} picks up from the
     * last committed offset automatically.
     */
    private static KafkaConsumer<String, String> buildConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("group.id", GROUP_ID);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "100");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return new KafkaConsumer<>(props);
    }

    /** Lists every partition of {@code topic} as {@link TopicPartition}s. */
    private static List<TopicPartition> allPartitions(
            KafkaConsumer<String, String> consumer, String topic) {
        List<TopicPartition> topicPartitions = new ArrayList<>();
        for (PartitionInfo partitionInfo : consumer.partitionsFor(topic)) {
            topicPartitions.add(new TopicPartition(partitionInfo.topic(), partitionInfo.partition()));
        }
        return topicPartitions;
    }

    /** Polls forever, printing every record. Never returns. */
    private static void pollForever(KafkaConsumer<String, String> consumer) {
        while (true) {
            // poll(Duration) replaces the deprecated poll(long) overload.
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("topic = %s ,partition = %d,offset = %d, key = %s, value = %s%n",
                        record.topic(), record.partition(),
                        record.offset(), record.key(), record.value());
            }
        }
    }

    /**
     * Variant 1: start each partition at the first offset whose record
     * timestamp is at/after a fixed time, via {@code offsetsForTimes} + {@code seek}.
     */
    private static void hello() throws ParseException {
        KafkaConsumer<String, String> consumer = buildConsumer();
        DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        long fetchDataTime = timeToTimestamp("2021-03-16 17:16:00");

        List<TopicPartition> topicPartitions = allPartitions(consumer, TOPIC);
        Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
        for (TopicPartition tp : topicPartitions) {
            timestampsToSearch.put(tp, fetchDataTime);
        }
        consumer.assign(topicPartitions);

        Map<TopicPartition, OffsetAndTimestamp> map = consumer.offsetsForTimes(timestampsToSearch);
        System.out.println("开始设置各分区初始偏移量...");
        for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : map.entrySet()) {
            // The value is null when the requested time is later than the
            // timestamp of the partition's newest record.
            OffsetAndTimestamp offsetTimestamp = entry.getValue();
            if (offsetTimestamp != null) {
                System.out.println("partition = " + entry.getKey().partition()
                        + ", time = " + df.format(new Date(offsetTimestamp.timestamp()))
                        + ", offset = " + offsetTimestamp.offset());
                // Position the consumer at the looked-up offset.
                consumer.seek(entry.getKey(), offsetTimestamp.offset());
            }
        }
        System.out.println("设置各分区初始偏移量结束...");
        pollForever(consumer);
    }

    /**
     * Variant 2: no seek at all. Because the consumer has a {@code group.id}
     * and auto-commit enabled, offsets are committed as records are consumed,
     * and {@code poll()} after (re)assignment resumes from the last committed
     * offset — i.e. from where consumption was interrupted by the disconnect.
     */
    private static void hi() throws ParseException {
        KafkaConsumer<String, String> consumer = buildConsumer();
        consumer.assign(allPartitions(consumer, TOPic()));
        pollForever(consumer);
    }

    /** Indirection kept trivial so the topic is defined in one place. */
    private static String TOPic() {
        return TOPIC;
    }
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值