Kafka in Java: Getting Topics, Creating Topics, and Offset Information (current offset, end offset)

There are plenty of articles online about how to get offsets, but most of them copy from each other, target versions so old that the methods no longer work, or require creating a Consumer just to read the offsets. That never made sense to me: the listener already creates Consumers automatically, so why create yet another one just to query? So here I show how to get this information with Kafka's built-in Admin API instead.

Most of the explanations are in the code comments.

Dependency
implementation group: 'org.springframework.kafka', name: 'spring-kafka', version: '2.6.6'
Configuration file (for reference only)
#kafka
spring.kafka.bootstrap-servers=????
#number of concurrent consumers; must not exceed the number of partitions
spring.kafka.listener.concurrency=10
spring.kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer
spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
#the consumer's JSON deserializer only accepts classes from trusted packages
spring.kafka.consumer.properties.spring.json.trusted.packages=*
spring.kafka.consumer.group-id=test
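With the JSON (de)serialization and trusted packages configured above, a listener receives typed messages without you ever creating a Consumer yourself. A minimal sketch, assuming a hypothetical topic demo-topic and payload class DemoMessage (neither appears in the original article):

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class DemoListener {

    //hypothetical payload class, only for illustration
    public static class DemoMessage {
        public String text;
    }

    //Spring creates and manages the Consumer(s) behind this listener automatically
    //(up to spring.kafka.listener.concurrency threads); the JsonDeserializer configured
    //above turns the JSON payload back into DemoMessage because the package is trusted
    @KafkaListener(topics = "demo-topic", groupId = "test")
    public void onMessage(DemoMessage message) {
        System.out.println("received: " + message.text);
    }
}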
Creating the AdminClient bean
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaAdmin;

import java.util.HashMap;
import java.util.Map;

/**
 * @author xvjinjiang
 * create at  2021/6/1 15:01
 */
@Configuration
public class KafkaConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    @Bean //Kafka admin bean, analogous to RabbitMQ's RabbitAdmin; without this bean you cannot use a custom AdminClient to create topics
    public KafkaAdmin kafkaAdmin() {
        Map<String, Object> props = new HashMap<>();
        //connection address of the Kafka broker(s)
        //this is the Kafka address, not the ZooKeeper address
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        KafkaAdmin admin = new KafkaAdmin(props);
        return admin;
    }

    @Bean  //Kafka admin client; once this bean exists in Spring you can inject it and create topics, e.g. with multiple replicas in a cluster environment
    public AdminClient adminClient() {
        return AdminClient.create(kafkaAdmin().getConfig());
    }

}
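As a side note, KafkaAdmin can also create topics declaratively: any NewTopic bean in the application context is created automatically on startup if it does not exist yet. A minimal sketch, assuming a hypothetical topic name startup-topic (not part of the original article):

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

@Configuration
public class TopicConfig {

    //KafkaAdmin picks up NewTopic beans and creates the topics on startup
    @Bean
    public NewTopic startupTopic() {
        return TopicBuilder.name("startup-topic")
                .partitions(3)
                .replicas(1)
                .build();
    }
}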
Endpoint implementation
import io.swagger.v3.oas.annotations.Operation;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.*;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.TopicPartitionInfo;
import org.springframework.web.bind.annotation.*;

import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * @author xvjinjiang
 * create at  2021/6/1 15:08
 */
@RestController
@RequestMapping("kafka")
@Slf4j
public class KafkaController {

    private final AdminClient adminClient;

    public KafkaController(AdminClient adminClient) {
        this.adminClient = adminClient;
    }

    @PostMapping("create-topic")
    @Operation(operationId = "createTopic", summary = "创建新kafkaTopic")
    public void createTopic(@RequestParam String topicName, @RequestParam Integer num) {
        //topic name, number of partitions, and replication factor
        NewTopic topic = new NewTopic(topicName, num, (short) 1);
        adminClient.createTopics(Collections.singletonList(topic));
    }
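    //Note (not in the original code): createTopics() is asynchronous and returns a
    //CreateTopicsResult immediately. If you need to block until the broker has actually
    //created the topic, one possible variant is:
    //    adminClient.createTopics(Collections.singletonList(topic)).all().get();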

    @GetMapping("topic-names")
    @Operation(operationId = "getTopics", summary = "获取Topics")
    public Set<String> getTopics() {
        ListTopicsResult listTopics = adminClient.listTopics();
        try {
            return listTopics.names().get();
        } catch (Exception e) {
            log.error(e.getMessage());
        }
        return null;
    }

    /**
     * Get partition information for a topic
     */
    @GetMapping("topic-describe")
    @Operation(operationId = "getTopicsInfo", summary = "获取Topics")
    public List<String> getTopicsInfo(@RequestParam String topicName) throws Exception {
        DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(Collections.singleton(topicName));
        KafkaFuture<Map<String, TopicDescription>> all = describeTopicsResult.all();
        Map<String, TopicDescription> stringTopicDescriptionMap = all.get();
        TopicDescription topicDescription = stringTopicDescriptionMap.get(topicName);
        List<String> res = topicDescription.partitions().stream().map(TopicPartitionInfo::toString).collect(Collectors.toList());
        return res;

    }

    /**
     * Get offset information (current and end offsets) for a consumer group
     */
    @GetMapping("group-describe")
    @Operation(operationId = "getGroupInfo", summary = "获取GroupInf")
    public List<KafkaGroupInfo> getGroupInfo(@RequestParam String groupId, @RequestParam String topic) throws Exception {

        ListConsumerGroupOffsetsResult listConsumerGroupOffsetsResult = adminClient.listConsumerGroupOffsets(groupId);
        //current committed offsets of the group
        Map<TopicPartition, OffsetAndMetadata> topicPartitionOffsetAndMetadataMap = listConsumerGroupOffsetsResult.partitionsToOffsetAndMetadata().get(10, TimeUnit.SECONDS);
        Map<TopicPartition, OffsetSpec> map = new HashMap<>();
        topicPartitionOffsetAndMetadataMap.keySet().forEach(x -> {
            map.put(x, OffsetSpec.latest());
        });
        ListOffsetsResult listOffsetsResult = adminClient.listOffsets(map);
        Set<TopicPartition> topicPartitions = topicPartitionOffsetAndMetadataMap.keySet();
        List<KafkaGroupInfo> res = new ArrayList<>();
        for (TopicPartition topicPartition : topicPartitions) {
            //only keep partitions that belong to the requested topic
            if (!topicPartition.topic().equals(topic)) continue;
            //end (latest) offset of the partition
            ListOffsetsResult.ListOffsetsResultInfo listOffsetsResultInfo = listOffsetsResult.partitionResult(topicPartition).get();
            //assemble the response object
            KafkaGroupInfo kafkaGroupInfo = new KafkaGroupInfo();
            //toString() => topic + "-" + partition
            kafkaGroupInfo.consumerName = topicPartition.toString();
            Long lastOffset = listOffsetsResultInfo.offset();
            kafkaGroupInfo.lastOffset = lastOffset;
            Long currentOffset = topicPartitionOffsetAndMetadataMap.get(topicPartition).offset();
            kafkaGroupInfo.currentOffset = currentOffset;
            kafkaGroupInfo.lag = lastOffset - currentOffset;
            res.add(kafkaGroupInfo);
        }
        return res;
    }
}
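The article never shows the KafkaGroupInfo class that the last endpoint returns. Below is a minimal sketch reconstructed from the field assignments in getGroupInfo(); the field names come from the code above, but the class itself is my assumption:

/**
 * Response object for the group-describe endpoint.
 * Sketch only: the original article does not include this class.
 */
public class KafkaGroupInfo {

    //topic + "-" + partition, taken from TopicPartition.toString()
    public String consumerName;

    //end (latest) offset of the partition
    public Long lastOffset;

    //offset the consumer group has committed so far
    public Long currentOffset;

    //how far the group is behind: lastOffset - currentOffset
    public Long lag;
}

With this in place, GET /kafka/group-describe?groupId=test&topic=<your-topic> returns one entry per partition with its current offset, end offset, and lag.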