package com.****.message;
import java.time.Duration;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.AbstractConsumerSeekAware;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class CustomSeekableStreamConsumer extends AbstractConsumerSeekAware {

    /** Underlying Kafka consumer; polled only by the single thread running {@link #startConsuming()}. */
    private final KafkaConsumer<String, String> consumer;
    /** Bounded hand-off queue between the polling thread and downstream processors. */
    private final BlockingQueue<ConsumerRecord<String, String>> messageQueue;
    private final int queueCapacity;
    /** Cooperative shutdown flag; {@link #stop()} flips it to end the poll loop. */
    private volatile boolean running = true;

    /**
     * Creates a consumer that buffers polled records into a bounded in-memory queue.
     *
     * @param consumerFactory factory used to create the underlying Kafka consumer
     * @param queueCapacity   maximum number of records buffered before polling pauses
     */
    public CustomSeekableStreamConsumer(ConsumerFactory<String, String> consumerFactory, int queueCapacity) {
        this.consumer = (KafkaConsumer<String, String>) consumerFactory.createConsumer();
        this.messageQueue = new LinkedBlockingQueue<>(queueCapacity);
        this.queueCapacity = queueCapacity;
    }

    /**
     * Poll loop: pulls records from Kafka and enqueues them, pausing while the queue is full.
     * Must be driven by exactly one thread — {@code KafkaConsumer} is not thread-safe.
     * Returns when {@link #stop()} is called or the thread is interrupted; the underlying
     * consumer is closed on exit so it is not leaked.
     */
    public void startConsuming() {
        try {
            while (running) {
                // Backpressure: when no capacity remains, pause polling instead of
                // blocking inside put() so we keep honoring the running flag.
                if (messageQueue.remainingCapacity() == 0) {
                    try {
                        Thread.sleep(300);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        log.error("Interrupted while waiting for queue space", e);
                        // Exit instead of spinning back into another blocking call.
                        return;
                    }
                    continue;
                }
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    try {
                        messageQueue.put(record);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        log.error("Interrupted while putting message into queue", e);
                        return;
                    }
                }
            }
        } finally {
            // Release network/socket resources held by the Kafka client.
            consumer.close();
        }
    }

    /** Requests the poll loop to terminate after its current iteration. */
    public void stop() {
        running = false;
    }

    /**
     * Blocks until a buffered record is available and returns it. Never returns {@code null}.
     *
     * @throws InterruptedException if interrupted while waiting
     */
    public ConsumerRecord<String, String> takeMessage() throws InterruptedException {
        return messageQueue.take();
    }
}
package com.***.listener;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.annotation.PostConstruct;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class MessageProcessor {
@Autowired
private RuleService ruleService;
//队列大小
public final int QUEUE_CAPACITY = 1000;
//线程池大小
public final int THREAD_COUNT = 10;
private final CustomSeekableStreamConsumer consumer;
private final ExecutorService executorService;
public MessageProcessor(ConsumerFactory<String, String> consumerFactory) {
this.consumer = new CustomSeekableStreamConsumer(consumerFactory, QUEUE_CAPACITY);
this.executorService = Executors.newFixedThreadPool(THREAD_COUNT);
}
@PostConstruct
public void init() {
startConsuming();
}
private void startConsuming() {
executorService.submit(() -> consumer.startConsuming());
}
@Scheduled(initialDelay = 7254, fixedDelay = Long.MAX_VALUE)
public void startProcessing() {
executorService.submit(() -> consumer.startConsuming());
while (true) {
try {
ConsumerRecord<String, String> record = consumer.takeMessage();
if (null != record) {
processMessage(record);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
log.error("Interrupted while processing message", e);
}
}
}
private void processMessage(ConsumerRecord<String, String> record) {
try {
// 处理业务逻辑
} catch (Exception e) {
log.error("处理失败 e={}", e);
}
}
}