生产者每秒生产1000条告警
import cn.hutool.json.JSONUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import java.util.concurrent.atomic.AtomicInteger;
@Component
public class Runer implements ApplicationRunner {

    /** Number of one-second production bursts. */
    private static final int BURSTS = 60 * 1;
    /** Messages produced per burst (i.e. per second). */
    private static final int MESSAGES_PER_BURST = 1000;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * On application startup, produces {@code BURSTS * MESSAGES_PER_BURST} messages:
     * one worker thread per second, each sending 1000 messages, then prints the total.
     *
     * @param args startup arguments (unused)
     * @throws Exception if the pacing sleep or the final join is interrupted
     */
    @Override
    public void run(ApplicationArguments args) throws Exception {
        AtomicInteger atomicInteger = new AtomicInteger(0);
        Thread[] workers = new Thread[BURSTS];
        for (int i = 0; i < BURSTS; i++) {
            workers[i] = new Thread(() -> {
                for (int j = 0; j < MESSAGES_PER_BURST; j++) {
                    send(atomicInteger);
                    atomicInteger.incrementAndGet();
                }
            });
            workers[i].start();
            // Pace the bursts: one batch of 1000 messages per second.
            Thread.sleep(1000L);
        }
        // BUG FIX: the original printed the total right after the loop, while worker
        // threads could still be sending — the count was routinely short. Wait for
        // every worker to finish before reporting.
        for (Thread worker : workers) {
            worker.join();
        }
        System.out.println("============共发送============="+atomicInteger.get());
    }

    /**
     * Builds one message carrying the current counter value and publishes it to the
     * "myProducer" topic as JSON.
     * NOTE(review): {@code seq} is read twice (string concatenation and setSeq) while
     * other threads increment it concurrently, so the sequence embedded in the payload
     * is not guaranteed unique or consistent — confirm whether exact sequencing matters.
     *
     * @param seq shared message counter (incremented by the caller after each send)
     */
    public void send(AtomicInteger seq) {
        KafkaEnty kafkaEnty = new KafkaEnty();
        String str = seq + "Kafka 本身作为流处理平台,在大数据处理能力上应用广泛;同时 Kafka 也可以作为消息队列。本文将介绍基于 SpringBoot 2.6 集成 Kafka 2.8。";
        kafkaEnty.setSeq(seq);
        kafkaEnty.setContext(str);
        kafkaTemplate.send("myProducer", JSONUtil.toJsonStr(kafkaEnty));
    }
}
消费者将数据入库
import cn.hutool.json.JSONUtil;
import com.cloud.consumer.pojo.EmtyService;
import com.cloud.consumer.pojo.KafkaEnty;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Kafka message consumer: listens on the "myProducer" topic and persists each
 * received message to the database via {@link EmtyService}.
 *
 * @Author: junqiang.lu
 * @Date: 2022/2/25
 */
@Slf4j
@Component
public class KafkaMQConsumer {

    @Autowired
    private EmtyService emtyService;

    /**
     * Handles one record from the "myProducer" topic: logs the raw record, then
     * deserializes the non-null JSON payload into a {@link KafkaEnty} and inserts it.
     *
     * @param record the consumed Kafka record (its value may be null)
     */
    @KafkaListener(topics = "myProducer")
    public void receive(ConsumerRecord<?, ?> record) {
        log.info("record: {}", record);
        Object payload = record.value();
        if (payload == null) {
            return;
        }
        String message = payload.toString();
        log.info("message: {}", message);
        emtyService.insert(JSONUtil.toBean(message, KafkaEnty.class));
    }
}
发现每秒只能消费500条数据,于是我怀疑是拉取能力不足,于是把单次 poll 最多拉取的记录数设置为 2000 条,单次拉取的最长等待时间设置为 0.5 秒
spring:
  kafka:
    # Local single-node broker.
    bootstrap-servers: 127.0.0.1:9092
    consumer:
      group-id: 1
      # Max records returned by a single poll() (broker default is 500).
      max-poll-records: 2000
      # Broker waits at most 500 ms for data before answering a fetch.
      fetch-max-wait: 500
发现问题还是没有解决,于是我注释掉数据库入库操作,只打印日志,发现消息能够被及时消费,说明瓶颈在入库;于是给消息处理(入库)加上多线程,问题解决
/**
 * Fixed worker pool for the blocking database inserts. BUG FIX: the original
 * spawned one {@code new Thread} per message, which at 1000 msg/s creates
 * unbounded threads and can exhaust the process; a bounded pool offloads the
 * slow insert without that risk.
 * NOTE(review): handing the insert off asynchronously means the consumer may
 * commit the offset before the insert completes, so a crash can lose messages —
 * confirm whether at-least-once persistence is required here.
 */
private static final ExecutorService DB_WORKER_POOL = Executors.newFixedThreadPool(8);

/**
 * Handles one record from the "myProducer" topic: logs it, then submits the
 * JSON-to-{@link KafkaEnty} deserialization and DB insert to the worker pool.
 *
 * @param record the consumed Kafka record (its value may be null)
 */
@KafkaListener(topics = "myProducer")
public void receive(ConsumerRecord<?, ?> record) {
    log.info("record: {}", record);
    Optional.ofNullable(record.value())
            .ifPresent(message -> {
                log.info("message: {}", message.toString());
                // 多线程处理 — offload the blocking insert to the bounded pool.
                DB_WORKER_POOL.execute(() ->
                        emtyService.insert(JSONUtil.toBean(message.toString(), KafkaEnty.class)));
            });
}