// KafkaProducerService.java
package com.example.demo.service;
import com.alibaba.fastjson.JSONObject;
import com.example.demo.vo.KafkaCECMessage;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.Properties;
@Service
@Slf4j
public class KafkaProducerService {
@Value("")
private String kafkaServers;
@Value("")
private String topic;
private KafkaProducer<String, String> kafkaProducer = null;
private static final String LOG_TAG = "kafkaProducer";
    @PostConstruct
    private void init() {
        if (StringUtils.isNotBlank(topic) && StringUtils.isNotBlank(kafkaServers)) {
            Properties properties = new Properties();
            properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers);
            properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
            properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
            kafkaProducer = new KafkaProducer<>(properties);
        } else {
            log.info("[{}] init: kafka producer disabled, topic or servers not configured", LOG_TAG);
        }
    }
    @PreDestroy
    private void shutdown() {
        if (null != kafkaProducer) {
            kafkaProducer.close();
            log.info("[{}] shutdown: kafka producer closed", LOG_TAG);
        }
    }

    public boolean kafkaIsStart() {
        // The producer is only created when both topic and servers are configured,
        // so a null check covers both conditions.
        return kafkaProducer != null;
    }
    public <T> void sendMessage(String type, T data) {
        // Guard against NPE when Kafka is not configured and the producer was never created.
        if (!kafkaIsStart()) {
            return;
        }
        KafkaCECMessage<T> message = new KafkaCECMessage<>();
        message.setType(type);
        message.setData(data);
        message.setTimestamp(System.currentTimeMillis());
        try {
            String json = JSONObject.toJSONString(message);
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, json);
            kafkaProducer.send(record);
        } catch (Exception e) {
            log.error("[{}] send failed", LOG_TAG, e);
        }
    }
}
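
// Usage sketch (not part of the original listing): a hypothetical caller wired
// by constructor injection. "HEARTBEAT" and the String payload are arbitrary
// examples; any fastjson-serializable POJO works, since sendMessage() wraps the
// payload in a KafkaCECMessage envelope before serializing.
@Service
class DemoPublisher {

    private final KafkaProducerService kafkaProducerService;

    DemoPublisher(KafkaProducerService kafkaProducerService) {
        this.kafkaProducerService = kafkaProducerService;
    }

    void publishHeartbeat() {
        // kafkaIsStart() lets callers no-op cleanly when Kafka is not configured.
        if (kafkaProducerService.kafkaIsStart()) {
            kafkaProducerService.sendMessage("HEARTBEAT", "ping-" + System.currentTimeMillis());
        }
    }
}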

// KafkaConsumerService.java
package com.example.demo.service;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.example.demo.vo.KafkaCECMessage;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@Component
@Slf4j
public class KafkaConsumerService {
@Value("")
private String kafkaServers;
@Value("")
private String topic;
@Value("")
private String groupId;
private volatile boolean consumerSwitch = true;
private ExecutorService executor = null;
private ExecutorService consumerExecutor = null;
private KafkaConsumer<String, String> consumer = null;
private static String LOG_TAG = "kafka-consumer-cec";
    @PostConstruct
    private void init() {
        // Mirror the producer: skip startup entirely when Kafka is not configured.
        if (StringUtils.isBlank(topic) || StringUtils.isBlank(kafkaServers)) {
            log.info("[{}] init: kafka consumer disabled, topic or servers not configured", LOG_TAG);
            return;
        }
        executor = Executors.newSingleThreadExecutor();
        // The original code passed (corePoolSize=10, maximumPoolSize=1), which throws
        // IllegalArgumentException at construction; a fixed pool of 10 workers
        // preserves the apparent intent.
        consumerExecutor = new ThreadPoolExecutor(10, 10,
                0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>(1024),
                new ThreadFactoryBuilder().setNameFormat("consumer-%d").build(),
                new ThreadPoolExecutor.CallerRunsPolicy());
        log.info("[{}] init: executors created", LOG_TAG);
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 5);
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singleton(topic));
        executor.execute(this::run);
    }
    @PreDestroy
    private void shutdown() {
        consumerSwitch = false;
        if (null != consumer) {
            // wakeup() is the one thread-safe KafkaConsumer call; it aborts the
            // blocking poll so run() can exit and close the consumer on its own
            // thread. Closing here, from the container thread, would race the
            // polling thread and risk a ConcurrentModificationException.
            consumer.wakeup();
        }
        if (null != executor) {
            executor.shutdown();
            log.info("[{}] shutdown: poll executor has shut down", LOG_TAG);
        }
        if (null != consumerExecutor) {
            consumerExecutor.shutdown();
        }
    }
    private void run() {
        try {
            while (consumerSwitch) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, String> record : records) {
                    consumerExecutor.submit(() -> handle(record));
                }
            }
        } catch (WakeupException e) {
            // Expected path on shutdown: wakeup() aborts the blocking poll.
        } finally {
            // KafkaConsumer is not thread-safe; close it on the polling thread.
            consumer.close();
        }
    }

    private void handle(ConsumerRecord<String, String> record) {
        try {
            KafkaCECMessage<?> message = JSONObject.parseObject(record.value(), KafkaCECMessage.class);
            String type = message.getType();
            // TODO: dispatch on type (see the typed-dispatch sketch below)
        } catch (Exception e) {
            log.error("[{}] consume failed, data: {}", LOG_TAG, record.value(), e);
        }
    }
}
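
// Typed-dispatch sketch (not part of the original listing). handle() parses the
// envelope as a raw KafkaCECMessage, so the payload surfaces as an untyped
// JSONObject; a handler that knows the expected type can re-parse the raw value
// with fastjson's TypeReference to recover the generic parameter. DemoEvent is
// an assumed payload type for illustration.
class DemoEventHandler {

    /** Assumed example payload; real payload classes would live in the vo package. */
    static class DemoEvent {
        public String orderId;
    }

    void onMessage(String rawValue) {
        // The anonymous TypeReference subclass captures the full generic type,
        // so fastjson binds "data" to DemoEvent instead of JSONObject.
        KafkaCECMessage<DemoEvent> message = JSONObject.parseObject(
                rawValue, new TypeReference<KafkaCECMessage<DemoEvent>>() {});
        DemoEvent event = message.getData();
        // ... business logic on the typed payload ...
    }
}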

// KafkaCECMessage.java
package com.example.demo.vo;

import lombok.Data;

/**
 * Envelope for CEC messages on the wire: a type tag for consumer-side dispatch,
 * the payload, and a producer-side timestamp in epoch milliseconds.
 */
@Data
public class KafkaCECMessage<T> {
private String type;
private T data;
private Long timestamp;
}
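
// Wire-format note (an assumption based on fastjson 1.x defaults, which sort
// field names alphabetically when serializing): an envelope serializes to
//
//     {"data":"hello","timestamp":1700000000000,"type":"HEARTBEAT"}
//
// so consumers can route on "type" before binding "data" to a concrete class.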