1、依赖:
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>1.2.3.RELEASE</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>1.0.0</version>
</dependency>
2、其他依赖:
日志依赖(如 slf4j/logback)。
spring 相关依赖:
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
...
3、代码:
@Component
public class KafkaComponent {

    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaComponent.class);

    /** Topic that messages are published to. */
    private static final String TOPIC = "topic_name";
    /** Kafka broker bootstrap address (host:port) used for both producer and consumer. */
    private static final String BROKER = "192.168.10.11:9092";

    // Instance fields, not static: this bean is a Spring singleton, so one
    // instance exists per context; mutable static state assigned from an
    // instance lifecycle method (@PostConstruct) is error-prone.
    private KafkaProducer<String, String> alarmProducer;
    private KafkaConsumer<String, String> consumer;

    /**
     * Builds the producer once when the bean is initialized by Spring.
     */
    @PostConstruct
    private void build() {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER);
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        alarmProducer = new KafkaProducer<>(properties);
    }

    /**
     * Builds the consumer and subscribes it to {@link #TOPIC}.
     * NOTE(review): KafkaConsumer is not thread-safe — this component must be
     * used from a single consuming thread; confirm against callers.
     */
    public void buildConsumer() {
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER);
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, ConsumerGroup.COLLECTOR);
        // Allow up to 10 minutes between polls before the group coordinator
        // considers this consumer dead and rebalances.
        properties.setProperty(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "600000");
        consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singleton(TOPIC));
    }

    /**
     * Wraps the message in a record and sends it to {@link #TOPIC}.
     *
     * @param msg message payload to publish
     */
    public void sendMsg(String msg) {
        ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, msg);
        sendRecord(alarmProducer, record);
    }

    /**
     * Polls the consumer once (5s timeout) and logs each received record.
     * Lazily builds the consumer on first use — previously this threw a
     * NullPointerException when called before {@link #buildConsumer()}.
     */
    public void consumeData() {
        if (consumer == null) {
            buildConsumer();
        }
        // kafka-clients 1.0.x only offers poll(long); poll(Duration) arrived in 2.0.
        ConsumerRecords<String, String> consumerRecords = consumer.poll(5000);
        consumerRecords.forEach(record ->
                LOGGER.info("kafka==> key:{} value:{},time:{}", record.key(), record.value(), record.timestamp()));
    }

    /**
     * Sends the record asynchronously; failures are logged by the callback.
     *
     * @param kafkaProducer  producer to send with
     * @param producerRecord record to send
     */
    private void sendRecord(KafkaProducer<String, String> kafkaProducer,
                            ProducerRecord<String, String> producerRecord) {
        kafkaProducer.send(producerRecord, (metadata, exception) -> logError(producerRecord, exception));
    }

    /**
     * Logs a send failure (no-op on success, where the callback exception is null).
     */
    private void logError(ProducerRecord<String, String> producerRecord, Exception e) {
        if (e != null) {
            LOGGER.error("kafka producer send baseline error :{}", producerRecord.toString(), e);
        }
    }

    /**
     * Releases Kafka network resources on context shutdown; previously the
     * producer/consumer were never closed. @PreDestroy is JSR-250, from the
     * same javax.annotation package as the @PostConstruct already used here.
     */
    @PreDestroy
    private void shutdown() {
        if (alarmProducer != null) {
            alarmProducer.close();
        }
        if (consumer != null) {
            consumer.close();
        }
    }
}
4、调用:
// 注入
@Autowired
private KafkaComponent kafkaComponent;
/**********************************/
// 调用
LOGGER.info("发布kafka信息:{}", json.toJSONString());
kafkaComponent.sendMsg(json.toJSONString());
参考:https://blog.csdn.net/Zyy_z_/article/details/101680138