1.需要引入的依赖包
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-test</artifactId>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka-test</artifactId>
    <scope>test</scope>
</dependency>
2. 配置
2.1 kafka存字符串
spring.kafka.bootstrap-servers=192.168.43.122:9092
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=all
spring.kafka.producer.retries=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.ByteArraySerializer
spring.kafka.producer.batch-size=16384
spring.kafka.consumer.auto-commit-interval=100
spring.kafka.consumer.bootstrap-servers=192.168.43.122:9092
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.group-id=zhsq
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
#spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer
spring.kafka.consumer.fetch-min-size=10
2.2 kafka存byte[]
#生产者配置
spring.kafka.bootstrap-servers=192.168.43.122:9092
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.acks=all
spring.kafka.producer.retries=1
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
#spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.ByteArraySerializer
spring.kafka.producer.batch-size=16384
#消费者配置
spring.kafka.consumer.auto-commit-interval=100
spring.kafka.consumer.bootstrap-servers=192.168.43.122:9092
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.group-id=zhsq
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
#spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer
spring.kafka.consumer.fetch-min-size=10
3.生产消费
生产者
package com.netpos.springkafka.Schedule;

import java.nio.charset.StandardCharsets;
import java.util.UUID;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;

/**
 * Scheduled Kafka producer for the tutorial: publishes a random UUID string
 * to topic "test" once per second, and the UTF-8 bytes of a random UUID to
 * topic "test_byte" every two seconds.
 */
@Component
@EnableScheduling
public class KafkaProducer {

    // Raw type kept on purpose: the value type (String vs byte[]) depends on
    // which value-serializer is active in application.properties (2.1 vs 2.2).
    @Autowired
    private KafkaTemplate kafkaTemplate;

    /**
     * 发送字符串 — sends a random UUID as a String message to topic "test".
     * Requires the StringSerializer configuration (section 2.1).
     */
    @Scheduled(cron = "0/1 * * * * ?")
    public void send() {
        String message = UUID.randomUUID().toString();
        ListenableFuture future = kafkaTemplate.send("test", message);
        future.addCallback(
                o -> System.out.println("send-消息发送成功:" + message),
                throwable -> System.out.println("消息发送失败:" + message));
    }

    /**
     * 发送byte[] — sends the UTF-8 bytes of a random UUID to topic "test_byte".
     * Requires the ByteArraySerializer configuration (section 2.2).
     */
    @Scheduled(cron = "0/2 * * * * ?")
    public void send_byte() {
        String message = UUID.randomUUID().toString();
        // Fix: encode with an explicit charset. The no-arg getBytes() uses the
        // platform default, which can differ between producer and consumer JVMs.
        ListenableFuture future = kafkaTemplate.send("test_byte", message.getBytes(StandardCharsets.UTF_8));
        future.addCallback(
                o -> System.out.println("send_byte-消息发送成功:" + message),
                throwable -> System.out.println("消息发送失败:" + message));
    }
}

消费者

package com.netpos.springkafka;

import java.nio.charset.StandardCharsets;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * Kafka consumers for the tutorial topics. Only one listener should be active
 * at a time, matching the value-deserializer configured in
 * application.properties (String for "test", byte[] for "test_byte").
 */
@Component
public class ConsumerTest {

    /** 接收字符串 — consumes String messages from topic "test". */
    @KafkaListener(topics = {"test"})
    public void getMessage(ConsumerRecord<String, String> record) {
        System.out.println("消费信息" + record.value());
    }

    /**
     * 接收byte数组 — consumes byte[] messages from topic "test_byte".
     * The annotation is commented out: enable it together with the
     * ByteArrayDeserializer configuration (section 2.2).
     */
    // @KafkaListener(topics = {"test_byte"})
    public void getMessageByte(ConsumerRecord<String, byte[]> record) {
        // Fix: decode with UTF-8 to match the producer's encoding, instead of
        // relying on the platform default charset.
        System.out.println("消费信息" + new String(record.value(), StandardCharsets.UTF_8));
    }
}