第一步 引包
<!-- Spring Boot + Kafka integration -->
<dependency>
    <groupId>org.springframework.integration</groupId>
    <artifactId>spring-integration-core</artifactId>
    <version>4.3.6.RELEASE</version>
    <!-- NOTE: the original declared <classifier>sources</classifier>, which pulls the
         source jar instead of the compiled classes and breaks at runtime. Removed. -->
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
第二步 配置生产者 config
/**
 * Producer configuration: exposes a {@link KafkaTemplate} backed by a
 * {@link DefaultKafkaProducerFactory} with String key/value serializers.
 */
@Configuration
@EnableKafka
public class KafkaProducersConfig {

    /** Kafka broker list, e.g. "host1:9092,host2:9092". */
    @Value("${souche.kafka.binder.brokers}")
    private String brokers;

    /** Template used by callers to send String messages to Kafka topics. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /**
     * Producer factory. Registered as a {@code @Bean} (the original was a plain
     * method) so Spring manages its lifecycle and closes the underlying
     * producer on context shutdown.
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        // Batch up to 64 KiB per partition before sending.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 65536);
        // Wait up to 1 ms for additional records to fill a batch.
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        // Total memory available for buffering unsent records (512 KiB).
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 524288);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }
}
第三步 配置消费者config
/**
 * Consumer configuration: wires a concurrent listener container factory
 * (4 consumer threads) on top of a String/String consumer factory.
 */
@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    /** Kafka broker list, e.g. "host1:9092,host2:9092". */
    @Value("${souche.kafka.binder.brokers}")
    private String brokers;

    /** Consumer group id shared by all listener threads. */
    @Value("${souche.kafka.group}")
    private String group;

    /** Container factory used by {@code @KafkaListener} methods. */
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // 4 concurrent consumers; useful only if the topic has >= 4 partitions.
        factory.setConcurrency(4);
        factory.getContainerProperties().setPollTimeout(4000);
        return factory;
    }

    @Bean
    public KafkaListeners kafkaListeners() {
        return new KafkaListeners();
    }

    /**
     * Consumer factory. Registered as a {@code @Bean} (the original was a plain
     * method) so Spring manages its lifecycle.
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        // Auto-commit is disabled: the listener container commits offsets
        // (default AckMode.BATCH). The original also set
        // auto.commit.interval.ms, which is ignored when auto-commit is off
        // and has been removed to avoid misleading readers.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, group);
        // Start from the newest offset when the group has no committed offset.
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        return new DefaultKafkaConsumerFactory<>(props);
    }
}
第四步配置消费者处理类
/**
 * Receives Kafka messages from the "boot" topic.
 */
public class KafkaListeners {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Handles one message payload from the "boot" topic.
     *
     * @param content the message value (String-deserialized)
     */
    @KafkaListener(topics = {"boot"})
    public void processMessage(String content) {
        try {
            // Simulates slow processing (demo only). Note: blocking 5 s per
            // message throttles the consumer thread.
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the container can shut down
            // promptly (the original swallowed it via printStackTrace()).
            Thread.currentThread().interrupt();
            logger.warn("Interrupted while processing message", e);
            return;
        }
        // Use the declared logger instead of System.out.
        logger.info("接收信息: {}", content);
    }
}
配置文件（application.yml）示例
souche:
kafka:
binder:
#kafka地址
brokers: 192.168.106.5:9092
#zk地址
zk-nodes: 192.168.106.5:2181
group: test
测试类
@Api("测试")
@RestController
@RequestMapping("/test")
public class TestController {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Typed template (the original used a raw KafkaTemplate, losing type safety).
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends a random UUID string to the "boot" topic.
     * Note: @ResponseBody is implied by @RestController, and send() throws no
     * checked exception, so the original "throws Exception" was removed.
     */
    @ApiOperation("测试kafka")
    @PostMapping("/Kafka")
    public void testkafka() {
        kafkaTemplate.send("boot", UUID.randomUUID().toString());
    }
}