一、 docker安装kafka
1、下载zookeeper镜像
[root@localhost ~]# docker pull wurstmeister/zookeeper
2、下载kafka的镜像
[root@localhost ~]# docker pull wurstmeister/kafka
3、启动镜像
- 启动zookeeper
[root@localhost ~]# docker run -d --name zookeeper -p 2181:2181 wurstmeister/zookeeper
- 启动kafka
[root@localhost ~]# docker run -d --name kafka -p 9092:9092 --link zookeeper --env KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 --env KAFKA_ADVERTISED_HOST_NAME=192.168.0.104 --env KAFKA_ADVERTISED_PORT=9092 --volume /etc/localtime:/etc/localtime wurstmeister/kafka:latest
4、测试
kafka 官网快速入门: http://kafka.apache.org/documentation/#quickstart
- 关闭防火墙
[root@localhost ~]# firewall-cmd --state
running
[root@localhost ~]# systemctl stop firewalld.service
[root@localhost ~]# firewall-cmd --state
not running
分别打开三个窗口
# 获取kafka的容器id
[root@localhost ~]# docker ps
[root@localhost ~]# docker exec -it 45d805660187 bash
- 创建测试主题
[root@localhost ~]# kafka-topics.sh --create --topic quickstart-events --bootstrap-server localhost:9092
- 生产消息
[root@localhost ~]# kafka-console-producer.sh --topic quickstart-events --bootstrap-server localhost:9092
- 消费消息
kafka-console-consumer.sh --topic quickstart-events --from-beginning --bootstrap-server localhost:9092
- 查看所有的主题topic
[root@localhost ~]# kafka-topics.sh --list --zookeeper zookeeper:2181  # 查看 topic 列表
```
![在这里插入图片描述](https://img-blog.csdnimg.cn/20210321121518346.png)
- 查看具体的topic的信息
```sh
[root@localhost ~]# kafka-topics.sh --describe --topic quickstart-events --bootstrap-server localhost:9092
Topic:quickstart-events PartitionCount:1 ReplicationFactor:1 Configs:
Topic: quickstart-events Partition: 0 Leader: 0 Replicas: 0 Isr: 0
二、 springboot整合kafka
整体的项目结构:
创建springboot项目
2.1、添加pom.xml依赖
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.4.3</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.bigdata</groupId>
    <artifactId>springboot-kafka</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>springboot-kafka</name>
    <description>Demo project for Spring Boot about kafka</description>
    <properties>
        <java.version>1.8</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-streams</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <!-- version managed by spring-boot-dependencies -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
        <!-- NOTE(review): fastjson 1.2.44 has known deserialization RCE
             vulnerabilities (e.g. CVE-2017-18349 family); 1.2.83 is the
             patched release of the 1.2.x line and is API-compatible here. -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.83</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
2.2、application.yml
# Spring Boot Kafka configuration.
# NOTE(review): the original paste had all indentation stripped, which is
# invalid YAML; the spring.kafka hierarchy is restored below.
spring:
  kafka:
    # Broker address — the Docker host running the kafka container.
    bootstrap-servers: 192.168.0.104:9092
    producer:
      retries: 0
      batch-size: 16384
      buffer-memory: 33554432
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      # acks=1: wait for the partition leader's acknowledgement only.
      acks: 1
    consumer:
      auto-commit-interval: 1S
      auto-offset-reset: earliest
      # Whether to auto-commit offsets. Defaults to true; set to false and
      # commit manually to avoid duplicate consumption and data loss.
      enable-auto-commit: false
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    listener:
      concurrency: 5
      # manual_immediate: commit as soon as Acknowledgment.acknowledge() is called.
      ack-mode: manual_immediate
      missing-topics-fatal: false
### 2.3、 webconfig的相关配置
package com.bigdata.springbootkafka.config;

import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;

/**
 * Web-layer bean configuration: exposes a {@link RestTemplate} built from
 * Spring Boot's auto-configured {@link RestTemplateBuilder}.
 */
@Configuration
public class WebConfig {

    /**
     * @param builder the auto-configured builder injected by Spring Boot
     * @return a RestTemplate with the builder's default settings
     */
    @Bean
    public RestTemplate restTemplate(RestTemplateBuilder builder) {
        final RestTemplate template = builder.build();
        return template;
    }
}
2.3、 常量
package com.bigdata.springbootkafka.constant;
public class KafkaMessageConstant {
public static final String DEFAULF_TOPIC = "DEFAULF_TOPIC_TEST";
public static final String TOPIC_GROUP1 = "TOPIC.GROUP1";
}
2.4、 controller
package com.bigdata.springbootkafka.controller;

import com.bigdata.springbootkafka.message.Producer;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

/**
 * REST endpoint that forwards posted data to the Kafka producer.
 */
@RestController
@RequestMapping("/kafka")
@Slf4j
public class KafkaMessageController {

    @Autowired
    private Producer producer;

    /**
     * POST /kafka/test — publish the form field {@code data} to the
     * default topic via {@link Producer#sendMessageDefault(Object)}.
     * The send itself is asynchronous; this only logs enqueueing.
     */
    @PostMapping("/test")
    public void sendMessage(@RequestParam("data") String data) {
        // SLF4J placeholder instead of string concatenation — avoids
        // building the message string when INFO is disabled.
        log.info("send message: {}", data);
        producer.sendMessageDefault(data);
        log.info("send message is ok");
    }
}
2.5、 producer
package com.bigdata.springbootkafka.message;
import com.alibaba.fastjson.JSON;
import com.bigdata.springbootkafka.constant.KafkaMessageConstant;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
@Component
@Slf4j
public class Producer {
@Autowired
private KafkaTemplate<String, Object> kafkaTemplate;
public void sendMessage(String topic, Object data) {
String jsonStringData = JSON.toJSONString(data);
log.info("topic:{}" + topic + "data: { }" + jsonStringData);
ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topic, jsonStringData);
future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
@Override
public void onFailure(Throwable throwable) {
// 发送失败
log.error("生产者 发送消息失败: {}" + throwable.getMessage());
}
@Override
public void onSuccess(SendResult<String, Object> stringObjectSendResult) {
log.info("生产者 发送消息成功: {}" + stringObjectSendResult.getProducerRecord().toString());
}
});
}
public void sendMessageDefault(Object data) {
sendMessage(KafkaMessageConstant.DEFAULF_TOPIC, data);
}
}
2.6、 consumer
package com.bigdata.springbootkafka.message;

import com.bigdata.springbootkafka.constant.KafkaMessageConstant;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.stereotype.Component;

import java.util.Optional;

/**
 * Kafka consumer for the default topic. Offsets are committed manually
 * because enable-auto-commit=false and ack-mode=manual_immediate are set
 * in application.yml.
 * (The unused KafkaProducer import from the original has been removed.)
 */
@Component
@Slf4j
public class Consumer {

    /**
     * Receive one record from the default topic and acknowledge it only
     * after the value has been observed and logged, so an unprocessed
     * record is redelivered after a crash.
     *
     * @param consumerRecord the raw Kafka record
     * @param ack            manual acknowledgment handle
     * @param topic          topic name injected from the record headers
     */
    @KafkaListener(topics = KafkaMessageConstant.DEFAULF_TOPIC, groupId = KafkaMessageConstant.TOPIC_GROUP1)
    public void receiveKafkaMessage(ConsumerRecord<?, ?> consumerRecord,
                                    Acknowledgment ack,
                                    @Header(KafkaHeaders.RECEIVED_TOPIC) String topic) {
        // Optional guards against records with a null value (e.g. tombstones).
        Optional<?> kafkaMessage = Optional.ofNullable(consumerRecord.value());
        // BUG FIX: use SLF4J placeholders instead of string concatenation.
        log.info(">>>>>>>>>> record = {}", kafkaMessage);
        if (kafkaMessage.isPresent()) {
            Object message = kafkaMessage.get();
            log.info("消费了Consumer test Topic: {}, Message: {}", topic, message);
            // Commit the offset only after successful processing.
            ack.acknowledge();
        }
    }
}
2.7、 单元测试
package com.bigdata.springbootkafka;

import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

/**
 * Posts a form-encoded message to the running application's REST endpoint,
 * which in turn publishes it to Kafka.
 *
 * BUG FIX: the original mixed a JUnit 4 {@code @RunWith(SpringJUnit4ClassRunner)}
 * with a JUnit 5 (Jupiter) {@code @Test} — the runner annotation is silently
 * ignored on Jupiter; with Spring Boot 2.4, {@code @SpringBootTest} alone is
 * correct. Also, the built HttpHeaders were never attached to the request;
 * they are now sent via an HttpEntity.
 */
@SpringBootTest(classes = SpringbootKafkaApplication.class)
@Slf4j
class SpringbootKafkaApplicationTests {

    @Autowired
    private RestTemplate restTemplate;

    @Test
    void contextLoad1() {
        log.info("==============发送消息 begin ============== ");
        String url = "http://localhost:8080/kafka/test";
        String data = "restTemplate jxq kafka test";
        HttpHeaders headers = new HttpHeaders();
        // 以表单的方式提交 — submit as application/x-www-form-urlencoded.
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        // 将请求头部和参数合成一个请求
        MultiValueMap<String, Object> paramMap = new LinkedMultiValueMap<>();
        paramMap.add("data", data);
        // Attach the headers to the request body (previously headers were
        // built but never sent).
        HttpEntity<MultiValueMap<String, Object>> request = new HttpEntity<>(paramMap, headers);
        restTemplate.postForEntity(url, request, String.class);
        log.info("==============发送消息 end ============== ");
    }
}
参考:
https://blog.csdn.net/qq_42715450/article/details/114293390
http://kafka.apache.org/documentation/#quickstart