目录
1、Kafka安装
1.1上传安装包到服务器或下载
cd /usr/local
#下载安装包
wget https://archive.apache.org/dist/kafka/3.0.0/kafka_2.12-3.0.0.tgz
1.2解压
tar -zxvf kafka_2.12-3.0.0.tgz
# 改名
mv kafka_2.12-3.0.0 kafka
# 创建zk存储路径 和 kafka存储路径
mkdir -p /usr/local/kafka/zk/datas
mkdir -p /usr/local/kafka/datas
1.3修改配置文件
vim /usr/local/kafka/config/zookeeper.properties
配置文件内容
#数据存放路径
dataDir=/usr/local/kafka/zk/datas
# the port at which the clients will connect
clientPort=2181
# disable the per-ip limit on the number of connections since this is a non-production config
maxClientCnxns=0
# Disable the adminserver by default to avoid port conflicts.
# Set the port to something non-conflicting if choosing to enable this
admin.enableServer=false
# admin.serverPort=8080
![在这里插入图片描述](https://img-blog.csdnimg.cn/6a92a04a13d74c279b19509421374d3c.png)
1.4 kafka启动命令
# 先启动zookeeper(使用kafka内置的zookeeper)
/usr/local/kafka/bin/zookeeper-server-start.sh -daemon /usr/local/kafka/config/zookeeper.properties
# 再启动kafka
/usr/local/kafka/bin/kafka-server-start.sh -daemon /usr/local/kafka/config/server.properties
1.5 kafka关闭命令
# 先关闭kafka
/usr/local/kafka/bin/kafka-server-stop.sh
# 再关闭zookeeper
/usr/local/kafka/bin/zookeeper-server-stop.sh
2、SpringBoot使用kafka
依赖
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
配置文件
server:
port: 8080
spring:
kafka:
bootstrap-servers: 106.14.223.42:9092
producer: # 生产者
retries: 3 # 设置大于 0 的值,则客户端会将发送失败的记录重新发送
batch-size: 16384
buffer-memory: 33554432
acks: 1
# 指定消息key和消息体的编解码方式
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
consumer:
group-id: default-group
enable-auto-commit: false
auto-offset-reset: earliest
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
max-poll-records: 500
listener:
# 当每一条记录被消费者监听器(ListenerConsumer)处理之后提交
# RECORD
# 当每一批poll()的数据被消费者监听器(ListenerConsumer)处理之后提交
# BATCH
# 当每一批poll()的数据被消费者监听器(ListenerConsumer)处理之后,距离上次提交时间大于TIME时提交
# TIME
# 当每一批poll()的数据被消费者监听器(ListenerConsumer)处理之后,被处理record数量大于等于COUNT时提交
# COUNT
# TIME | COUNT 有一个条件满足时提交
# COUNT_TIME
# 当每一批poll()的数据被消费者监听器(ListenerConsumer)处理之后, 手动调用Acknowledgment.acknowledge()后提交
# MANUAL
# 手动调用Acknowledgment.acknowledge()后立即提交,一般使用这种
# MANUAL_IMMEDIATE
ack-mode: MANUAL_IMMEDIATE
### java代码
package com.ljh.controller;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
/**
 * Demo REST controller that both produces to and consumes from the Kafka
 * topic {@code ljh_topic}.
 *
 * <p>Relies on the {@code application.yml} shown above: manual offset commit
 * is enabled via {@code listener.ack-mode: MANUAL_IMMEDIATE} and
 * {@code consumer.enable-auto-commit: false}.
 *
 * @author lijiahao
 * @since 2022/7/8 13:53
 */
@RestController
public class SendController {

    // Parameterized type instead of the raw KafkaTemplate so the compiler
    // checks that key and value match the configured String serializers.
    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends one message to {@code ljh_topic}, appending the path variable
     * to a fixed payload prefix.
     *
     * @param id suffix appended to the message body
     * @return a fixed success message (the send is asynchronous; this does
     *         not wait for the broker acknowledgement)
     */
    @GetMapping("/{id}")
    public String send(@PathVariable("id") String id){
        kafkaTemplate.send("ljh_topic","ljh yao jia you211"+id);
        return "发送成功";
    }

    /**
     * Consumes records from {@code ljh_topic} and commits the offset
     * manually after processing.
     *
     * @param record the consumed record (String key/value per the configured
     *               deserializers)
     * @param ack    handle used to commit the offset; only injected because
     *               ack-mode is MANUAL/MANUAL_IMMEDIATE
     */
    @KafkaListener(topics = "ljh_topic")
    public void listenTopic(ConsumerRecord<String, String> record, Acknowledgment ack){
        String value = record.value();
        System.out.println(value);
        System.out.println(record);
        // Manually commit the offset only after the record was processed,
        // so an unprocessed record is redelivered after a crash.
        ack.acknowledge();
    }
}