1.引入依赖(根据自己的springcloud版本)
<!--kafka-->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream</artifactId>
<version>2.0.1.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
<version>2.0.1.RELEASE</version>
</dependency>
2.yml文件配置
spring:
cloud:
stream:
kafka:
binder:
brokers: kafka地址 多个以逗号(,)分割
#zk-nodes: zookeeper地址 多个以逗号(,)分割
required-acks: 1
#auto-create-topics: true #自动创建topic
#auto-add-partitions: true #自动增加分区
configuration:
client:
id: test
auto:
offset:
reset: latest
# key 和 value 序列化
key:
deserializer: org.apache.kafka.common.serialization.StringDeserializer
value:
deserializer: org.apache.kafka.common.serialization.StringDeserializer
enable:
auto:
commit: false #取消自动提交 设置true 则自动提交
session:
timeout:
ms: 900000 # 设置时间 不低于max-poll-interval-ms
max:
partition:
fetch:
bytes: 1536000 # 解决kafka消费big size信息时,而产生larger than the fetch size的问题
poll:
interval:
ms: 900000 # 每次消费的处理时间 15分钟
records: 5 #每次拉取数据条数
bindings:
#缺省的输入、输出通道
default_input:
consumer:
autoCommitOffset: false
maxPollInterval: 80000
bindings:
#缺省的输入、输出通道
default_input:
destination: default_topic
content-type: application/json
binder: kafka
group: default_group
consumer:
max-attempts: 2 #重试次数
default_output:
destination: default_topic
content-type: application/json
binder: kafka
2.1 注意
autoCommitOffset的设置位置.
spring.cloud.stream.kafka.bindings.default_input.consumer.autoCommitOffset=false #应该在这里设置(注意:binding名称需与实际通道名一致,本例为default_input)
spring.cloud.stream.bindings.default_input.consumer.autoCommitOffset=false #这里设置是无效的,获取Acknowledgment时会是null
3.代码实现
3.1设置通道
public interface KafkaChannel {
/**
 * Name of the outbound (producer) channel; must match the
 * "default_output" binding key in the YAML configuration.
 */
String DEFAULT_OUTPUT = "default_output";
/**
 * Name of the inbound (consumer) channel; must match the
 * "default_input" binding key in the YAML configuration.
 */
String DEFAULT_INPUT = "default_input";
/**
 * Outbound channel used to publish messages to the topic bound
 * to "default_output".
 * @return channel the message-sending channel
 */
@Output(DEFAULT_OUTPUT)
MessageChannel sendDefaultMessage();
/**
 * Inbound channel used to consume messages from the topic bound
 * to "default_input".
 * NOTE(review): method name misspells "receive"; renaming would
 * change the public interface, so it is left unchanged.
 * @return channel the message-receiving channel
 */
@Input(DEFAULT_INPUT)
MessageChannel recieveDefaultMessage();
}
3.2 发送消息
@Component
public class KafkaMessageSender {

    @Autowired
    private KafkaChannel channel;

    /**
     * Publishes a message to the default output channel (bound to the
     * topic configured for "default_output").
     *
     * @param message the payload to send
     * @return {@code true} if the channel accepted the message
     */
    public boolean sendToDefaultChannel(String message) {
        return channel.sendDefaultMessage()
                .send(MessageBuilder.withPayload(message).build());
    }
}
3.3 设置监听
@EnableBinding(value = KafkaChannel.class)
public class KafkaStreamListener {

    /**
     * Consumes messages from the default input channel and manually
     * commits the offset.
     *
     * The Acknowledgment header is only present when
     * spring.cloud.stream.kafka.bindings.default_input.consumer.autoCommitOffset=false
     * is configured; otherwise it is {@code null} and no manual commit
     * is performed.
     *
     * @param message the inbound message; payload is the raw String body
     */
    @StreamListener(KafkaChannel.DEFAULT_INPUT)
    public void receive(Message<String> message) {
        System.out.println(message.getPayload());
        Acknowledgment acknowledgment =
                message.getHeaders().get(KafkaHeaders.ACKNOWLEDGMENT, Acknowledgment.class);
        // Fix: "HH" (24-hour clock) instead of "hh" (12-hour, 1-12) so that
        // afternoon timestamps are not logged ambiguously (e.g. 13:05 vs 01:05).
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        System.out.println(sdf.format(new Date()) + "------start--------默认消息:" + message);
        if (acknowledgment != null) {
            // business processing of message.getPayload() goes here
            System.out.println("acknowledgment");
            // manual offset commit (autoCommitOffset=false)
            acknowledgment.acknowledge();
        }
        System.out.println(sdf.format(new Date()) + "------end--------默认消息");
    }
}
4.测试
@GetMapping(value = "/api/test1")
public void test1(String message) {
    System.out.println("-------------------");
    // Delegate to the sender and log whether the channel accepted the message.
    final boolean sent = sender.sendToDefaultChannel(message);
    System.out.println(sent);
}