项目中突然要用到kafka,由于没有学过,所以上网查了很多的资料,最后总结出这篇完整的博客,记录自己学习的过程,也和大家分享,希望能够帮助到各位。
pom依赖
maven工程pom文件导入kafka依赖
<!--============Kafka============-->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>2.0.4.RELEASE</version>
</dependency>
<!--============junit============-->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
<!--============spring-test============-->
<!-- spring-kafka 2.0.x is compiled against Spring Framework 5.0.x, so spring-test
     must come from the same 5.0.x line. The original 4.3.8.RELEASE mixed Spring 4
     test support with Spring 5 runtime classes, which fails when the test context
     is loaded. -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>5.0.4.RELEASE</version>
</dependency>
生产者xml配置文件
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd">
<!-- Producer configuration properties, passed as a Map to the producer factory. -->
<bean id="producerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<!-- Kafka broker address: host:port of the bootstrap server(s) -->
<entry key="bootstrap.servers" value="192.168.103.10:9092" />
<!--<entry key="group.id" value="${group.id}" />-->
<!-- number of resend attempts on transient send failures -->
<entry key="retries" value="3" />
<!-- batch size in bytes; records are grouped up to this size per partition -->
<entry key="batch.size" value="4096" />
<!-- how long (ms) to wait for more records before sending a batch -->
<entry key="linger.ms" value="10" />
<!-- total memory (bytes) the producer may use to buffer unsent records -->
<entry key="buffer.memory" value="40960" />
<!-- acks=all: wait for the full ISR to acknowledge each record (strongest durability) -->
<entry key="acks" value="all" />
<!-- String serializers for both key and value -->
<entry key="key.serializer"
value="org.apache.kafka.common.serialization.StringSerializer" />
<entry key="value.serializer"
value="org.apache.kafka.common.serialization.StringSerializer"/>
</map>
</constructor-arg>
</bean>
<!-- Producer factory used by KafkaTemplate; takes the properties map above. -->
<bean id="producerFactory"
class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
<constructor-arg>
<ref bean="producerProperties" />
</constructor-arg>
</bean>
<!-- KafkaTemplate bean: inject this wherever messages need to be sent (template.send(...)). -->
<bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
<constructor-arg ref="producerFactory" />
<!-- autoFlush=true flushes after every send: simpler semantics, lower throughput -->
<constructor-arg name="autoFlush" value="true" />
<!-- topic used when send(...) is called without an explicit topic -->
<property name="defaultTopic" value="hw_data" />
</bean>
</beans>
消费者配置xml文件
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd"><!-- 1. Consumer configuration properties -->
<!-- Values may be hard-coded or externalized as ${...} placeholders from a
     properties file to keep dev/test environments separate. -->
<bean id="consumerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<!-- Kafka broker address: host:port of the bootstrap server(s) -->
<entry key="bootstrap.servers" value="192.168.103.10:9092" />
<!-- consumer group id -->
<entry key="group.id" value="hw-data-group" />
<!-- auto-commit offsets every auto.commit.interval.ms -->
<entry key="enable.auto.commit" value="true" />
<entry key="session.timeout.ms" value="30000" />
<entry key="auto.commit.interval.ms" value="1000" />
<entry key="retry.backoff.ms" value="100" />
<!-- String deserializers matching the producer's String serializers -->
<entry key="key.deserializer"
value="org.apache.kafka.common.serialization.StringDeserializer" />
<entry key="value.deserializer"
value="org.apache.kafka.common.serialization.StringDeserializer" />
</map>
</constructor-arg>
</bean>
<!-- Consumer factory; takes the properties map above. -->
<bean id="consumerFactory"
class="org.springframework.kafka.core.DefaultKafkaConsumerFactory" >
<constructor-arg>
<ref bean="consumerProperties" />
</constructor-arg>
</bean>
<!-- The bean that actually processes records (implements MessageListener). -->
<bean id="KfkConsumerListener" class="com.lsh.kafka.KfkConsumerListener" />
<!-- Container properties: subscribed topics plus the listener; topics can also be externalized. -->
<bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
<constructor-arg name="topics">
<list>
<!-- Topic names to consume.
     NOTE(review): make sure KfkConsumerListener handles each of these
     topic names — a topic subscribed here but not handled in the listener
     is consumed and silently ignored. -->
<value>first</value>
<value>hw_data</value>
</list>
</constructor-arg>
<property name="messageListener" ref="KfkConsumerListener" />
</bean>
<!-- 5. Concurrent (multi-threaded) listener container; init-method doStart() begins polling -->
<!-- <bean id="messageListenerContainer" class="org.springframework.kafka.listener.ConcurrentMessageListenerContainer" init-method="doStart" >-->
<!-- <constructor-arg ref="consumerFactory" />-->
<!-- <constructor-arg ref="containerProperties" />-->
<!-- <property name="concurrency" value="2" />-->
<!-- </bean>-->
<!-- Single-threaded message listener container -->
<bean id="messageListenerContainer" class="org.springframework.kafka.listener.KafkaMessageListenerContainer" init-method="doStart">
<constructor-arg ref="consumerFactory"/>
<constructor-arg ref="containerProperties"/>
</bean>
</beans>
Spring的配置文件
在Spring的配置文件applicationContext.xml中引入kafka的配置文件
<!--引入Kafka配置文件-->
<import resource="spring-kafka-producer.xml"/>
<import resource="spring-kafka-consumer.xml"/>
消费者
package com.lsh.kafka;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.MessageListener;
/**
 * @author :LiuShihao
 * @date :Created in 2020/8/14 9:29 上午
 * @desc :Consumes Kafka messages delivered by the listener container.
 */
@Slf4j
public class KfkConsumerListener implements MessageListener<String, String> {
    /**
     * Handles one record per invocation. The container configured in
     * spring-kafka-consumer.xml subscribes to topics "first" and "hw_data",
     * so both are handled here. The original "second" branch was unreachable:
     * no container subscribes to a "second" topic, and records from "hw_data"
     * (the topic the producer test actually sends to) were silently dropped.
     */
    @Override
    public void onMessage(ConsumerRecord<String, String> data) {
        if ("first".equals(data.topic())) {
            System.out.println("first主题:" + data.value() + "被消费");
        } else if ("hw_data".equals(data.topic())) {
            System.out.println("hw_data主题:" + data.value() + "被消费");
        }
    }
}
测试类进行测试
最后进行测试
使用测试类需要导入相关的依赖
package com.lsh.kafka;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
/**
 * @author :LiuShihao
 * @date :Created in 2020/8/14 11:47 下午
 * @desc :Sends a test message with KafkaTemplate.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:applicationContext.xml"})
public class kafkaTest {
    // DateTimeFormatter is immutable and thread-safe; cache it instead of
    // rebuilding the pattern on every send.
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyyMMdd HH:mm:ss");

    // Parameterized <String, String> to match the String serializers configured
    // in spring-kafka-producer.xml; the original raw type loses compile-time
    // checking of key/value types.
    @Autowired
    KafkaTemplate<String, String> kafkaTemplate;

    /** Sends the current timestamp to the "hw_data" topic. */
    @Test
    public void test1() {
        kafkaTemplate.send("hw_data", LocalDateTime.now().format(TIMESTAMP_FORMAT));
        System.out.println("kafkaTemplate已生产消息");
    }
}
消费者成功消费消息
搞定!收工!