软件环境
jdk1.8
spring4.3.5
kafka_2.10(Scala 2.10 构建)版本 0.10.0.0
增加maven依赖配置
<!-- Kafka client (Scala 2.10 build) plus Spring Integration / Spring Kafka support -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.10</artifactId>
    <version>0.10.0.0</version>
</dependency>
<dependency>
    <groupId>org.springframework.integration</groupId>
    <artifactId>spring-integration-kafka</artifactId>
    <version>1.3.0.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>1.0.0.RELEASE</version>
</dependency>
增加spring-kafka-producer.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd
           http://www.springframework.org/schema/context
           http://www.springframework.org/schema/context/spring-context.xsd">

    <!-- Producer configuration properties.
         NOTE(review): the original config also set "group.id", but that is a
         consumer-only property; the producer ignores it and Kafka logs an
         "unknown config" warning, so it has been removed here. -->
    <bean id="producerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="127.0.0.1:9092" />
                <entry key="retries" value="1" />
                <entry key="batch.size" value="16384" />
                <entry key="linger.ms" value="1" />
                <entry key="buffer.memory" value="33554432" />
                <entry key="key.serializer"
                       value="org.apache.kafka.common.serialization.StringSerializer" />
                <entry key="value.serializer"
                       value="org.apache.kafka.common.serialization.StringSerializer" />
            </map>
        </constructor-arg>
    </bean>

    <!-- Producer factory used to create the KafkaTemplate below -->
    <bean id="producerFactory"
          class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
        <constructor-arg>
            <ref bean="producerProperties" />
        </constructor-arg>
    </bean>

    <!-- KafkaTemplate bean: inject this wherever messages need to be sent.
         autoFlush=true flushes after every send (safer, but slower). -->
    <bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
        <constructor-arg ref="producerFactory" />
        <constructor-arg name="autoFlush" value="true" />
        <property name="defaultTopic" value="defaultTopic" />
        <property name="producerListener" ref="kafkaProducerListener" />
    </bean>

    <!-- Listener receiving per-message success/failure callbacks from the template -->
    <bean id="kafkaProducerListener" class="com.zns.listenter.KafkaProducerListener" />
</beans>
增加spring-kafka-consumer.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd
           http://www.springframework.org/schema/context
           http://www.springframework.org/schema/context/spring-context.xsd">
    <!-- NOTE(review): the original schemaLocation also mapped the tx and jee
         schemas, but those namespaces were never declared or used, so the
         mappings have been dropped. -->

    <!-- Consumer configuration properties.
         NOTE(review): "zookeeper.connect" was removed — the new (0.10) Java
         consumer connects via bootstrap.servers only and does not recognize
         that property. enable.auto.commit=false means offsets are committed
         by the listener container instead of the Kafka client. -->
    <bean id="consumerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="127.0.0.1:9092" />
                <entry key="group.id" value="0" />
                <entry key="enable.auto.commit" value="false" />
                <entry key="auto.commit.interval.ms" value="1000" />
                <entry key="session.timeout.ms" value="15000" />
                <entry key="key.deserializer"
                       value="org.apache.kafka.common.serialization.StringDeserializer" />
                <entry key="value.deserializer"
                       value="org.apache.kafka.common.serialization.StringDeserializer" />
            </map>
        </constructor-arg>
    </bean>

    <!-- Consumer factory shared by both listener containers -->
    <bean id="consumerFactory"
          class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
        <constructor-arg>
            <ref bean="consumerProperties" />
        </constructor-arg>
    </bean>

    <!-- Listener container for topic1.
         NOTE(review): init-method="doStart" is kept from the original, but the
         container is a SmartLifecycle bean that Spring auto-starts anyway —
         confirm whether the explicit init-method is actually needed. -->
    <bean id="messageListenerContainer1"
          class="org.springframework.kafka.listener.KafkaMessageListenerContainer"
          init-method="doStart">
        <constructor-arg ref="consumerFactory" />
        <constructor-arg ref="containerProperties1" />
    </bean>

    <!-- Container properties for topic1: which topic to poll and which listener to invoke -->
    <bean id="containerProperties1"
          class="org.springframework.kafka.listener.config.ContainerProperties">
        <constructor-arg value="topic1" />
        <property name="messageListener" ref="kafkaConsumerListener1" />
    </bean>

    <!-- Listener container for topic2 (same notes as container 1) -->
    <bean id="messageListenerContainer2"
          class="org.springframework.kafka.listener.KafkaMessageListenerContainer"
          init-method="doStart">
        <constructor-arg ref="consumerFactory" />
        <constructor-arg ref="containerProperties2" />
    </bean>

    <!-- Container properties for topic2 -->
    <bean id="containerProperties2"
          class="org.springframework.kafka.listener.config.ContainerProperties">
        <constructor-arg value="topic2" />
        <property name="messageListener" ref="kafkaConsumerListener2" />
    </bean>

    <!-- Message listeners, one per topic -->
    <bean id="kafkaConsumerListener1" class="com.zns.listenter.KafkaConsumerListener1" />
    <bean id="kafkaConsumerListener2" class="com.zns.listenter.KafkaConsumerListener2" />
</beans>
KafkaProducerListener
package com.zns.listenter; import org.apache.kafka.clients.producer.RecordMetadata; import org.springframework.kafka.support.ProducerListener; /** * kafkaProducer监听器,在producer配置文件中开启 */ @SuppressWarnings("rawtypes") public class KafkaProducerListener implements ProducerListener{ /** * 方法返回值代表是否启动kafkaProducer监听器 */ public boolean isInterestedInSuccess() { System.out.println("///kafkaProducer监听器启动///"); return true; } /** * 发送消息成功后调用 */ public void onSuccess(String topic, Integer partition, Object key, Object value, RecordMetadata recordMetadata) { System.out.println("==========kafka发送数据成功(日志开始)=========="); System.out.println("----------topic:"+topic); System.out.println("----------partition:"+partition); System.out.println("----------key:"+key); System.out.println("----------value:"+value); System.out.println("----------RecordMetadata:"+recordMetadata); System.out.println("~~~~~~~~~~kafka发送数据成功(日志结束)~~~~~~~~~~"); } /** * 发送消息错误后调用 */ public void onError(String topic, Integer partition, Object key, Object value, Exception exception) { System.out.println("==========kafka发送数据错误(日志开始)=========="); System.out.println("----------topic:"+topic); System.out.println("----------partition:"+partition); System.out.println("----------key:"+key); System.out.println("----------value:"+value); System.out.println("----------Exception:"+exception); System.out.println("~~~~~~~~~~kafka发送数据错误(日志结束)~~~~~~~~~~"); exception.printStackTrace(); } }
KafkaConsumerListener1
package com.zns.listenter; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.springframework.kafka.listener.MessageListener; /** * kafka消息监听器 * 自动监听是否有消息需要消费 * */ public class KafkaConsumerListener1 implements MessageListener<String, String> { /** * 监听器自动执行该方法 * 消费消息 * 自动提交offset * 执行业务代码 * (high level api 不提供offset管理,不能指定offset进行消费) */ public void onMessage(ConsumerRecord<String, String> record) { System.out.println("=============kafkaConsumer开始消费============="); String topic = record.topic(); String key = record.key(); String value = record.value(); long offset = record.offset(); int partition = record.partition(); System.out.println("-------------topic:"+topic); System.out.println("-------------value:"+value); System.out.println("-------------key:"+key); System.out.println("-------------offset:"+offset); System.out.println("-------------partition:"+partition); System.out.println("~~~~~~~~~~~~~kafkaConsumer消费结束~~~~~~~~~~~~~"); } }
KafkaConsumerListener2
package com.zns.listenter; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.springframework.kafka.listener.MessageListener; /** * kafka消息监听器 * 自动监听是否有消息需要消费 * */ public class KafkaConsumerListener2 implements MessageListener<String, String> { /** * 监听器自动执行该方法 * 消费消息 * 自动提交offset * 执行业务代码 * (high level api 不提供offset管理,不能指定offset进行消费) */ public void onMessage(ConsumerRecord<String, String> record) { System.out.println("=============kafkaConsumer开始消费============="); String topic = record.topic(); String key = record.key(); String value = record.value(); long offset = record.offset(); int partition = record.partition(); System.out.println("-------------topic:"+topic); System.out.println("-------------value:"+value); System.out.println("-------------key:"+key); System.out.println("-------------offset:"+offset); System.out.println("-------------partition:"+partition); System.out.println("~~~~~~~~~~~~~kafkaConsumer消费结束~~~~~~~~~~~~~"); } }
引入xml文件
<!-- Add to the main Spring application context so both Kafka configs are loaded -->
<import resource="classpath:spring-kafka-producer.xml"/> <import resource="classpath:spring-kafka-consumer.xml"/>
调用测试
// Inject the KafkaTemplate bean defined in spring-kafka-producer.xml and send
// one test message to each topic; the listener containers consume them.
// (Snippet — the field declaration and the send calls belong in a class/method.)
@Autowired private KafkaTemplate<String, String> kafkaTemplate; kafkaTemplate.send("topic1", "111"); kafkaTemplate.send("topic2", "222");
启动运行zookeeper和kafka
调用上面的发送代码后,即可在消费者端的监听器日志中看到消息被消费。