依赖包:
<!-- Spring Integration Kafka adapter (used for the XML-driven wiring below). -->
<dependency>
<groupId>org.springframework.integration</groupId>
<artifactId>spring-integration-kafka</artifactId>
<version>1.3.0.RELEASE</version>
</dependency>
<!-- Kafka Java client; broker in the examples below is 0.10.x. -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.10.1.0</version>
</dependency>
<!-- NOTE(review): 1.0.0.RC1 is a pre-release; spring-kafka 1.0.x targets kafka-clients 0.9.x,
     while kafka-clients 0.10.x pairs with spring-kafka 1.1.x — verify version alignment. -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>1.0.0.RC1</version>
</dependency>
spring配置
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx.xsd
http://www.springframework.org/schema/jee
http://www.springframework.org/schema/jee/spring-jee.xsd">
<!-- Producer configuration properties. -->
<bean id="producerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<entry key="bootstrap.servers" value="172.19.18.130:9092"/>
<!-- NOTE(review): group.id is a consumer property; producers ignore it — confirm and remove. -->
<entry key="group.id" value="group100"/>
<entry key="retries" value="10"/>
<entry key="batch.size" value="16384"/>
<entry key="linger.ms" value="1"/>
<entry key="buffer.memory" value="33554432"/>
<!-- Key serializer must match KafkaTemplate's key type (Integer in the controller). -->
<entry key="key.serializer" value="org.apache.kafka.common.serialization.IntegerSerializer"/>
<entry key="value.serializer" value="org.apache.kafka.common.serialization.StringSerializer"/>
</map>
</constructor-arg>
</bean>
<!-- ProducerFactory consumed by the KafkaTemplate bean. -->
<bean id="producerFactory" class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
<constructor-arg>
<ref bean="producerProperties"/>
</constructor-arg>
</bean>
<!-- KafkaTemplate: inject this bean wherever messages need to be sent. -->
<bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
<constructor-arg ref="producerFactory"/>
<constructor-arg name="autoFlush" value="true"/>
<property name="defaultTopic" value="test"/>
<property name="producerListener" ref="producerListener"/>
</bean>
<bean id="producerListener" class="com.monitor.aaa.controller.KafkaProducerListener" />
<!-- Consumer configuration properties. -->
<bean id="consumerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<entry key="bootstrap.servers" value="172.19.18.130:9092"/>
<entry key="group.id" value="group100"/>
<entry key="enable.auto.commit" value="false"/>
<!-- NOTE(review): auto.commit.interval.ms has no effect while enable.auto.commit=false. -->
<entry key="auto.commit.interval.ms" value="1000"/>
<entry key="session.timeout.ms" value="15000"/>
<entry key="key.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
<entry key="value.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer"/>
</map>
</constructor-arg>
</bean>
<!-- ConsumerFactory consumed by the listener container. -->
<bean id="consumerFactory" class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
<constructor-arg>
<ref bean="consumerProperties"/>
</constructor-arg>
</bean>
<!-- The class that actually consumes messages (implements MessageListener). -->
<bean id="messageListernerConsumerService" class="com.monitor.aaa.controller.KafkaConsumer"/>
<!-- Listener container configuration: topic + message listener. -->
<bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
<constructor-arg value="test"/>
<property name="messageListener" ref="messageListernerConsumerService" />
</bean>
<!-- Container wired with the factory and container properties.
     NOTE(review): init-method="doStart" invokes an internal lifecycle method directly;
     the container is a SmartLifecycle bean — confirm whether autoStartup suffices. -->
<bean id="messageListenerContainer"
class="org.springframework.kafka.listener.KafkaMessageListenerContainer"
init-method="doStart" >
<constructor-arg ref="consumerFactory" />
<constructor-arg ref="containerProperties" />
</bean>
KafkaProducerController
package com.monitor.aaa.controller;
import java.util.Map;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import com.ls.sms.monitor.aaa.service.KafkaProducerService;
@Controller
@RequestMapping("/kafka")
public class KafkaProducerController {
@Autowired
private KafkaProducerServer kafkaProducer;
@Autowired
private KafkaTemplate<Integer,String> kafkaTemplate;
@RequestMapping("/sendMessage1")
public void sendMessage1(){
System.out.println("test spring kafka");
kafkaTemplate.sendDefault(String.valueOf(object));
System.out.println("ooooo");
}
@RequestMapping("/sendMessage")
public void sendMessage(){
String topic = "ls_coco1";
String value = "test";
String ifPartition = "0";
Integer partitionNum = 3;
String role = "test";//用来生成key
Map<String,Object> res = kafkaProducer.sndMesForTemplate
(topic, value, ifPartition, partitionNum, role);
System.out.println("测试结果如下:===============");
String message = (String)res.get("message");
String code = (String)res.get("code");
System.out.println("code:"+code);
System.out.println("message:"+message);
}
}
KafkaProducerListener
package com.monitor.aaa.controller;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.support.ProducerListener;
/**
 * ProducerListener registered on the KafkaTemplate (see the "producerListener"
 * bean in the Spring XML config); logs every send outcome.
 *
 * <p>Fixes over the original: parameterized SLF4J logging instead of string
 * concatenation, the error path is logged at ERROR level with the exception
 * passed as the throwable argument (so the stack trace goes to the log), and
 * the redundant {@code printStackTrace()} call is removed.
 */
@SuppressWarnings("rawtypes")
public class KafkaProducerListener implements ProducerListener {
    protected final Logger LOG = LoggerFactory.getLogger("kafkaProducer");

    /**
     * Invoked after a message is sent successfully.
     */
    @Override
    public void onSuccess(String topic, Integer partition, Object key,
            Object value, RecordMetadata recordMetadata) {
        LOG.info("==========kafka发送数据成功(日志开始)==========");
        LOG.info("----------topic:{}", topic);
        LOG.info("----------partition:{}", partition);
        LOG.info("----------key:{}", key);
        LOG.info("----------value:{}", value);
        LOG.info("----------RecordMetadata:{}", recordMetadata);
        LOG.info("~~~~~~~~~~kafka发送数据成功(日志结束)~~~~~~~~~~");
    }

    /**
     * Invoked when a send fails; logs the failing record and the exception.
     */
    @Override
    public void onError(String topic, Integer partition, Object key,
            Object value, Exception exception) {
        LOG.error("==========kafka发送数据错误(日志开始)==========");
        LOG.error("----------topic:{}", topic);
        LOG.error("----------partition:{}", partition);
        LOG.error("----------key:{}", key);
        LOG.error("----------value:{}", value);
        // Pass the exception as the final argument so SLF4J logs the stack trace;
        // printStackTrace() to stderr is no longer needed.
        LOG.error("~~~~~~~~~~kafka发送数据错误(日志结束)~~~~~~~~~~", exception);
    }

    /**
     * Return value controls whether onSuccess callbacks are delivered at all.
     */
    @Override
    public boolean isInterestedInSuccess() {
        LOG.info("///kafkaProducer监听器启动///");
        return true;
    }
}
KafkaProducerServer
package com.monitor.aaa.controller;
import java.util.Map;
/**
 * Sending facade over the KafkaTemplate used by KafkaProducerController.
 *
 * NOTE(review): the method name "sndMesForTemplate" is a typo for
 * "sendMessageForTemplate", but renaming would break existing callers.
 */
public interface KafkaProducerServer {
// Params (inferred from the controller's call site — confirm against the implementation):
//   topic        - destination topic
//   value        - message payload
//   ifPartition  - flag string selecting whether to target an explicit partition
//   partitionNum - number of partitions available for selection
//   role         - used to build the message key
// Returns a result map; the controller reads String values under "code" and "message".
public Map<String,Object> sndMesForTemplate(String topic, Object value, String ifPartition,
Integer partitionNum, String role);
}
KafkaConsumer
package com.monitor.aaa.controller;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.MessageListener;
/**
 * Message listener wired as the "messageListernerConsumerService" bean in the
 * Spring XML config; receives records from the listener container and prints them.
 *
 * <p>Fixes over the original: removed five unused imports (IOException,
 * HttpServletResponse, RestController, RequestContextHolder,
 * ServletRequestAttributes) and the unused Component import.
 *
 * <p>NOTE(review): the class name shadows
 * {@code org.apache.kafka.clients.consumer.KafkaConsumer}; consider renaming
 * (would also require updating the bean definition in the XML config).
 */
public class KafkaConsumer implements MessageListener<String, String> {
    /**
     * Called by the container for each consumed record; prints its coordinates
     * and payload to stdout.
     */
    @Override
    public void onMessage(ConsumerRecord<String, String> record) {
        String topic = record.topic();
        String key = record.key();
        String value = record.value();
        long offset = record.offset();
        int partition = record.partition();
        System.out.println(topic);
        System.out.println(key);
        System.out.println(value);
        System.out.println(offset);
        System.out.println(partition);
    }
}
kafka命令
# Start ZooKeeper (required by Kafka 0.10.x brokers).
bin/zookeeper-server-start.sh config/zookeeper.properties
# Start the Kafka broker in the background.
bin/kafka-server-start.sh config/server.properties &
# Console producer: type lines to publish them to topic "test".
bin/kafka-console-producer.sh --broker-list 172.19.18.130:9092 --topic test
# Console consumer via ZooKeeper (the 0.10-era flag; newer tools use --bootstrap-server).
bin/kafka-console-consumer.sh --zookeeper 172.19.18.130:2181 --topic test --from-beginning