首先导入两个包:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.10.0.0</version>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>1.0.5.RELEASE</version>
</dependency>
一个是 Kafka 客户端包(kafka-clients),一个是 Spring 的 Kafka 整合包(spring-kafka)。
然后建立生产者配置文件:
kafka_producer.xml
然后是消费者配置文件:
建立测试类:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.10.0.0</version>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>1.0.5.RELEASE</version>
</dependency>
一个kafkaClient包,一个spring整合包。
然后建立生产者配置文件:
kafka_producer.xml
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd">
<!-- Raw KafkaProducer configuration properties, passed straight to the Kafka client. -->
<bean id="producerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<entry key="bootstrap.servers" value="localhost:9092" />
<entry key="group.id" value="test" />
<entry key="retries" value="1" />
<entry key="batch.size" value="16384" />
<entry key="linger.ms" value="1" />
<entry key="buffer.memory" value="33554432" />
<entry key="key.serializer"
value="org.apache.kafka.common.serialization.StringSerializer" />
<entry key="value.serializer"
value="org.apache.kafka.common.serialization.StringSerializer" />
</map>
</constructor-arg>
</bean>
<!-- Factory that creates KafkaProducer instances from the properties above. -->
<bean id="producerFactory"
class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
<constructor-arg>
<ref bean="producerProperties" />
</constructor-arg>
</bean>
<!-- KafkaTemplate used by application code to send messages.
     autoFlush=true forces a flush after every send (lower throughput, immediate delivery). -->
<bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
<constructor-arg ref="producerFactory" />
<constructor-arg name="autoFlush" value="true" />
<property name="defaultTopic" value="linlin" />
<property name="producerListener" ref="producerListener" />
</bean>
<!-- Send-result callback: implement the ProducerListener interface and inject it
     into the KafkaTemplate above to be notified of successes/failures.
     NOTE(review): package name "linstener" looks like a typo for "listener" —
     it must match the actual Java package, so it is left untouched here. -->
<bean id="producerListener" class="com.kafka.linstener.KafkaProducerListener" />
</beans>
然后是消费者配置文件:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd">
<!-- Raw KafkaConsumer configuration properties.
     enable.auto.commit=false: offsets are managed by the listener container,
     so auto.commit.interval.ms has no effect here. -->
<bean id="consumerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<entry key="bootstrap.servers" value="localhost:9092" />
<entry key="group.id" value="test" />
<entry key="enable.auto.commit" value="false" />
<entry key="auto.commit.interval.ms" value="1000" />
<entry key="session.timeout.ms" value="15000" />
<entry key="key.deserializer"
value="org.apache.kafka.common.serialization.StringDeserializer" />
<entry key="value.deserializer"
value="org.apache.kafka.common.serialization.StringDeserializer" />
</map>
</constructor-arg>
</bean>
<!-- Factory that creates KafkaConsumer instances from the properties above. -->
<bean id="consumerFactory"
class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
<constructor-arg>
<ref bean="consumerProperties" />
</constructor-arg>
</bean>
<!-- Business logic for consuming messages: a class implementing MessageListener.
     NOTE(review): package name "linstener" looks like a typo for "listener" —
     it must match the actual Java package, so it is left untouched here. -->
<bean id="messageListernerConsumerService" class="com.kafka.linstener.KafkaConsumerListener"/>
<!-- Container properties: the topic to subscribe to ("linlin") and the listener to invoke. -->
<bean id="containerProperties_trade" class="org.springframework.kafka.listener.config.ContainerProperties">
<constructor-arg value="linlin"/>
<property name="messageListener" ref="messageListernerConsumerService"/>
</bean>
<!-- The listener container itself; init-method="doStart" begins polling on context startup. -->
<bean id="messageListenerContainer_other" class="org.springframework.kafka.listener.KafkaMessageListenerContainer"
init-method="doStart">
<constructor-arg ref="consumerFactory"/>
<constructor-arg ref="containerProperties_trade"/>
</bean>
</beans>
建立测试类:
@Component
public class KafkaServer {

    /** Template configured in kafka_producer.xml; used to publish messages. */
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Serializes {@code value} to JSON and sends it to the given topic with the given key,
     * blocking until the broker acknowledges the send (autoFlush is enabled in the XML config).
     *
     * @param topic destination Kafka topic
     * @param key   record key (used for partitioning)
     * @param value payload object, serialized with fastjson
     */
    public void sendMessage(String topic, String key, Object value) {
        // NOTE(review): this replaces the XML-configured producerListener on every call;
        // kept for compatibility with the original behavior.
        kafkaTemplate.setProducerListener(new KafkaProducerListener<String, String>());
        ListenableFuture<SendResult<String, String>> f = kafkaTemplate.send(topic, key, JSON.toJSONString(value));
        try {
            // Block exactly once and reuse the result; the original called f.get() twice,
            // blocking twice and duplicating failure handling.
            SendResult<String, String> result = f.get();
            System.out.println(result.getProducerRecord().key());
            System.out.println(result.getProducerRecord().value());
            // Only report success when the send actually succeeded
            // (the original printed this even after an exception).
            System.out.println("发送了消息成功");
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    /** Demo entry point: boot the Spring context and send one test record. */
    public static void main(String[] args) {
        ApplicationContext applicationContext = new FileSystemXmlApplicationContext("classpath:applicationContext.xml");
        KafkaServer server = (KafkaServer) applicationContext.getBean("kafkaServer");
        RoleBean bean = new RoleBean();
        bean.setDm("test");
        bean.setMc("测试2");
        server.sendMessage("linlin", "test", bean);
    }
}
public class KafkaConsumerListener<K, V> implements MessageListener<K, V>{

    /**
     * Invoked by the listener container for each record polled from the topic;
     * echoes the record's key and value to stdout.
     */
    @Override
    public void onMessage(ConsumerRecord<K, V> record) {
        final K recordKey = record.key();
        final V recordValue = record.value();
        System.out.println("消费 ——————————————————————————");
        System.out.println(recordKey);
        System.out.println(recordValue);
    }
}
public class KafkaProducerListener<K, V> extends ProducerListenerAdapter<K, V> {

    /**
     * Returns true so the framework also delivers success callbacks
     * (by default only failures are reported).
     */
    @Override
    public boolean isInterestedInSuccess() {
        return true;
    }

    /** Called after the broker acknowledges a record; echoes it for debugging. */
    @Override
    public void onSuccess(String topic, Integer partition, K key, V value, RecordMetadata recordMetadata) {
        System.out.println("topic___________________________" + topic);
        System.out.println("key______________________________" + key);
        System.out.println("value___________________________" + value);
        System.out.println("发送成功");
    }

    /** Called when a send fails; just reports the failure. */
    @Override
    public void onError(String topic, Integer partition, K key, V value, Exception exception) {
        System.out.println("发送失败");
    }
}