一、注入生产者和消费者
1、创建ApplicationContext-mq-kafka.xml文件
2、将该配置文件在ApplicationContext中引用
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
<!-- 1. Consumer configuration properties -->
<!-- NOTE(review): group.id below is wired to ${kafka.mq.topic.id}, while the
     producer block uses ${kafka.mq.group.id} - confirm this is intentional and
     not a copy-paste slip. -->
<bean id="consumerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<entry key="bootstrap.servers" value="${kafka.mq.service}"/>
<entry key="group.id" value="${kafka.mq.topic.id}"/>
<entry key="enable.auto.commit" value="true"/>
<entry key="session.timeout.ms" value="15000"/>
<entry key="key.deserializer"
value="org.apache.kafka.common.serialization.StringDeserializer"/>
<entry key="value.deserializer"
value="org.apache.kafka.common.serialization.StringDeserializer"/>
<!-- Add the entries below when the broker is reached through an external (SASL) route. -->
<!-- Note: the username combines the instance name and the user id; the inner
     double quotes must be escaped when uncommenting (raw quotes would break the
     XML attribute). -->
<!-- <entry key="sasl.jaas.config"-->
<!-- value="org.apache.kafka.common.security.plain.PlainLoginModule required username="xxx#xxx" password="xxxxx";"/>-->
<!-- <entry key="security.protocol"-->
<!-- value="SASL_PLAINTEXT"/>-->
<!-- <entry key="sasl.mechanism"-->
<!-- value="PLAIN"/>-->
</map>
</constructor-arg>
</bean>
<!-- 2. Consumer factory built from the properties map above -->
<bean id="consumerFactory"
class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
<constructor-arg>
<ref bean="consumerProperties"/>
</constructor-arg>
</bean>
<!-- 3. The message-listener implementation that actually consumes records -->
<bean id="kafkaConsumerService" class="com.uxseeing.consumer.CMYKafkaMqConsumer"/>
<!-- 4. Listener-container properties: subscribed topics + listener bean -->
<bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
<!-- topics to subscribe to -->
<constructor-arg name="topics">
<list>
<!-- <value>topic_trac</value>-->
<value>${kafka.mq.topic.id}</value>
<!--<value>test_topic</value>-->
</list>
</constructor-arg>
<property name="messageListener" ref="kafkaConsumerService"/>
</bean>
<!-- 5. Concurrent listener container; init-method="doStart" begins polling on startup -->
<bean id="messageListenerContainer" class="com.uxseeing.config.ListenerContainerConfig"
init-method="doStart">
<constructor-arg ref="consumerFactory"/>
<constructor-arg ref="containerProperties"/>
<property name="concurrency" value="3"/>
</bean>
<!-- Producer configuration properties -->
<!-- NOTE(review): group.id and session.timeout.ms are consumer settings; Kafka
     producers ignore them (they are reported as unknown configs at startup).
     NOTE(review): batch.size=1638 looks like a typo for the Kafka default
     16384 - confirm against the intended throughput. -->
<bean id="producerProperties" class="java.util.HashMap">
<constructor-arg>
<map>
<entry key="bootstrap.servers" value="${kafka.mq.service}"/>
<entry key="group.id" value="${kafka.mq.group.id}"/>
<entry key="retries" value="3"/>
<entry key="batch.size" value="1638"/>
<entry key="linger.ms" value="1"/>
<entry key="session.timeout.ms" value="15000"/>
<entry key="buffer.memory" value="33554432"/>
<entry key="acks" value="all"/>
<entry key="key.serializer"
value="org.apache.kafka.common.serialization.StringSerializer"/>
<entry key="value.serializer"
value="org.apache.kafka.common.serialization.StringSerializer"/>
<!-- Add the entries below when the broker is reached through an external (SASL) route. -->
<!-- <entry key="sasl.jaas.config"-->
<!-- value="org.apache.kafka.common.security.plain.PlainLoginModule required username="xxx#xxx" password="xxx";"/>-->
<!-- <entry key="security.protocol"-->
<!-- value="SASL_PLAINTEXT"/>-->
<!-- <entry key="sasl.mechanism"-->
<!-- value="PLAIN"/>-->
</map>
</constructor-arg>
</bean>
<!-- Producer factory used by the KafkaTemplate -->
<bean id="producerFactory"
class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
<constructor-arg>
<ref bean="producerProperties"/>
</constructor-arg>
</bean>
<!-- KafkaTemplate bean: inject this anywhere a producer is needed and call send(...) -->
<bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
<constructor-arg ref="producerFactory"/>
<constructor-arg name="autoFlush" value="true"/>
<property name="defaultTopic" value="default"/>
</bean>
</beans>
二、自定义消费者和生产者的类
@Conditional(KafkaCondition.class)
@Component
public class CMYKafkaMqConsumer implements MessageListener<String, String> {

    private static final Logger log = LoggerFactory.getLogger(CMYKafkaMqConsumer.class);

    @Autowired
    CMYCommonMqConsumer cmyCommonMqConsumer;

    /**
     * Invoked by the listener container for each record; delegates the record
     * value to the shared consumer. Exceptions are caught so one bad message
     * does not stop the container (auto-commit is enabled, so a failed message
     * is NOT redelivered - it is only logged).
     */
    @Override
    public void onMessage(ConsumerRecord<String, String> data) {
        try {
            cmyCommonMqConsumer.consume(data.value());
            log.debug("kafka消息消费成功 topic:{}, tag:{} , data:{} ", data.topic(), data.key(), data.value());
        } catch (Exception e) {
            // Pass the throwable as the last argument so SLF4J logs the full
            // stack trace (the original e.toString() discarded it).
            log.error("kafka消息消费失败 topic:{}, tag:{} , data:{} ", data.topic(), data.key(), data.value(), e);
        }
    }
}
@Conditional(KafkaCondition.class)
@Service("kafka")
@ImportResource("classpath:spring/ApplicationContext-mq-kafka.xml")
public class CMYKafkaMqSender implements CMYCommonMqSender {

    private static final Logger log = LoggerFactory.getLogger(CMYKafkaMqSender.class);

    @Autowired
    private KafkaTemplate kafkaTemplate;

    /** Sends the JSON payload to the topic named by the kafka.mq.topic.id property. */
    @Override
    public void send(String jsonData) {
        sendMessage(SystemEnv.getProperty("kafka.mq.topic.id"), 0, null, "key", jsonData);
    }

    /**
     * Sends a record asynchronously and attaches success/failure callbacks.
     *
     * @param topic     destination topic; if it equals the template's default topic, sendDefault is used
     * @param partition target partition
     * @param timestamp record timestamp, may be null
     * @param key       record key
     * @param data      record value
     */
    <K, T> void sendMessage(String topic, Integer partition, Long timestamp, K key, T data) {
        ListenableFuture<SendResult<K, T>> listenableFuture;
        // Compare with topic on the left: getDefaultTopic() may be null when no
        // default topic is configured, which would NPE in the original order.
        if (topic != null && topic.equals(kafkaTemplate.getDefaultTopic())) {
            listenableFuture = kafkaTemplate.sendDefault(partition, timestamp, key, data);
        } else {
            listenableFuture = kafkaTemplate.send(topic, partition, timestamp, key, data);
        }
        // Success callback: success-path business logic goes here.
        SuccessCallback<SendResult<K, T>> successCallback = result ->
                log.debug("kafka消息发送成功 topic:{} , partition:{} , key:{} , data:{}", topic, partition, key, data);
        // Failure callback. Log at ERROR with the stack trace; do NOT throw from
        // here - the callback runs on the producer's callback thread, so a thrown
        // RuntimeException never reaches the caller of sendMessage.
        FailureCallback failureCallback = ex ->
                log.error("kafka消息发送失败 topic:{} , partition:{} , key:{} , data:{}", topic, partition, key, data, ex);
        listenableFuture.addCallback(successCallback, failureCallback);
    }
}
三、配置容器类
@Conditional(KafkaCondition.class)
@Component
@ImportResource("classpath:spring/ApplicationContext-mq-kafka.xml")
// Thin subclass of ConcurrentMessageListenerContainer so the container can be
// declared as a named bean in ApplicationContext-mq-kafka.xml, where
// init-method="doStart" starts it and the concurrency property is set.
// NOTE(review): raw types are used because the XML wires a raw
// DefaultKafkaConsumerFactory; consider parameterizing with <String, String>.
public class ListenerContainerConfig extends ConcurrentMessageListenerContainer {
// Simply forwards the XML-supplied factory and container properties to the superclass.
public ListenerContainerConfig(ConsumerFactory consumerFactory, ContainerProperties containerProperties) {
super(consumerFactory, containerProperties);
}
}
四、配置Condition类
通过配置Condition类,我们可以根据配置文件中的开关与连接信息,决定是否加载生产者和消费者相关的Bean。
public class KafkaCondition extends MqConditionUtil implements Condition {

    /**
     * Registers the Kafka producer/consumer beans only when MQ tracing is
     * enabled AND a broker address is configured.
     *
     * @return true when mq.EnableTrac is "true" and kafka.mq.service holds a
     *         usable (non-null, non-empty, non-"null") value
     */
    @Override
    public boolean matches(ConditionContext conditionContext, AnnotatedTypeMetadata annotatedTypeMetadata) {
        Properties properties = super.getProperties(conditionContext);
        String enableMq = properties.getProperty("mq.EnableTrac");
        if (!"true".equals(enableMq)) {
            return false;
        }
        String serviceUrl = properties.getProperty("kafka.mq.service");
        // BUG FIX: the original only rejected the literal string "null". A
        // missing property (getProperty returns null) or an empty value slipped
        // through, letting the Kafka beans load with no broker address.
        return serviceUrl != null && !serviceUrl.trim().isEmpty() && !"null".equals(serviceUrl);
    }
}
public class MqConditionUtil {

    // Cached so the properties file is read at most once per JVM.
    // NOTE(review): plain static field, not thread-safe; concurrent first calls
    // may load twice (harmless here since the result is identical).
    protected static Properties properties = null;

    /**
     * Lazily loads {@code <profile-path>/env.properties} from the classpath.
     *
     * @return the loaded properties; empty (not null) when the file is missing
     *         or unreadable
     */
    protected Properties getProperties(ConditionContext conditionContext) {
        if (properties == null) {
            Properties loaded = new Properties();
            Environment environment = conditionContext.getEnvironment();
            String path = switchPath(environment);
            // try-with-resources: the original leaked the stream, and
            // properties.load(null) threw an NPE when the resource was absent.
            try (InputStream in = MqConditionUtil.class.getClassLoader()
                    .getResourceAsStream(path + "/env.properties")) {
                if (in != null) {
                    loaded.load(in);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            properties = loaded;
        }
        return properties;
    }

    /**
     * Maps the active Spring profile to its config directory.
     * Defaults to "config/dev" when no profile is active; returns "" for an
     * unrecognized profile (matching the original behavior).
     */
    private String switchPath(Environment environment) {
        if (environment.getActiveProfiles().length == 0) {
            return "config/dev";
        }
        // First matching known profile wins; every known profile maps to
        // "config/<profile>", so a simple loop replaces the if/else chain.
        // NOTE(review): acceptsProfiles(String...) is deprecated since Spring
        // 5.1 in favor of acceptsProfiles(Profiles) - confirm framework version.
        for (String profile : new String[] {"dev", "live", "test", "samsclub", "samstest"}) {
            if (environment.acceptsProfiles(profile)) {
                return "config/" + profile;
            }
        }
        return "";
    }
}