消息队列-kafka

zookeeper和kafka-0.8客户端请自行搜索...

kafka依赖  版本1.3.0.RELEASE

<dependency>
				<groupId>org.springframework.integration</groupId>
				<artifactId>spring-integration-kafka</artifactId>
				<version>1.3.0.RELEASE</version>
				<exclusions>
					<exclusion>
						<groupId>org.springframework</groupId>
						<artifactId>*</artifactId>
					</exclusion>
				</exclusions>
			</dependency>
			<dependency>
				<groupId>org.springframework</groupId>
				<artifactId>spring-messaging</artifactId>
				<version>${spring.version}</version>
			</dependency>
			<!-- avro -->
			<dependency>
				<groupId>org.apache.avro</groupId>
				<artifactId>avro</artifactId>
				<version>1.7.7</version>
			</dependency>


kafka配置

producer

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:int="http://www.springframework.org/schema/integration"
	xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
	xmlns:task="http://www.springframework.org/schema/task"
	xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

	<!-- topic test config -->
	<int:channel id="kafkaChannel">
		<int:queue />
	</int:channel>

	<!-- commons config -->
	<bean id="stringSerializer" class="org.apache.kafka.common.serialization.StringSerializer" />
	
	<bean id="kafkaEncoder" class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder">
		<constructor-arg value="java.lang.String" />
	</bean>
	
	<bean id="producerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
		<property name="properties">
			<props>
				<prop key="topic.metadata.refresh.interval.ms">3600000</prop>
                <prop key="message.send.max.retries">5</prop>
				<prop key="serializer.class">kafka.serializer.StringEncoder</prop>
				<prop key="request.required.acks">1</prop>
			</props>
		</property>
	</bean>
	
	<task:executor id="taskExecutor" pool-size="5" keep-alive="120" queue-capacity="500" />

	<int-kafka:outbound-channel-adapter id="kafkaOutboundChannelAdapter"
		kafka-producer-context-ref="producerContext" auto-startup="true"
		channel="kafkaChannel" order="3">
		<int:poller fixed-delay="1000" time-unit="MILLISECONDS" receive-timeout="1" task-executor="taskExecutor" />
	</int-kafka:outbound-channel-adapter>
	
	<int-kafka:producer-context id="producerContext" producer-properties="producerProperties">
		<int-kafka:producer-configurations>
			<!-- 多个topic配置 -->
			<int-kafka:producer-configuration
				broker-list="${kafka.broker.list}"
				key-serializer="stringSerializer" 
				value-class-type="java.lang.String"
				value-serializer="stringSerializer" 
				topic="test" />
			<int-kafka:producer-configuration
				broker-list="${kafka.broker.list}"
				key-serializer="stringSerializer" 
				value-class-type="java.lang.String"
				value-serializer="stringSerializer" 
				topic="otherTopic" />
		</int-kafka:producer-configurations>
	</int-kafka:producer-context>
	
</beans>

consumer

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:int="http://www.springframework.org/schema/integration"
	xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
	xmlns:task="http://www.springframework.org/schema/task"
	xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka 
                        http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
                        http://www.springframework.org/schema/integration 
                        http://www.springframework.org/schema/integration/spring-integration.xsd
                        http://www.springframework.org/schema/beans 
                        http://www.springframework.org/schema/beans/spring-beans.xsd
                        http://www.springframework.org/schema/task 
                        http://www.springframework.org/schema/task/spring-task.xsd">

	<!-- topic test conf -->
	<int:channel id="kafkaConsumerChannel">
		<int:dispatcher task-executor="kafkaMessageExecutor" />
	</int:channel>
	
	<!-- channel配置 auto-startup="true" 否则接收不到数据 -->
	<int-kafka:inbound-channel-adapter
		id="kafkaInboundChannelAdapter" kafka-consumer-context-ref="consumerContext"
		auto-startup="true" channel="kafkaConsumerChannel">
		<int:poller fixed-delay="1" time-unit="MILLISECONDS" />
	</int-kafka:inbound-channel-adapter>

	<task:executor id="kafkaMessageExecutor" pool-size="8" keep-alive="120" queue-capacity="500" />
	
	<bean id="kafkaDecoder" class="org.springframework.integration.kafka.serializer.common.StringDecoder" />

	<bean id="consumerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
		<property name="properties">
			<props>
				<prop key="auto.offset.reset">smallest</prop>
				<prop key="socket.receive.buffer.bytes">10485760</prop> <!-- 10M -->
				<prop key="fetch.message.max.bytes">5242880</prop>
				<prop key="auto.commit.interval.ms">1000</prop>
			</props>
		</property>
	</bean>
	
	<!-- 消息接收的Bean -->
	<bean id="kkConsumer" class="com.pay.kafka.KKConsumer" />
	<!-- 指定接收的方法 -->
	<int:outbound-channel-adapter channel="kafkaConsumerChannel" ref="kkConsumer" method="kkMsgConsumer" />

	<!-- zookeeper配置 可以配置多个 -->
	<int-kafka:zookeeper-connect id="zookeeperConnect"
		zk-connect="${dubbo.registry.address}" zk-connection-timeout="6000"
		zk-session-timeout="6000" zk-sync-time="2000" />
		
	<int-kafka:consumer-context id="consumerContext"
		consumer-timeout="1000" zookeeper-connect="zookeeperConnect"
		consumer-properties="consumerProperties">
		<int-kafka:consumer-configurations>
			<int-kafka:consumer-configuration
				group-id="default1" 
				value-decoder="kafkaDecoder" 
				key-decoder="kafkaDecoder"
				max-messages="5000">
				<!-- 多个TOPIC配置 -->
				<int-kafka:topic id="test" streams="4" />
				<int-kafka:topic id="otherTopic" streams="4" />
			</int-kafka:consumer-configuration>
		</int-kafka:consumer-configurations>
	</int-kafka:consumer-context>
</beans>

package com.pay.kafka;

import java.util.Date;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;

public class KKProducer {

  private static final Logger log = LoggerFactory.getLogger(KKProducer.class);

  // Static so the static kkMsgProducer(...) can reach it, yet injected via an
  // instance setter (Spring bean property). Define only ONE KKProducer bean:
  // a second instance would silently overwrite the channel for all callers.
  private static MessageChannel channel;

  /**
   * Spring setter-injection hook; stores the outbound channel in the shared
   * static field used by {@link #kkMsgProducer(String, String)}.
   *
   * @param channel the Spring Integration channel wired to the Kafka
   *                outbound-channel-adapter (e.g. "kafkaChannel")
   */
  public void setChannel(MessageChannel channel) {
    KKProducer.channel = channel;
  }

  /**
   * Publishes a string message to the given Kafka topic by sending it through
   * the configured Spring Integration channel, with the topic carried in the
   * {@code KafkaHeaders.TOPIC} message header.
   *
   * @param topic  target Kafka topic name
   * @param strMsg message payload
   * @throws IllegalStateException if the channel has not been injected yet
   */
  public static void kkMsgProducer(String topic, String strMsg) {
    if (channel == null) {
      throw new IllegalStateException(
          "MessageChannel not injected; configure a KKProducer bean with its 'channel' property set");
    }
    // Parameterized logging: no String.format/Date work when DEBUG is off;
    // the logging framework already timestamps each entry.
    log.debug("生产者kafka,topic:{},消息:{}", topic, strMsg);
    channel.send(MessageBuilder.withPayload(strMsg).setHeader(KafkaHeaders.TOPIC, topic).build());
  }
}

package com.pay.kafka;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class KKConsumer {

  private static final Logger log = LoggerFactory.getLogger(KKConsumer.class);

  /**
   * Spring Integration service-activator method invoked for each batch read
   * from Kafka (wired via the outbound-channel-adapter on
   * "kafkaConsumerChannel"). The payload is keyed by topic name; each value
   * maps partition id to the decoded message for that partition.
   *
   * <p>Logs every partition id first, then every message, per topic —
   * mirroring the original two-pass behavior.
   *
   * @param msgs topic -&gt; (partition -&gt; message) as delivered by the
   *             inbound-channel-adapter
   */
  public void kkMsgConsumer(Map<String, Map<Integer, String>> msgs) {
    for (Map.Entry<String, Map<Integer, String>> entry : msgs.entrySet()) {
      // Program to the Map interface: the previous LinkedHashMap downcast was
      // unnecessary and would throw ClassCastException for other Map impls.
      Map<Integer, String> messages = entry.getValue();
      for (Integer partition : messages.keySet()) {
        log.debug("消费者kafka,Partition:{}", partition);
      }
      for (String msg : messages.values()) {
        log.debug("消费者kafka,topic:{},消息:{}", entry.getKey(), msg);
      }
    }
  }
}





  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值