Dependencies
<!-- pom.xml -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>1.1.1.RELEASE</version>
</dependency>
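The spring.kafka.* properties in application.yml below belong to the Spring Boot configuration namespace, so the project is assumed to be a Spring Boot application. For completeness, a minimal bootstrap class might look like the following sketch; the class name is illustrative and not taken from the original project.

// Application.java -- minimal sketch, assuming a Spring Boot project
package kafka_test;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class Application {

    public static void main(String[] args) {
        // Boots the Spring context, which loads application.yml and the Kafka beans
        SpringApplication.run(Application.class, args);
    }
}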
Configuration file
# application.yml
spring:
  kafka:
    bootstrap-servers: 192.168.1.117:9092
    producer:
      # Number of send retries
      retries: 3
      # Batch size in bytes for batched sends
      batch-size: 16384
      # 32 MB batch buffer
      buffer-memory: 33554432
    consumer:
      # Default consumer group
      group-id: etl
      # Start from the earliest unconsumed offset
      auto-offset-reset: earliest
      # Maximum number of records returned in a single poll
      max-poll-records: 1000
      # Interval (ms) between automatic offset commits
      auto-commit-interval: 1000
      enable-auto-commit: true

topicName:
  topic2: topic2Name
  topic5: topic5Name
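The topicName.* entries are custom properties rather than part of the spring.kafka namespace, so they have to be injected by hand, for example with @Value. The sketch below shows one way they might be combined with a KafkaTemplate (assumed to be provided by the configuration class that follows) to send to topic2; the DemoProducer class is illustrative and not part of the original code.

// DemoProducer.java -- illustrative sketch, not part of the original post
package kafka_test;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class DemoProducer {

    // Bound from the custom topicName.topic2 entry in application.yml
    @Value("${topicName.topic2}")
    private String topic2;

    private final KafkaTemplate<String, String> kafkaTemplate;

    public DemoProducer(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    // Sends through the producer settings above (retries, batch-size, buffer-memory)
    public void send(String message) {
        kafkaTemplate.send(topic2, message);
    }
}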
Configuration
// KafkaConfiguration.java
package kafka_test;
import com.google.common.collect.Maps;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;