Spring Boot integration with spring-kafka

Spring Boot version: 2.0.4.RELEASE

Add the pom dependency:

<!-- kafka; the version is managed to match the Spring Boot version -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
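
No explicit version is needed because spring-boot-dependencies manages the spring-kafka version. This assumes the project inherits from the standard Spring Boot parent, e.g.:

<parent>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-parent</artifactId>
    <version>2.0.4.RELEASE</version>
</parent>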

Directory structure of the kafka package in my Maven project (screenshot omitted)

Custom kafka configuration file, kafka.properties:

#Whether to enable the producer configuration
kafka.producer.active=false
#IP:9092,IP:9092 (host:port list of the Kafka cluster)
kafka.producer.servers=127.0.0.1:9092
#Number of resends for failed sends; a value greater than zero enables retries
kafka.producer.retries=0
#When multiple records are sent to the same partition, the producer batches them into fewer requests; batch size in bytes
kafka.producer.batch.size=4096
#How long to wait for more records when a batch is smaller than the configured size, in milliseconds
kafka.producer.linger=1
#Total bytes of memory the producer may use to buffer records waiting to be sent; Kafka's default is 33554432
kafka.producer.buffer.memory=40960
#Enable SASL authentication
kafka.producer.auth.active=true
#Username
kafka.producer.auth.username=
#Password
kafka.producer.auth.password=


#Kafka consumer configuration
#Whether to enable the consumer configuration
kafka.consumer.active=false
#IP:9092,IP:9092 (host:port list of the Kafka cluster)
kafka.consumer.servers=127.0.0.1:9092
#Whether to auto-commit offsets
kafka.consumer.enable.auto.commit=true
#Session timeout in milliseconds
kafka.consumer.session.timeout=20000
#If 'enable.auto.commit' is true, how often (in milliseconds) offsets are auto-committed to Kafka; Kafka's default is 5000
kafka.consumer.auto.commit.interval=100
#latest: consume records produced from now on rather than starting from the beginning
kafka.consumer.auto.offset.reset=latest
#Consumer group
kafka.consumer.group.id=kafka
#Number of consumer threads (listener container concurrency)
kafka.consumer.concurrency=10
#Enable SASL authentication
kafka.consumer.auth.active=true
#Username
kafka.consumer.auth.username=
#Password
kafka.consumer.auth.password=


#Kafka thread pool configuration (wired up in the executor sketch after KafkaSend below)
kafka.task.active=false
#Number of threads created when the pool is initialized
kafka.task.corePoolSize=10
#Maximum pool size; threads beyond the core size are only created once the buffer queue is full
kafka.task.maxPoolSize=20
#Capacity of the queue that buffers pending tasks
kafka.task.queueCapacity=200
#Idle time after which threads above the core pool size are destroyed
kafka.task.keepAliveSeconds=60
#On shutdown, wait for all tasks to finish before destroying other beans, so the async tasks are torn down first; used together with awaitTerminationSeconds below
kafka.task.waitForTasksToCompleteOnShutdown=true
#Maximum time to wait for those tasks on shutdown; anything still running after this is forcibly stopped so the application can shut down instead of blocking
kafka.task.awaitTerminationSeconds=60

Condition class that decides whether the consumer configuration is enabled: ScanConsumerCondition

import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.type.AnnotatedTypeMetadata;


public class ScanConsumerCondition implements Condition {
	
    @Override
    public boolean matches(ConditionContext conditionContext, AnnotatedTypeMetadata annotatedTypeMetadata) {
        return "true".equalsIgnoreCase(PropertiesUtil.getPropValue("kafka.properties", "kafka.consumer.active"));
    }
    
}
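
Both condition classes call PropertiesUtil, a project-local helper the post never shows. Note that it cannot simply query the Spring Environment: a Condition is evaluated before the @PropertySource annotation on the configuration class is processed, so the helper has to read kafka.properties from the classpath itself. A minimal hypothetical sketch (class and method names inferred from the call site):

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class PropertiesUtil {

    // Reads a single key from a properties file on the classpath.
    // Returns null if the file or key is missing.
    public static String getPropValue(String fileName, String key) {
        Properties props = new Properties();
        try (InputStream in = PropertiesUtil.class.getClassLoader().getResourceAsStream(fileName)) {
            if (in == null) {
                return null;
            }
            props.load(in);
            return props.getProperty(key);
        } catch (IOException e) {
            return null;
        }
    }
}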

Condition class that decides whether the producer configuration is enabled: ScanProducerCondition

import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.type.AnnotatedTypeMetadata;


public class ScanProducerCondition implements Condition {
	
    @Override
    public boolean matches(ConditionContext conditionContext, AnnotatedTypeMetadata annotatedTypeMetadata) {
        return "true".equalsIgnoreCase(PropertiesUtil.getPropValue("kafka.properties", "kafka.producer.active"));
    }

}
 
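If these on/off switches lived in application.properties instead of a separate file, they would already be visible to the Environment at condition-evaluation time, and Spring Boot's stock @ConditionalOnProperty could replace both custom classes. A hypothetical sketch of that alternative:

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Configuration;

// Hypothetical alternative: only works if kafka.producer.active is moved into
// application.properties, where the Environment can see it during condition evaluation.
@Configuration
@ConditionalOnProperty(name = "kafka.producer.active", havingValue = "true")
public class KafkaProducerAutoConfig {
    // producer beans would go here, as in KafkaProducerConfig below
}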

Producer configuration class: KafkaProducerConfig

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;



@Configuration
@EnableKafka
@Conditional(ScanProducerCondition.class) // applied only when kafka.producer.active=true
@PropertySource(value = "classpath:kafka.properties",ignoreResourceNotFound = true)
public class KafkaProducerConfig {
	
    private static final Logger logger = Logger.getLogger(KafkaProducerConfig.class);
	
	
	@Bean
	public KafkaTemplate<String, String> kafkaTemplate() throws Exception {
		logger.info("=============================[Kafka producer configuration enabled]=====================================");
		return new KafkaTemplate<String, String>(producerFactory());
	}
	//======================================================== Load producer properties ========================================================
	@Value("${kafka.producer.servers}")
	private String producerServers;
	@Value("${kafka.producer.retries}")
	private int producerRetries;
	@Value("${kafka.producer.batch.size}")
	private int producerBatchSize;
	@Value("${kafka.producer.linger}")
	private int producerLinger;
	@Value("${kafka.producer.buffer.memory}")
	private int producerBufferMemory;
	//whether SASL authentication is enabled
	@Value("${kafka.producer.auth.active}")
	private boolean producerAuthActive;
	//username
	@Value("${kafka.producer.auth.username}")
	private String producerAuthUsername;
	//password
	@Value("${kafka.producer.auth.password}")
	private String producerAuthPassword;
	
	public Map<String, Object> producerConfigs() throws Exception {
		Map<String, Object> props = new HashMap<>();
		props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, producerServers);
		props.put(ProducerConfig.RETRIES_CONFIG, producerRetries);
		props.put(ProducerConfig.BATCH_SIZE_CONFIG, producerBatchSize);
		props.put(ProducerConfig.LINGER_MS_CONFIG, producerLinger);
		props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, producerBufferMemory);
		props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		//SASL authentication
		if(producerAuthActive){
			props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
			props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
			props.put("sasl.jaas.config",
					"org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+producerAuthUsername+"\" password=\""+producerAuthPassword+"\";");
		}
		return props;
	}
	
	public ProducerFactory<String, String> producerFactory() throws Exception {
		return new DefaultKafkaProducerFactory<>(producerConfigs());
	}
}
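
With kafka.producer.retries=0, a failed send is silently dropped unless the caller checks the result. KafkaTemplate.send returns a ListenableFuture, so the outcome can be observed with a callback; a small sketch (the topic name "demo-topic" is hypothetical):

import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.log4j.Logger;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.ListenableFuture;

public class KafkaSendCallbackExample {

    private static final Logger logger = Logger.getLogger(KafkaSendCallbackExample.class);

    // The template comes from KafkaProducerConfig above; "demo-topic" is a hypothetical topic name.
    public static void sendWithCallback(KafkaTemplate<String, String> template) {
        ListenableFuture<SendResult<String, String>> future = template.send("demo-topic", "hello");
        future.addCallback(
                result -> {
                    RecordMetadata meta = result.getRecordMetadata();
                    logger.info("sent to " + meta.topic() + "-" + meta.partition() + "@" + meta.offset());
                },
                ex -> logger.error("send failed", ex));
    }
}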

Consumer configuration class: KafkaConsumerConfig

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;



@Configuration
@EnableKafka
@PropertySource(value = "classpath:kafka.properties",ignoreResourceNotFound = true)
@Conditional(ScanConsumerCondition.class) // applied only when kafka.consumer.active=true
public class KafkaConsumerConfig {
	
    private static final Logger logger = Logger.getLogger(KafkaConsumerConfig.class);

	@Bean
	public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() throws Exception {
		logger.info("=============================[Kafka consumer configuration enabled]=====================================");
		ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactory());
		factory.setConcurrency(consumerConcurrency);
		factory.getContainerProperties().setPollTimeout(1500);
		return factory;
	}
	//======================================================== Load consumer properties ========================================================
	@Value("${kafka.consumer.servers}")
	private String consumerServers;
	@Value("${kafka.consumer.enable.auto.commit}")
	private boolean consumerEnableAutoCommit;
	@Value("${kafka.consumer.session.timeout}")
	private String consumerSessionTimeout;
	@Value("${kafka.consumer.auto.commit.interval}")
    private String consumerAutoCommitInterval;
	@Value("${kafka.consumer.group.id}")
	private String consumerGroupId;
	@Value("${kafka.consumer.auto.offset.reset}")
	private String consumerAutoOffsetReset;
	@Value("${kafka.consumer.concurrency}")
	private int consumerConcurrency;
	//whether SASL authentication is enabled
	@Value("${kafka.consumer.auth.active}")
	private boolean consumerAuthActive;
	//username
	@Value("${kafka.consumer.auth.username}")
	private String consumerAuthUsername;
	//password
	@Value("${kafka.consumer.auth.password}")
	private String consumerAuthPassword;
	
	public ConsumerFactory<String, String> consumerFactory() throws Exception {
		return new DefaultKafkaConsumerFactory<>(consumerConfigs());
	}
	
	public Map<String, Object> consumerConfigs() throws Exception {
		Map<String, Object> props = new HashMap<>();
		props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, consumerServers);
		props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, consumerEnableAutoCommit);
		props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, consumerAutoCommitInterval);
		props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, consumerSessionTimeout);
		props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		props.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroupId);
		props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, consumerAutoOffsetReset);
		//SASL authentication
		if(consumerAuthActive){	
			props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
			props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
			props.put("sasl.jaas.config",
					"org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+consumerAuthUsername+"\" password=\""+consumerAuthPassword+"\";");
		}
		return props;
	}
}
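
The container factory above only takes effect once a listener refers to it. A minimal consumer sketch (the topic name "demo-topic" is hypothetical):

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.log4j.Logger;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class DemoKafkaListener {

    private static final Logger logger = Logger.getLogger(DemoKafkaListener.class);

    // containerFactory refers to the bean defined in KafkaConsumerConfig above;
    // the listener runs on the configured number of consumer threads.
    @KafkaListener(topics = "demo-topic", containerFactory = "kafkaListenerContainerFactory")
    public void onMessage(ConsumerRecord<String, String> record) {
        logger.info("received: key=" + record.key() + ", value=" + record.value());
    }
}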

Kafka sender class: KafkaSend

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

@Component
public class KafkaSend {
	
	@Autowired
    private KafkaTemplate<String, String> template;
	
	
	// executed asynchronously on the configured task executor
	@Async
	public void send(String topic, String parameter) {
		this.template.send(topic, parameter);
	}
}
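
The kafka.task.* properties defined earlier are never consumed by any class shown above, and @Async only works once @EnableAsync and an executor are in place. A minimal sketch of that missing piece, following the same property-file conventions as the other configuration classes (the class name KafkaTaskExecutorConfig is mine, not from the original):

import java.util.concurrent.Executor;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.scheduling.annotation.AsyncConfigurer;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
@EnableAsync
// kafka.task.active could gate this class with a Condition, like the producer/consumer configs.
@PropertySource(value = "classpath:kafka.properties", ignoreResourceNotFound = true)
public class KafkaTaskExecutorConfig implements AsyncConfigurer {

    @Value("${kafka.task.corePoolSize}")
    private int corePoolSize;
    @Value("${kafka.task.maxPoolSize}")
    private int maxPoolSize;
    @Value("${kafka.task.queueCapacity}")
    private int queueCapacity;
    @Value("${kafka.task.keepAliveSeconds}")
    private int keepAliveSeconds;
    @Value("${kafka.task.waitForTasksToCompleteOnShutdown}")
    private boolean waitForTasksToCompleteOnShutdown;
    @Value("${kafka.task.awaitTerminationSeconds}")
    private int awaitTerminationSeconds;

    // Plain @Async methods (such as KafkaSend.send) run on this executor.
    @Override
    public Executor getAsyncExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(corePoolSize);
        executor.setMaxPoolSize(maxPoolSize);
        executor.setQueueCapacity(queueCapacity);
        executor.setKeepAliveSeconds(keepAliveSeconds);
        executor.setWaitForTasksToCompleteOnShutdown(waitForTasksToCompleteOnShutdown);
        executor.setAwaitTerminationSeconds(awaitTerminationSeconds);
        executor.setThreadNamePrefix("kafka-task-");
        executor.initialize();
        return executor;
    }
}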

 
