博文目录
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Spring Boot + spring-kafka demo project. Versions are managed via the
     spring-boot-dependencies BOM (2.4.0); only non-BOM artifacts pin a version. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.mrathena.middle.ware</groupId>
    <artifactId>kafka.spring.boot</artifactId>
    <version>1.0.0</version>
    <dependencyManagement>
        <dependencies>
            <!-- Import the Boot BOM so spring-boot-starter / spring-kafka need no explicit version -->
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-dependencies</artifactId>
                <version>2.4.0</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.12</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.30</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.2.3</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.platform</groupId>
            <artifactId>junit-platform-launcher</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
application.yml
spring:
  application:
    name: kafka.spring.boot
  kafka: # default Kafka cluster configuration (used by the auto-configured KafkaTemplate / listener containers)
    bootstrap-servers: 192.168.2.131:9092,192.168.2.132:9092,192.168.2.133:9092
    producer:
      acks: 1
      retries: 3
      buffer-memory: 33554432
      batch-size: 16384
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: kafka.spring.boot
      enable-auto-commit: false
      auto-offset-reset: earliest
      max-poll-records: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    listener:
      concurrency: 3
      # count: commit once the number of records processed since the last commit reaches ack-count
      # time: commit once the time since the last commit exceeds ack-time
      # count_time: commit when either the count or the time condition is met
      # manual: after a polled batch is consumed, commit after Acknowledgment.acknowledge() is called manually
      # manual_immediate: commit immediately when Acknowledgment.acknowledge() is called; the most commonly used mode
      # record: commit after each individual record is consumed
      # batch: commit after a whole polled batch is consumed
      # My understanding:
      # manual vs batch: manual requires calling Acknowledgment.acknowledge() yourself, batch does it automatically;
      #   both are essentially the same — the commit is queued and executed later
      # manual vs manual_immediate: manual_immediate does not queue the commit, it commits directly
      ack-mode: manual_immediate
server:
  servlet:
    context-path: /
  port: 80
Application
package com.mrathena.kafka;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;

import javax.annotation.Resource;

/**
 * Application entry point. Boots Spring and, via {@link EnableScheduling},
 * publishes a test message to partitions 0 and 1 of {@link #TOPIC} every 5 seconds.
 */
@EnableScheduling
@SpringBootApplication
public class Application {

	public static void main(String[] args) {
		// Fix: forward the command-line args so external property overrides
		// (e.g. --server.port=...) are honored; the original dropped them.
		SpringApplication.run(Application.class, args);
	}

	/** Auto-configured by spring-boot from the spring.kafka.* properties. */
	@Resource
	private KafkaTemplate<String, String> kafkaTemplate;

	// Topic name; "test-3-2-3" presumably encodes partitions/replicas — TODO confirm.
	private static final String TOPIC = "test-3-2-3";

	/** Sends one record to partition 0 and one to partition 1 every 5 seconds. */
	@Scheduled(cron = "0/5 * * * * ?")
	public void send() {
		kafkaTemplate.send(TOPIC, 0, "key", "value");
		kafkaTemplate.send(TOPIC, 1, "key", "value");
	}
}
KafkaConsumer
package com.mrathena.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

/**
 * Demo consumers for the default cluster. Both listeners acknowledge manually,
 * matching the {@code ack-mode: manual_immediate} listener configuration.
 */
@Component
public class KafkaConsumer {

	private static final String TOPIC = "test-3-2-3";
	private static final String GROUP = "group";

	/**
	 * Consumes partition 0 from its committed offset, and partition 1 starting
	 * at offset 150, with two concurrent containers for this listener.
	 */
	@KafkaListener(groupId = GROUP, topicPartitions = {
			@TopicPartition(topic = TOPIC, partitions = "0"),
			@TopicPartition(topic = TOPIC, partitionOffsets = @PartitionOffset(partition = "1", initialOffset = "150"))
	}, concurrency = "2")
	public void group(ConsumerRecord<String, String> record, Acknowledgment ack) {
		System.out.println("group-1: " + record);
		ack.acknowledge();
	}

	/** A second, independent consumer group subscribed to the whole topic. */
	@KafkaListener(topics = TOPIC, groupId = "group-2")
	public void group2(ConsumerRecord<String, String> record, Acknowledgment ack) {
		System.out.println("group-2: " + record);
		ack.acknowledge();
	}
}
整合多套 Kafka 集群（Integrating multiple Kafka clusters in one application）
KafkaConfig
package com.mrathena.web.configuration;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.support.ProducerListener;

import java.util.HashMap;
import java.util.Map;

/**
 * Wires two independent Kafka clusters ("group" and "tech") into one application:
 * one listener-container factory and one KafkaTemplate per cluster, referenced by
 * bean name from {@code @KafkaListener(containerFactory = ...)} and injection points.
 *
 * Fix: the factories were declared {@code <Integer, String>} although both key and
 * value use {@link StringDeserializer}; generics are now {@code <String, String>}
 * to match the actual wire types. Shared setup is extracted into private helpers.
 */
@Configuration
public class KafkaConfig {

	private static final String GROUP_ID = "spring.boot.starter";

	@Value("${spring.kafka.bootstrap.servers.group}")
	private String groupKafkaBootstrapServers;

	@Value("${spring.kafka.bootstrap.servers.tech}")
	private String techKafkaBootstrapServers;

	/** Builds the common consumer properties; only servers and max-poll differ per cluster. */
	private static Map<String, Object> consumerConfig(String bootstrapServers, int maxPollRecords) {
		Map<String, Object> config = new HashMap<>(8);
		config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
		config.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID);
		// NOTE(review): auto-commit is enabled here although application.yml disables it
		// for the default cluster — confirm this difference is intentional.
		config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
		config.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
		config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		return config;
	}

	/** Builds a 3-way-concurrent listener-container factory from the given consumer config. */
	private static KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> containerFactory(
			Map<String, Object> config) {
		ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(config));
		factory.setConcurrency(3);
		factory.getContainerProperties().setPollTimeout(3000);
		return factory;
	}

	/** Builds a String/String template with fast-fail timeouts and the shared producer listener. */
	private static KafkaTemplate<String, String> template(String bootstrapServers,
			ProducerListener<String, String> producerListener) {
		Map<String, Object> config = new HashMap<>(8);
		config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
		config.put(ProducerConfig.ACKS_CONFIG, "1");
		config.put(ProducerConfig.RETRIES_CONFIG, 0);
		config.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 2000);
		config.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 2000);
		config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		// autoFlush=false: do not flush synchronously after every send
		KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(config), false);
		kafkaTemplate.setProducerListener(producerListener);
		return kafkaTemplate;
	}

	@Bean(name = "groupKafkaListenerContainerFactory")
	KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> groupKafkaListenerContainerFactory() {
		return containerFactory(consumerConfig(groupKafkaBootstrapServers, 100));
	}

	@Bean(name = "techKafkaListenerContainerFactory")
	KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> techKafkaListenerContainerFactory() {
		return containerFactory(consumerConfig(techKafkaBootstrapServers, 10));
	}

	@Bean(name = "groupKafkaTemplate")
	public KafkaTemplate<String, String> groupKafkaTemplate(ProducerListener<String, String> kafkaProducerListener) {
		return template(groupKafkaBootstrapServers, kafkaProducerListener);
	}

	@Bean(name = "techKafkaTemplate")
	public KafkaTemplate<String, String> techKafkaTemplate(ProducerListener<String, String> kafkaProducerListener) {
		return template(techKafkaBootstrapServers, kafkaProducerListener);
	}
}
KafkaConsumer
package com. mrathena. message. kafka. v3. marketing;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class KafkaConsumer {

	/** Consumes "topic" from the "group" cluster (see KafkaConfig bean names). */
	@KafkaListener(topics = "topic", containerFactory = "groupKafkaListenerContainerFactory")
	public void onMessage(ConsumerRecord<String, String> record) {
		// NOTE(review): LogKit/IdKit are project-local helpers; presumably they tag the
		// logging context with a per-message trace id — confirm their package/import.
		LogKit.setTraceNo(IdKit.getUuid());
		log.info("KAFKA:CONSUMER:MESSAGE:{}", record.value());
	}

	/**
	 * Consumes "topic2" from the "tech" cluster.
	 * Fixes: the original declared a second method with the identical name and
	 * signature (a compile error), and referenced a non-existent bean whose name
	 * was the factory name pasted twice.
	 */
	@KafkaListener(topics = "topic2", containerFactory = "techKafkaListenerContainerFactory")
	public void onTechMessage(ConsumerRecord<String, String> record) {
		LogKit.setTraceNo(IdKit.getUuid());
		log.info("KAFKA:CONSUMER:MESSAGE:{}", record.value());
	}
}