1、引入依赖
<!-- kafka依赖 begin -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
</dependency>
2、配置yaml文件
spring:
  application:
    name: kafka-demo
  # Spring Boot Kafka integration
  kafka:
    # ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG / ProducerConfig.BOOTSTRAP_SERVERS_CONFIG
    # producer and consumer share the same broker here
    bootstrap-servers: 127.0.0.1:9092
    # producer settings, see the ProducerConfig class
    producer:
      retries: 10
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      acks: 1
      batch-size: 16384
      # bootstrap-servers: producer and consumer may be configured independently
    # consumer settings, see the ConsumerConfig class
    consumer:
      # where to start consuming when no committed offset exists for the group
      # (earliest = from the beginning of the log)
      auto-offset-reset: earliest
      # consumer group id
      group-id: group3
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer

# custom top-level properties, bound by KafkaStreamsConfig via
# @ConfigurationProperties(prefix = "kafka")
kafka:
  hosts: 127.0.0.1:9092
  group: ${spring.application.name}
3、消费者中创建KafkaStreams的配置类
@Configuration
@EnableKafkaStreams
@Data
@ConfigurationProperties(prefix = "kafka")
public class KafkaStreamsConfig {

    // bound from the custom "kafka.hosts" yaml property
    private String hosts;
    // bound from the custom "kafka.group" yaml property
    private String group;

    /**
     * Builds the default {@link KafkaStreamsConfiguration} bean that
     * {@code @EnableKafkaStreams} looks up by its well-known name.
     *
     * @return the Streams configuration (application id, brokers, default
     *         String serdes, commit interval, wall-clock timestamps)
     */
    @Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
    public KafkaStreamsConfiguration kStreamsConfigs(){
        Map<String, Object> streamProps = new HashMap<>();
        streamProps.put(StreamsConfig.APPLICATION_ID_CONFIG, group);
        streamProps.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, hosts);
        // keys and values are plain strings throughout the topology
        String stringSerde = Serdes.String().getClass().getName();
        streamProps.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, stringSerde);
        streamProps.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, stringSerde);
        streamProps.put(StreamsConfig.RETRIES_CONFIG, 10);
        // flush/commit processing state every 5 seconds
        streamProps.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 5000);
        // window records by processing (wall-clock) time, not event time
        streamProps.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class.getName());
        return new KafkaStreamsConfiguration(streamProps);
    }
}
4、创建流处理监听对象
@Configuration
public class MyStreamHandler {

    /**
     * Word-count topology: reads messages from "topic-input", splits each
     * value on spaces, counts occurrences of each word within 20-second
     * windows, and writes "word" / "count" pairs to "topic-out".
     *
     * @param streamsBuilder injected builder used to declare the topology
     * @return the source stream (registered as a bean so the topology starts)
     */
    @Bean
    public KStream<String,String> kStream(StreamsBuilder streamsBuilder){
        KStream<String, String> source = streamsBuilder.stream("topic-input");
        source
                // one record per whitespace-separated word
                .flatMapValues(value -> Arrays.asList(value.split(" ")))
                // regroup so each word becomes its own key
                .groupBy((ignoredKey, word) -> word)
                .windowedBy(TimeWindows.of(Duration.ofSeconds(20)))
                .count()
                .toStream()
                // unwrap the windowed key and stringify the count
                .map((windowedKey, count) -> new KeyValue<>(windowedKey.key().toString(), count.toString()))
                // emit aggregated counts to the output topic
                .to("topic-out");
        return source;
    }
}
5、消费者 用于接收流式处理之后的消息 并处理业务
@Component
public class ConsumerListener {

    /**
     * Receives the aggregated word-count records from "topic-out" and
     * prints partition, offset and payload for inspection.
     *
     * @param msgRecord the consumed record (String key/value)
     */
    @KafkaListener(topics = "topic-out")
    public void consumerMsg(ConsumerRecord<String, String> msgRecord){
        String payload = msgRecord.value();
        int partition = msgRecord.partition();
        long recordOffset = msgRecord.offset();
        String line = String.format("消费者: 分区:%d, offset:%d, 消息内容:%s", partition, recordOffset, payload);
        System.out.println(line);
    }
}
6、生产者发送消息(此步骤也可以放在第4步之前)
// Injected by Spring; used to publish test messages to Kafka.
// NOTE(review): raw KafkaTemplate — presumably KafkaTemplate<String, String>
// given the String serializers in the yaml; confirm before parameterizing.
@Autowired
private KafkaTemplate kafkaTemplate;
/**
 * Feeds sample messages into "topic-input" so the word-count topology has
 * data to aggregate: six "Hello kafka" then three "Hello word" records.
 */
@Test
public void testStream2(){
    final int kafkaMessages = 6;
    final int wordMessages = 3;
    for (int sent = 0; sent < kafkaMessages; sent++) {
        kafkaTemplate.send("topic-input","Hello kafka");
    }
    for (int sent = 0; sent < wordMessages; sent++) {
        kafkaTemplate.send("topic-input","Hello word");
    }
}