Integrating Kafka in a Spring Boot project to subscribe to and publish messages
Project dependencies
<!-- kafka -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-streams</artifactId>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.12</artifactId>
</dependency>
Kafka publish-subscribe pattern:
Kafka subscriber
The subscriber receives messages and processes them. Both topic and brokerAddrs can be injected from the yml file via @Value (they are hard-coded here for the demo). After a message is received it is converted from JSON into the target object, at which point any further processing can be applied; additional properties can be set on props in the testStream method.
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

import java.util.Properties;
import java.util.UUID;

@Component
@Slf4j
public class TestKafkaStream implements CommandLineRunner {

    // In a real project these can be injected from application.yml via @Value
    private String topic = "mcloud-log";
    private String brokerAddrs = "127.0.0.1:9092";

    @Override
    public void run(String... args) throws Exception {
        Properties props = new Properties();
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddrs);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        testStream(props);
    }

    private void testStream(Properties props) {
        // A random application id creates a new consumer group on every start;
        // use a fixed id if offsets should be shared across restarts
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, UUID.randomUUID().toString());
        props.put("deserializer.encoding", "UTF-8");
        props.put("auto.offset.reset", "latest");

        final StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream(topic)
                .foreach((key, value) -> {
                    try {
                        // Deserialize the JSON payload into a TestMessage object
                        TestMessage message = JSONObject.parseObject(value, TestMessage.class);
                        System.out.println(message.getMessage());
                    } catch (Exception e) {
                        log.error("Failed to process message", e);
                    }
                });

        final Topology topology = builder.build();
        final KafkaStreams streams = new KafkaStreams(topology, props);
        streams.start();
    }
}
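The TestMessage class referenced by the subscriber is not shown above. A minimal sketch of the POJO that JSONObject.parseObject expects, assuming a payload with a single message field (the name is hypothetical; the real class may carry more fields):

// Hypothetical payload class assumed by the subscriber above
public class TestMessage {

    private String message;

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}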
Kafka publisher
The producer first needs to be configured. brokerAddrs is injected via @Value from a property defined in the yml file.
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.List;
import java.util.Properties;

@Configuration
public class KafkaProducerConfig {

    @Value("${albert.kafka.risk.bootstrapServers}")
    private List<String> brokerAddrs;

    @Bean
    public Producer<String, String> kafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerAddrs);
        // deserializer.encoding and auto.offset.reset are consumer-side settings
        // and have no effect on a producer, so they are omitted here
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(props);
    }
}
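As an aside, the same producer settings can also be expressed with the ProducerConfig constants instead of raw string keys, which guards against typos in property names. The class and method names below (ProducerFactorySketch, buildProducer) are introduced purely for illustration:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.List;
import java.util.Properties;

public final class ProducerFactorySketch {

    // Builds the same String/String producer as the @Bean above,
    // using ProducerConfig constants instead of raw property keys
    public static Producer<String, String> buildProducer(List<String> brokerAddrs) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddrs);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(props);
    }
}

With the producer bean in place, the service below publishes messages to the configured topic.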
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class KafkaProducerServiceImpl implements IKafkaProducerService {

    @Autowired
    private Producer<String, String> producer;

    @Value("${albert.topic.test}")
    private String topic;

    @Override
    public void sendMessage() {
        String str = "";  // message payload; replace with the actual content to publish
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, "1", str);
        producer.send(record);
    }
}
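The IKafkaProducerService interface implemented above is likewise not shown in the original; a minimal sketch matching the implementation:

// Hypothetical interface matching the service implementation above
public interface IKafkaProducerService {

    // Publishes a test message to the configured topic
    void sendMessage();
}

Any component that needs to publish can then inject IKafkaProducerService and call sendMessage().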