Spring Boot Redis Stream message queue

This article describes how to implement a message queue with Redis Stream, covering the producer and consumer implementations. Moving from Kafka to Redis pub/sub and finally to Redis Stream resolved the problems of inconvenient component deployment and the limitations of the pub/sub model.

We started with Kafka, but it requires deploying a separate component, which was inconvenient.
We then switched to Redis pub/sub, but that model is too limited (messages are not persisted, so anything published while no subscriber is online is simply lost).
We finally settled on Redis Stream as the message queue.

pom.xml

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis-reactive</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-pool2</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>
    </dependencies>

application.yml

spring:
  redis:
    host: 127.0.0.1
    port: 6379
    database: 0
    password:
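
The commons-pool2 dependency above enables Lettuce connection pooling. Pooling is optional; if you want it, a possible addition to application.yml is shown below (the pool sizes are illustrative values, not part of the original setup):

spring:
  redis:
    lettuce:
      pool:
        max-active: 8
        max-idle: 8
        min-idle: 0
        max-wait: 1000ms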

Producer

package com.guotie.dpc.data.parser.message;

import cn.hutool.core.util.StrUtil;
import com.guotie.dpc.kafka.constants.KafkaTopicConstant;
import com.guotie.dpc.kafka.message.ParserMessage;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.connection.stream.ObjectRecord;
import org.springframework.data.redis.connection.stream.RecordId;
import org.springframework.data.redis.connection.stream.StreamRecords;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;

/**
 * @author liaozesong
 */
@Slf4j
@Service
public class SendMessage {
    @Resource
    private StringRedisTemplate stringRedisTemplate;

    public void send(ParserMessage message) {
        try {
            ObjectRecord<String, ParserMessage> record = StreamRecords.objectBacked(message)
                    .withStreamKey(KafkaTopicConstant.DPC_C2_DATA_PARSER_SUCCESS);
            RecordId recordId = stringRedisTemplate.opsForStream().add(record);
            log.info("Message sent successfully, record id: {}", recordId);
        } catch (Exception e) {
            // Preserve the original exception as the cause so the failure is traceable.
            throw new RuntimeException(StrUtil.format("Failed to send message [{}]", message), e);
        }
    }
}
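
ParserMessage and KafkaTopicConstant are project-specific classes that the original post does not show. A minimal sketch of what they might look like is given below; the field names and the constant's value are assumptions. Because StreamRecords.objectBacked serializes the payload into the stream entry's field/value pairs through the template's hash mapper, a flat POJO with simple fields works best.

package com.guotie.dpc.kafka.message;

import lombok.Data;

/**
 * Hypothetical payload; the real fields are project-specific.
 * A flat POJO with simple (String-convertible) fields maps cleanly
 * into a Redis Stream entry.
 */
@Data
public class ParserMessage {
    private String fileId;
    private String status;
}

package com.guotie.dpc.kafka.constants;

/**
 * Hypothetical constant holder; the actual stream key is project-specific
 * (the name was kept from the earlier Kafka setup).
 */
public final class KafkaTopicConstant {
    public static final String DPC_C2_DATA_PARSER_SUCCESS = "DPC_C2_DATA_PARSER_SUCCESS";

    private KafkaTopicConstant() {
    }
}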

Consumer

package com.guotie.dpc.data.play.mq;

import com.guotie.dpc.kafka.constants.KafkaTopicConstant;
import com.guotie.dpc.kafka.message.ParserMessage;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.connection.stream.ObjectRecord;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.stream.StreamListener;
import org.springframework.stereotype.Service;

/**
 * @author liaozesong
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class OriginalConsumer implements StreamListener<String, ObjectRecord<String, ParserMessage>> {
    private final StringRedisTemplate stringRedisTemplate;
    
    @Override
    @SneakyThrows
    public void onMessage(ObjectRecord<String, ParserMessage> record) {
        ParserMessage message = record.getValue();
        try {
            log.info("Received message [{}]", message);
            // Business processing of the message goes here.
        } catch (Exception e) {
            log.warn("Failed to process message: {}", message);
            log.warn(e.getMessage(), e);
        } finally {
            // Acknowledge manually (XACK) so the entry leaves the group's pending list,
            // even when processing failed (failures are only logged here).
            this.stringRedisTemplate.opsForStream().acknowledge(KafkaTopicConstant.DPC_C2_DATA_PARSER_SUCCESS, record);
        }
    }
}
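
Because auto-acknowledge is disabled (see the configuration below), any entry that is delivered but never acknowledged stays in the consumer group's pending entries list (PEL). An optional sketch for inspecting that backlog follows, assuming the stream key doubles as the group name as it does elsewhere in this post; the class and method names are illustrative, not part of the original code.

package com.guotie.dpc.data.play.mq;

import com.guotie.dpc.kafka.constants.KafkaTopicConstant;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.connection.stream.PendingMessagesSummary;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Component;

/**
 * Hypothetical helper: logs how many entries are still unacknowledged
 * for the consumer group (XPENDING summary).
 */
@Slf4j
@Component
@RequiredArgsConstructor
public class PendingMessageMonitor {
    private final StringRedisTemplate stringRedisTemplate;

    public void logPending() {
        PendingMessagesSummary summary = stringRedisTemplate.opsForStream()
                .pending(KafkaTopicConstant.DPC_C2_DATA_PARSER_SUCCESS,
                        KafkaTopicConstant.DPC_C2_DATA_PARSER_SUCCESS);
        if (summary != null) {
            log.info("Pending entries for group [{}]: {}",
                    summary.getGroupName(), summary.getTotalPendingMessages());
        }
    }
}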

Consumer listener configuration

package com.guotie.dpc.data.play.config;

import com.guotie.dpc.data.play.mq.OriginalConsumer;
import com.guotie.dpc.kafka.constants.KafkaTopicConstant;
import com.guotie.dpc.kafka.message.ParserMessage;
import io.lettuce.core.XGroupCreateArgs;
import io.lettuce.core.XReadArgs;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.QueryTimeoutException;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.lettuce.LettuceConnection;
import org.springframework.data.redis.connection.lettuce.LettuceConverters;
import org.springframework.data.redis.connection.stream.Consumer;
import org.springframework.data.redis.connection.stream.ObjectRecord;
import org.springframework.data.redis.connection.stream.ReadOffset;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.stream.StreamMessageListenerContainer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

import javax.annotation.Resource;
import java.time.Duration;

/**
 * redis 消息队列
 *
 * @author liaozesong
 */
@Slf4j
@Configuration
public class RedisMqConfig {
    @Resource
    private ThreadPoolTaskExecutor threadPoolTaskExecutor;
    @Resource
    private OriginalConsumer originalConsumer;

    private static final String DEFAULT_TOPIC = KafkaTopicConstant.DPC_C2_DATA_PARSER_SUCCESS;
    private static final String DEFAULT_GROUP = DEFAULT_TOPIC;

    @Bean
    public StreamMessageListenerContainer<String, ObjectRecord<String, ParserMessage>> listener(RedisConnectionFactory connectionFactory) {
        // Create the stream and the consumer group up front (XGROUP CREATE ... MKSTREAM);
        // this throws if the group already exists, which is harmless.
        try {
            LettuceConnection connection = (LettuceConnection) connectionFactory.getConnection();
            XReadArgs.StreamOffset<byte[]> streamOffset = XReadArgs.StreamOffset.from(LettuceConverters.toBytes(DEFAULT_TOPIC), "0-0");
            connection.getNativeConnection().xgroupCreate(streamOffset, LettuceConverters.toBytes(DEFAULT_GROUP), XGroupCreateArgs.Builder.mkstream());
        } catch (Exception ex) {
            log.warn("Consumer group already exists: {}", ex.getMessage());
        }


        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, ParserMessage>> options = StreamMessageListenerContainer.StreamMessageListenerContainerOptions
                .builder()
                // Duration.ZERO blocks the XREADGROUP call indefinitely; the Lettuce command timeout
                // may then surface as QueryTimeoutException, which the error handler below ignores.
                .pollTimeout(Duration.ZERO)
                .batchSize(1)
                .targetType(ParserMessage.class)
                .executor(threadPoolTaskExecutor)
                .build();

        StreamMessageListenerContainer<String, ObjectRecord<String, ParserMessage>> container = StreamMessageListenerContainer
                .create(connectionFactory, options);


        // Register the consumer with the container.
        container.register(
                StreamMessageListenerContainer.StreamReadRequest.builder(StreamOffset.create(DEFAULT_TOPIC, ReadOffset.lastConsumed()))
                        .errorHandler((error) -> {
                            // Blocking reads regularly hit the client command timeout; only log real errors.
                            if (!(error instanceof QueryTimeoutException)) {
                                log.error(error.getMessage(), error);
                            }
                        })
                        .cancelOnError(e -> false)
                        .consumer(Consumer.from(DEFAULT_GROUP, DEFAULT_GROUP))
                        // Disable auto-acknowledge; the consumer calls XACK manually after processing.
                        .autoAcknowledge(false)
                        .build()
                , originalConsumer);
        container.start();
        return container;
    }
}
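
The configuration above injects a ThreadPoolTaskExecutor that is not defined anywhere in this post. If your project does not already expose such a bean, a minimal sketch (the pool sizes are placeholder values) could look like this:

package com.guotie.dpc.data.play.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
public class ExecutorConfig {

    /**
     * Executor used by the StreamMessageListenerContainer to run its polling task.
     * The sizes below are illustrative, not values from the original setup.
     */
    @Bean
    public ThreadPoolTaskExecutor threadPoolTaskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(2);
        executor.setMaxPoolSize(4);
        executor.setQueueCapacity(100);
        executor.setThreadNamePrefix("redis-stream-");
        executor.initialize();
        return executor;
    }
}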