文章目录
一、背景
很多小项目,有微服务间异步调用或者自己服务间异步调用的需求,这种情况下有时并不想再额外部署一个专业的消息队列。于是,基于redis实现一个简单的消息队列就变得必要。
在redis中,有一个stream的数据结构,可以利用它来实现消息队列。
1.1 stream介绍
stream是redis 5.0引入的一种数据结构。它是一个仅追加的日志型数据结构模型,支持消费组等消费策略。
1.2 常用命令
1.2.1 新增对象
可以使用下面的命令,向该队列中增加一个对象:
XADD mystream * sensor-id 1234 temperature 19.8
"1676898195695-0"
XADD mystream * sensor-id 1236 temperature 21.8
"1676902589037-0"
返回的是新数据的Id,该Id由unix毫秒时间戳和序号组成(格式为"时间戳-序号")
* 后面的键值对可以看作是json
1.2.2 查询
XRANGE mystream - +
XRANGE mystream 1676898195695 1676902589037
XREVRANGE mystream + - COUNT 1
这个函数表示查找IdMin~IdMax的所有数据,-表示最小Id,+表示最大Id
也可以指定查询多少个
1.2.3 创建消费组
redis的消息队列,借用kafka的设计,也有消费组的概念。
一个stream可以绑定多个消费组,不同消费组分别独立重复的消费stream中的数据。我们可以理解为相同的消息被不同的视角所解析,这个不同的视角就是消费组,可以对相同消息做不同的处理,达到解耦的目的。
XGROUP CREATE mystream mygroup $
XGROUP CREATE mystream mygroup1 $
ok
再添加一些数据
XADD mystream * name "zhifa" x 37.5 y 24.1
XADD mystream * name "xiaogong" x 106.3 y 44.2
XADD mystream * name "youla" x 45.2 y 88.5
1.2.4 消费
XREADGROUP GROUP mygroup Alice COUNT 1 STREAMS mystream >
1) 1) "mystream"
2) 1) 1) "1676941201844-0"
2) 1) "name"
2) "zhifa"
3) "x"
4) "37.5"
5) "y"
6) "24.1"
XREADGROUP GROUP mygroup Alice COUNT 1 STREAMS mystream >
1) 1) "mystream"
2) 1) 1) "1676941213223-0"
2) 1) "name"
2) "xiaogong"
3) "x"
4) "106.3"
5) "y"
6) "44.2"
XREADGROUP GROUP mygroup1 Alice COUNT 1 STREAMS mystream >
1) 1) "mystream"
2) 1) 1) "1676941201844-0"
2) 1) "name"
2) "zhifa"
3) "x"
4) "37.5"
5) "y"
6) "24.1"
XREADGROUP GROUP mygroup Alice COUNT 1 STREAMS mystream >
1) 1) "mystream"
2) 1) 1) "1676941218261-0"
2) 1) "name"
2) "youla"
3) "x"
4) "45.2"
5) "y"
6) "88.5"
从以上消费的实验中,我们印证了1.2.3的结论,下面,我们就可以开始编写SpringBoot下的redis-queue实现了。
二、redis-queue框架
2.1 代码整体结构
2.2 客户端Util-zfRedisMqUtil
package indi.zhifa.recipe.bailan.framework.queue.entity;

/**
 * Payload value types supported by the redis queue.
 * The code is stored in the stream record's "type" field and drives
 * encoding on the producer side and decoding on the consumer side.
 */
@EnumDesc(name = "值类型",desc = "值类型,整数、浮点、字符串、JSON")
@AllArgsConstructor
public enum EQueueValueType {
    DEFAULT(0,"默认","默认是字符串"),
    STRING(1,"字符串","普通字符串类型"),
    JSON(2,"JSON","JSON类型"),
    INT(3,"整型","整数类型"),
    // Fixed description: long is a 64-bit (64位) integer, not 64-byte (64字节).
    LONG(4,"长整型","64位整型");

    // Wire code written into the stream record; also the ordinal-style index
    // used by the consumer (EQueueValueType.values()[code]).
    @EnumValue
    @Getter
    int code;
    @Getter
    String name;
    @Getter
    String desc;
}
package indi.zhifa.recipe.bailan.framework.queue.util;

/**
 * Producer-side helper for the redis-stream message queue.
 * Each message is an XADD record with three hash fields:
 * "id" (message id), "type" (EQueueValueType code) and "data" (raw payload bytes).
 */
@Component
public class ZfRedisMqUtil {
    public static final String TYPE_KEY = "type";
    public static final String MSG_ID_KEY = "id";
    public static final String DATA_KEY = "data";

    private final StreamOperations<String,String,Object> mStreamOperations;

    public ZfRedisMqUtil(RedisConnectionFactory factory){
        RedisTemplate<String, Object> redisTemplate = genTemplate(factory);
        mStreamOperations = redisTemplate.opsForStream();
    }

    /**
     * Builds a RedisTemplate whose hash values are serialized as raw byte
     * arrays, so each payload type controls its own encoding.
     */
    public RedisTemplate<String,Object> genTemplate(RedisConnectionFactory factory){
        RedisTemplate<String, Object> redisTemplate = new RedisTemplate<>();
        redisTemplate.setConnectionFactory(factory);
        redisTemplate.setKeySerializer(RedisSerializer.string());
        redisTemplate.setValueSerializer(RedisSerializer.string());
        redisTemplate.setHashKeySerializer(RedisSerializer.string());
        // Raw bytes for hash values: the consumer decodes them per EQueueValueType.
        redisTemplate.setHashValueSerializer(RedisSerializer.byteArray());
        redisTemplate.afterPropertiesSet();
        return redisTemplate;
    }

    /**
     * Publishes one message to the given stream.
     *
     * @param pStream stream (queue) key
     * @param pMsgId  application-level message id used for handler dispatch
     * @param pType   payload encoding; strings are UTF-8, JSON via fastjson
     * @param pData   payload object; must not be null
     * @throws IllegalArgumentException if pData is null or pType is unsupported
     */
    public void sendMessage(String pStream , Integer pMsgId, EQueueValueType pType, Object pData){
        if (null == pData) {
            // Fail fast instead of NPE-ing deep inside the switch below.
            throw new IllegalArgumentException("pData must not be null");
        }
        Map<String,Object> data = new HashMap<>();
        data.put(MSG_ID_KEY,Convert.intToBytes(pMsgId));
        data.put(TYPE_KEY,Convert.intToBytes(pType.getCode()));
        FastJsonConfig fastJsonConfig = RedisConfig.getRedisFastJson();
        byte[] byteData;
        switch (pType){
            case DEFAULT:
            case STRING:
                // Explicit UTF-8 so the consumer can decode deterministically.
                byteData = pData.toString().getBytes(StandardCharsets.UTF_8);
                break;
            case JSON:
                byteData = JSON.toJSONBytes(pData,fastJsonConfig.getWriterFeatures());
                break;
            case INT:
                byteData = Convert.intToBytes((int)pData);
                break;
            case LONG:
                byteData = Convert.longToBytes((long)pData);
                break;
            default:
                // Fixed: previously fell through with byteData = null, which would
                // put a null hash value into the XADD record; fail fast instead.
                throw new IllegalArgumentException("Unsupported queue value type: " + pType);
        }
        data.put(DATA_KEY,byteData);
        mStreamOperations.add(pStream,data);
    }
}
2.3 消息队列模板配置类
package indi.zhifa.recipe.bailan.framework.queue.config;

/**
 * Builds and starts one StreamMessageListenerContainer per entry in
 * queue.consumer-pairs, creating the consumer group first when it does
 * not exist yet.
 */
@RequiredArgsConstructor
@Configuration
@Slf4j
public class RedisQueueConfig {
    private final RedisConnectionFactory mRedisConnectionFactory;
    private final ThreadPoolTaskExecutor mThreadPoolTaskExecutor;
    private final RedisQueueProperty redisQueueProperty;
    private final RedisConsumerMap mRedisConsumerMap;
    private final RedisTemplate<String,Object> mRedisTemplate;

    StreamMessageListenerContainer<String, MapRecord<String,Object,byte[]>>[] streamMessageListenerContainers;

    @Bean
    public StreamMessageListenerContainer<String, MapRecord<String,Object,byte[]>>[] redisMesListeners() {
        List<ConsumerConfig> consumerPairList = redisQueueProperty.getConsumerPairs();
        if(CollectionUtils.isEmpty(consumerPairList)){
            // Fixed: return an empty array instead of null so injection points
            // never observe a null bean.
            streamMessageListenerContainers = new StreamMessageListenerContainer[0];
            return streamMessageListenerContainers;
        }
        streamMessageListenerContainers = new StreamMessageListenerContainer[consumerPairList.size()];
        StreamOperations<String,Object,byte[]> streamOperations = mRedisTemplate.opsForStream();
        for(int i=0;i<consumerPairList.size();i++){
            ConsumerConfig consumerConfig = consumerPairList.get(i);
            RedisConsumer redisConsumer = mRedisConsumerMap.getConsumer(consumerConfig.getName());
            // XGROUP CREATE fails if the group already exists, so probe first.
            boolean groupCreated = false;
            // Fixed: hasKey returns Boolean; guard against null before unboxing.
            if(Boolean.TRUE.equals(mRedisTemplate.hasKey(consumerConfig.getStream()))){
                StreamInfo.XInfoGroups groups = streamOperations.groups(consumerConfig.getStream());
                if(null != groups){
                    for (StreamInfo.XInfoGroup group : groups) {
                        if(group.groupName().equals(consumerConfig.getGroup())){
                            groupCreated = true;
                            break;
                        }
                    }
                }
            }
            if(!groupCreated){
                streamOperations.createGroup(consumerConfig.getStream(),consumerConfig.getGroup());
            }
            StreamMessageListenerContainer<String, MapRecord<String,Object,byte[]>> container =
                    streamContainer(consumerConfig.getStream(), consumerConfig.getGroup(), consumerConfig.getName(),
                            consumerConfig.getBitchSize(),consumerConfig.getTimeOut(),
                            redisConsumer);
            streamMessageListenerContainers[i] = container;
            container.start();
        }
        return streamMessageListenerContainers;
    }

    /**
     * Wires one container: poll options, group/consumer identity, error
     * handler and the listener itself. Auto-ack is disabled so the listener
     * acks only after successful handling.
     */
    private StreamMessageListenerContainer<String, MapRecord<String,Object,byte[]>> streamContainer(String pStream, String pGroup, String pConsumer,
                                                                                                    int pBatchSize,long pTimeOut,
                                                                                                    RedisConsumer pRedisConsumer){
        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, MapRecord<String,Object,byte[]>> options =
                StreamMessageListenerContainer.StreamMessageListenerContainerOptions
                        .builder()
                        .pollTimeout(Duration.ofMillis(pTimeOut))
                        .batchSize(pBatchSize)
                        .executor(mThreadPoolTaskExecutor)
                        .keySerializer(RedisSerializer.string())
                        .hashKeySerializer(RedisSerializer.string())
                        // Must match the producer: hash values travel as raw bytes.
                        .hashValueSerializer(RedisSerializer.byteArray())
                        .build();
        StreamMessageListenerContainer<String, MapRecord<String,Object,byte[]>> container = StreamMessageListenerContainer
                .create(mRedisConnectionFactory, options);
        // Consume only messages not yet delivered to this group.
        StreamOffset<String> offset = StreamOffset.create(pStream, ReadOffset.lastConsumed());
        // Identity of this consumer inside the group.
        Consumer consumer = Consumer.from(pGroup, pConsumer);
        StreamMessageListenerContainer.StreamReadRequest<String> streamReadRequest = StreamMessageListenerContainer.StreamReadRequest.builder(offset)
                .errorHandler((error)->{
                    if(null != pRedisConsumer.getErrMsgHandler()){
                        pRedisConsumer.getErrMsgHandler().handleError(error);
                    }else{
                        log.error(error.getMessage());
                    }
                })
                // Keep polling after errors; the error handler above decides what to do.
                .cancelOnError(e -> false)
                .consumer(consumer)
                // Manual ack: the listener acknowledges after successful handling.
                .autoAcknowledge(false)
                .build();
        container.register(streamReadRequest, pRedisConsumer.getRedisMsgConsumerHandler());
        return container;
    }
}
2.4 消费端核心处理类
package indi.zhifa.recipe.bailan.framework.queue.handler;

/**
 * Base stream listener: decodes a MapRecord produced by ZfRedisMqUtil
 * (fields "type", "id", "data"), dispatches it to the handler registered
 * for its message id, and acks only after the handler succeeds.
 */
@Slf4j
public abstract class BaseRedisMsgConsumerListener implements StreamListener<String, MapRecord<String,Object,byte[]>> {
    public static final String TYPE_KEY = "type";
    public static final String MSG_ID_KEY = "id";
    public static final String DATA_KEY = "data";

    protected final RedisTemplate<String,Object> mRedisTemplate;
    protected final MsgHandlerMap mMsgHandlerMap;
    protected final String mGroup;

    protected BaseRedisMsgConsumerListener(
            String pGroup,
            RedisTemplate<String,Object> pRedisTemplate){
        // NOTE: initMsgHandlerMap() is invoked before the subclass constructor
        // body runs; implementations must not capture subclass fields eagerly.
        mMsgHandlerMap = initMsgHandlerMap();
        mRedisTemplate = pRedisTemplate;
        mGroup = pGroup;
    }

    /** Subclasses supply the msgId -> handler mapping. */
    protected abstract MsgHandlerMap initMsgHandlerMap();

    @Override
    public void onMessage(MapRecord<String,Object,byte[]> pMessage){
        RecordId recordId = pMessage.getId();
        String stream = pMessage.getStream();
        Map<Object,byte[]> valueMap = pMessage.getValue();
        Integer typeCode = getInteger(valueMap.get(TYPE_KEY));
        Integer msgId = getInteger(valueMap.get(MSG_ID_KEY));
        byte[] valueByte = valueMap.get(DATA_KEY);
        // Fixed: was "typeCode > length", which let typeCode == length through to
        // an ArrayIndexOutOfBoundsException below; also guard a missing field.
        if(null == typeCode || typeCode >= EQueueValueType.values().length || typeCode < 0){
            throw new ServiceException(TYPE_KEY+"字段缺失或不合法");
        }
        // Unknown message ids are skipped (and deliberately not acked).
        MsgHandlerInfo msgHandlerInfo = mMsgHandlerMap.getHandler(msgId);
        if(null == msgHandlerInfo){
            return;
        }
        IMessageHandler messageHandler = msgHandlerInfo.getMessageHandler();
        EQueueValueType valueType = EQueueValueType.values()[typeCode];
        try{
            switch (valueType) {
                case DEFAULT:
                case STRING:
                    messageHandler.handler(getStr(valueByte));
                    break;
                case JSON:
                    Class cls = msgHandlerInfo.getCls();
                    // Fall back to a generic parse when no target class was registered.
                    Object obj = (null != cls) ? getObject(valueByte, cls) : getObject(valueByte, Object.class);
                    messageHandler.handler(obj);
                    break;
                case INT:
                    messageHandler.handler(getInteger(valueByte));
                    break;
                case LONG:
                    messageHandler.handler(getLong(valueByte));
                    break;
            }
            // Ack only on success; failures stay pending for redelivery.
            mRedisTemplate.opsForStream().acknowledge(mGroup,pMessage);
        }catch (Exception ex){
            onError(recordId.getValue(),stream,msgId,valueByte,ex);
        }
    }

    /**
     * Error hook; override to customize. Default implementation logs the
     * failure including the stack trace.
     */
    protected void onError(String pId, String pStream, Integer pMsgId, byte[] pValueByte, Exception ex){
        // Fixed: pId/pStream were swapped relative to the format string, and the
        // exception is now passed as the last argument so the stack trace is kept.
        log.error("stream:{}-id:{}-msgId:{} 解析发生错误,错误信息是{}",pStream,pId,pMsgId,ex.getMessage(),ex);
    }

    protected String getStr(byte[] pByte){
        // Fixed: decode as UTF-8 explicitly — the producer encodes strings as
        // UTF-8, while the platform default charset may differ.
        return new String(pByte, java.nio.charset.StandardCharsets.UTF_8);
    }
    protected Integer getInteger(byte[] pByte){
        return Convert.bytesToInt(pByte);
    }
    protected Long getLong(byte[] pByte){
        return Convert.bytesToLong(pByte);
    }
    protected <T> T getObject(byte[] pByte, Class<T> pCls){
        return JSON.parseObject(pByte,pCls);
    }
}
2.5 消费者相关的类
MsgHandlerMap
package indi.zhifa.recipe.bailan.framework.queue.bean;
/**
 * Registry mapping an application-level message id to its handler configuration.
 *
 * NOTE(review): the constructor calls the overridable init(), so a subclass's
 * init() runs before the subclass constructor body; init() must not eagerly
 * capture subclass fields that are assigned later in construction.
 */
public abstract class MsgHandlerMap {
// msgId -> handler info; populated by init() during construction.
protected Map<Integer, MsgHandlerInfo> msgHandlerMapData;
protected MsgHandlerMap(){
msgHandlerMapData = new HashMap<>();
init();
}
/** Subclasses register their handlers into msgHandlerMapData here. */
protected abstract void init();
/** Returns the handler info for pMsgId, or null if none is registered. */
public MsgHandlerInfo getHandler(Integer pMsgId){
return msgHandlerMapData.get(pMsgId);
}
}
RedisConsumerMap
package indi.zhifa.recipe.bailan.framework.queue.bean;

/**
 * Registry of named RedisConsumer instances; subclasses populate
 * consumerMap in their constructors.
 */
public abstract class RedisConsumerMap {
    // consumer name -> consumer; filled in by subclass constructors.
    protected final Map<String, RedisConsumer> consumerMap = new HashMap<>();

    protected RedisConsumerMap(){
    }

    /** Returns the consumer registered under pName, or null when absent. */
    public RedisConsumer getConsumer(String pName){
        return consumerMap.get(pName);
    }
}
MsgHandlerInfo
package indi.zhifa.recipe.bailan.framework.queue.entity;

/**
 * Pairs a message handler with the (optional) target class that JSON
 * payloads should be deserialized into.
 */
@Data
public class MsgHandlerInfo {
    // Handler invoked with the decoded payload.
    IMessageHandler messageHandler;
    // JSON deserialization target; null means parse into a generic Object.
    // Fixed: was a raw Class — the wildcard form avoids raw-type warnings
    // and stays assignment-compatible with all existing callers.
    Class<?> cls;
}
IMessageHandler
/**
 * Callback invoked with a decoded queue message of type {@code T}.
 */
@FunctionalInterface
public interface IMessageHandler<T> {
    /** Handles one decoded message. */
    void handler(T message);
}
IRedisMsgErrHandler
/**
 * Callback invoked when consuming or dispatching a queue message fails.
 */
@FunctionalInterface
public interface IRedisMsgErrHandler {
    /** Handles one consumption error. */
    void handleError(Throwable t);
}
RedisConsumer
/**
 * Bundles the listener that consumes messages with the error handler the
 * listener container should use for this consumer.
 */
public class RedisConsumer {
    private final BaseRedisMsgConsumerListener mRedisMsgConsumerService;
    private final IRedisMsgErrHandler mRedisMsgErrService;

    public RedisConsumer(BaseRedisMsgConsumerListener pRedisMsgConsumerService,
                         IRedisMsgErrHandler pRedisMsgErrService) {
        this.mRedisMsgConsumerService = pRedisMsgConsumerService;
        this.mRedisMsgErrService = pRedisMsgErrService;
    }

    public IRedisMsgErrHandler getErrMsgHandler(){
        return mRedisMsgErrService;
    }

    public BaseRedisMsgConsumerListener getRedisMsgConsumerHandler(){
        return mRedisMsgConsumerService;
    }
}
2.6 所需的配置类
package indi.zhifa.recipe.bailan.framework.queue.property;
/**
 * One consumer binding: which stream to listen on, as which group/consumer.
 */
@Data
public class ConsumerConfig {
// Redis stream (queue) key.
String stream;
// Consumer group name.
String group;
// Consumer name inside the group; must match a key registered in RedisConsumerMap.
String name;
// NOTE(review): "bitchSize" looks like a typo for "batchSize" (max records per
// poll); renaming would break the existing "bitch-size" yaml binding, so kept.
int bitchSize;
// Poll timeout; used via Duration.ofMillis in RedisQueueConfig, so milliseconds.
long timeOut;
}
package indi.zhifa.recipe.bailan.framework.queue.property;
/**
 * Binds the "queue" section of the application configuration.
 */
@Configuration
@Data
@ConfigurationProperties(prefix = "queue")
public class RedisQueueProperty {
// One entry per consumer container; bound from queue.consumer-pairs.
List<ConsumerConfig> consumerPairs;
}
2.7 pom引用
<dependencies>
<!-- Project module with redis helpers (RedisConfig/fastjson setup) -->
<dependency>
<groupId>indi.zhifa.recipe</groupId>
<artifactId>framework-redis</artifactId>
</dependency>
<!-- Project module that presumably provides the enum annotations (@EnumDesc) -->
<dependency>
<groupId>indi.zhifa.recipe</groupId>
<artifactId>framework-enums-client</artifactId>
</dependency>
<!-- ******************lombok****************************-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
三、客户端使用
本期,我们就使用芝法酱躺平攻略(17)的例子来讲解。
3.1 MsgErrorHandler
package indi.zhifa.recipe.bailan5.coupon.handler;
@Component
@Slf4j
public class MsgErrorHandler implements IRedisMsgErrHandler {
@Override
public void handleError(Throwable t) {
log.error(t.getMessage());
}
}
3.2 CouponMsgHandler
package indi.zhifa.recipe.bailan5.coupon.handler;

/**
 * Applies a coupon-storage change message to the coupon table.
 */
@Slf4j
@RequiredArgsConstructor
@Component
public class CouponMsgHandler implements IMessageHandler<CouponStorageMsg> {
    private final ICouponDbService mCouponDbService;

    @Override
    public void handler(CouponStorageMsg message) {
        // Idiom fix: parameterized SLF4J logging instead of string concatenation.
        log.info("接到消息,{}", JSON.toJSONString(message));
        // Only id + storage are set, so updateById touches just the storage column.
        CouponEntity couponEntity = new CouponEntity();
        couponEntity.setId(message.getId());
        couponEntity.setStorage(message.getStorage());
        mCouponDbService.updateById(couponEntity);
    }
}
3.3 CouponMsgListener
/**
 * Listener for the coupon stream; routes COUPON_USE messages to CouponMsgHandler.
 */
@Component
public class CouponMsgListener extends BaseRedisMsgConsumerListener {
    private final CouponMsgHandler mCouponMsgHandler;

    @Autowired
    public CouponMsgListener( RedisTemplate<String, Object> pRedisTemplate,
                              CouponMsgHandler pCouponMsgHandler) {
        super(AppConst.COUPON_CONSUME_GROUP, pRedisTemplate);
        mCouponMsgHandler = pCouponMsgHandler;
    }

    @Override
    protected MsgHandlerMap initMsgHandlerMap() {
        // This method is invoked from the super constructor, BEFORE
        // mCouponMsgHandler is assigned in our constructor body.
        MsgHandlerMap msgHandlerMap = new MsgHandlerMap() {
            @Override
            protected void init() {
                MsgHandlerInfo couponHandler = new MsgHandlerInfo();
                // Fixed: cls is the JSON deserialization target, i.e. the message
                // type CouponStorageMsg — not the handler class.
                couponHandler.setCls(CouponStorageMsg.class);
                // Fixed: registering mCouponMsgHandler directly would capture null
                // (this runs during super construction). The lambda defers the
                // field read until a message actually arrives.
                IMessageHandler<CouponStorageMsg> lazyHandler =
                        message -> mCouponMsgHandler.handler(message);
                couponHandler.setMessageHandler(lazyHandler);
                this.msgHandlerMapData.put(AppConst.COUPON_USE,couponHandler);
            }
        };
        return msgHandlerMap;
    }
}
3.4 RedisConsumerMapImpl
/**
 * Registers the application's consumers under the names referenced by the
 * queue.consumer-pairs configuration.
 */
@Component
public class RedisConsumerMapImpl extends RedisConsumerMap {
    final CouponMsgListener mCouponMsgListener;
    final MsgErrorHandler mMsgErrorService;

    public RedisConsumerMapImpl(CouponMsgListener pCouponMsgListener,
                                MsgErrorHandler pMsgErrorService){
        this.mCouponMsgListener = pCouponMsgListener;
        this.mMsgErrorService = pMsgErrorService;
        // Name must match ConsumerConfig.name in the yaml configuration.
        consumerMap.put("defaultConsumer", new RedisConsumer(pCouponMsgListener, pMsgErrorService));
    }
}
3.5 yml配置
queue:
  consumer-pairs: # consumer bindings, bound to RedisQueueProperty.consumerPairs
    - stream: "COUPON" # redis stream (queue) key
      group: "COUPON_STORAGE_CHANGE" # consumer group name
      name: "defaultConsumer" # must match a key registered in RedisConsumerMapImpl
      bitch-size: 10 # records fetched per poll (name kept to match ConsumerConfig.bitchSize typo)
      time-out: 0 # poll timeout in ms; 0 presumably blocks indefinitely — confirm