一、SETNX命令实现分布式锁
实现分布式锁之前先看两个 Redis 命令:
-
SETNX
将
key
设置值为value
,如果key
不存在,这种情况下等同SET命令。 当key
存在时,什么也不做。SETNX
是 "SET if Not eXists" 的简写。返回值Integer reply, 特定值:
1 :如果key被设置了
0 :如果key没有被设置
例子
redis> SETNX mykey "Hello"
(integer) 1
redis> SETNX mykey "World"
(integer) 0
redis> GET mykey
"Hello"
redis>
-
GETSET
原子性地将key设置为value并且返回原来key对应的value。如果key存在但是对应的value不是字符串,就返回错误。
设计模式
GETSET可以和INCR一起使用实现支持重置的计数功能。举个例子:每当有事件发生的时候,一段程序都会调用INCR给key mycounter加1,但是有时我们需要获取计数器的值,并且自动将其重置为0。这可以通过GETSET mycounter “0”来实现:
INCR mycounter GETSET mycounter "0" GET mycounter
返回值
bulk-string-reply: 返回之前的旧值,如果之前
Key
不存在将返回nil
。例子
redis> INCR mycounter (integer) 1 redis> GETSET mycounter "0" "1" redis> GET mycounter "0" redis>
这两个命令在 java 中对应为 setIfAbsent
和 getAndSet
分布式锁的实现:
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
@Component
@Slf4j
public class RedisLock {
@Autowired
StringRedisTemplate redisTemplate;
/**
* 加锁
* @param key
* @param value 当前时间 + 超时时间
* @return
*/
public boolean lock(String key, String value){
if (redisTemplate.opsForValue().setIfAbsent(key, value)){
return true;
}
//解决死锁,且当多个线程同时来时,只会让一个线程拿到锁
String currentValue = redisTemplate.opsForValue().get(key);
//如果过期
if (!StringUtils.isEmpty(currentValue) &&
Long.parseLong(currentValue) < System.currentTimeMillis()){
//获取上一个锁的时间
String oldValue = redisTemplate.opsForValue().getAndSet(key, value);
if (StringUtils.isEmpty(oldValue) && oldValue.equals(currentValue)){
return true;
}
}
return false;
}
/**
* 解锁
* @param key
* @param value
*/
public void unlock(String key, String value){
try {
String currentValue = redisTemplate.opsForValue().get(key);
redisTemplate.opsForValue().getOperations().delete(key);
}catch (Exception e){
log.error("【redis锁】解锁失败, {}", e);
}
}
}
使用:
@SpringBootTest
@RunWith(SpringRunner.class)
public class LockTest {

    @Autowired
    RedisLock redisLock;

    /** Lock timeout: 10 seconds. */
    private static final int TIMEOUT = 10 * 1000;

    /**
     * Simulated flash sale guarded by the distributed lock.
     *
     * Demo: break at step 1, run this method, step to 2, then start a second
     * run of the method — the second thread throws, and no other thread can
     * take the lock until the first one has executed step 3.
     */
    @Test
    public void secKill() {
        final String productId = "1";
        final String expiresAt = String.valueOf(System.currentTimeMillis() + TIMEOUT);
        // 1. acquire the lock
        boolean acquired = redisLock.lock(productId, expiresAt);
        if (!acquired) {
            throw new RuntimeException("人太多了,等会儿再试吧~");
        }
        // 2. the actual flash-sale business logic
        System.out.println("秒杀的业务逻辑");
        // 3. release the lock
        redisLock.unlock(productId, expiresAt);
    }
}
二、Redisson实现分布式锁+整合redis
项目结构
1、用到的依赖
<!-- 引入Redisson依赖 -->
<dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson-spring-boot-starter</artifactId>
<version>3.12.5</version>
</dependency>
<!-- SpringBoot 拦截器 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
2、相关类与文件
(1)redisson-single.yml
singleServerConfig:
idleConnectionTimeout: 10000
connectTimeout: 10000
timeout: 3000
retryAttempts: 3
retryInterval: 1500
# 如果Redis服务端配置有密码需要替换password的值
password: null
subscriptionsPerConnection: 5
clientName: null
# 替换为自己真实Redis服务端连接
address: "redis://127.0.0.1:6379"
subscriptionConnectionMinimumIdleSize: 1
subscriptionConnectionPoolSize: 50
connectionMinimumIdleSize: 24
connectionPoolSize: 64
database: 0
dnsMonitoringInterval: 5000
threads: 16
nettyThreads: 32
codec: !<org.redisson.codec.FstCodec> {}
transportMode: "NIO"
(2)FastJson2JsonRedisSerializer
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.data.redis.serializer.SerializationException;
import com.alibaba.fastjson.parser.ParserConfig;
import org.springframework.util.Assert;
import java.nio.charset.Charset;
/**
 * Redis value serializer backed by FastJson: objects are written as JSON text
 * (with an embedded "@type" class name) in UTF-8 and parsed back into
 * {@code clazz} instances.
 *
 * @author
 */
public class FastJson2JsonRedisSerializer<T> implements RedisSerializer<T>
{
    // NOTE(review): never used by serialize/deserialize (both go through
    // fastjson); kept only so setObjectMapper() callers (e.g. RedisConfig)
    // keep compiling.
    @SuppressWarnings("unused")
    private ObjectMapper objectMapper = new ObjectMapper();

    public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");

    // Target type for deserialization.
    private Class<T> clazz;

    static
    {
        // SECURITY NOTE(review): global autotype lets fastjson instantiate any
        // class named by "@type" in the payload — a well-known RCE vector when
        // the Redis data can be influenced by untrusted input. Prefer an
        // explicit accept-list (ParserConfig#addAccept) — TODO confirm with
        // the team before tightening.
        ParserConfig.getGlobalInstance().setAutoTypeSupport(true);
    }

    /**
     * @param clazz the type returned by {@link #deserialize(byte[])}
     */
    public FastJson2JsonRedisSerializer(Class<T> clazz)
    {
        super();
        this.clazz = clazz;
    }

    /**
     * Serializes {@code t} as JSON (class name included) in UTF-8;
     * null becomes an empty byte array.
     */
    @Override
    public byte[] serialize(T t) throws SerializationException
    {
        if (t == null)
        {
            return new byte[0];
        }
        // WriteClassName embeds "@type" so the concrete class survives the round trip.
        return JSON.toJSONString(t, SerializerFeature.WriteClassName).getBytes(DEFAULT_CHARSET);
    }

    /**
     * Parses UTF-8 JSON bytes back into a {@code clazz} instance;
     * null/empty input yields null.
     */
    @Override
    public T deserialize(byte[] bytes) throws SerializationException
    {
        if (bytes == null || bytes.length <= 0)
        {
            return null;
        }
        String str = new String(bytes, DEFAULT_CHARSET);
        return JSON.parseObject(str, clazz);
    }

    /**
     * Replaces the (currently unused) Jackson mapper; must not be null.
     */
    public void setObjectMapper(ObjectMapper objectMapper)
    {
        Assert.notNull(objectMapper, "'objectMapper' must not be null");
        this.objectMapper = objectMapper;
    }

    // Helper mirroring Jackson-based serializers; not referenced by this class itself.
    protected JavaType getJavaType(Class<?> clazz)
    {
        return TypeFactory.defaultInstance().constructType(clazz);
    }
}
(3)RedisCache
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.BoundSetOperations;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.ListOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.stereotype.Component;
/**
 * Spring Redis helper wrapping the common value / list / set / hash operations.
 *
 * @author
 **/
@Component
public class RedisCache {

    @Autowired
    public RedisTemplate redisTemplate;

    /**
     * Cache a single object (Integer, String, entity, ...).
     *
     * @param key   cache key
     * @param value cached value
     * @return the value operations handle used
     */
    @SuppressWarnings("unchecked")
    public <T> ValueOperations<String, T> setCacheObject(String key, T value) {
        ValueOperations<String, T> operation = redisTemplate.opsForValue();
        operation.set(key, value);
        return operation;
    }

    /**
     * Cache a single object with an expiry.
     *
     * @param key      cache key
     * @param value    cached value
     * @param timeout  expiry amount
     * @param timeUnit expiry unit
     * @return the value operations handle used
     */
    @SuppressWarnings("unchecked")
    public <T> ValueOperations<String, T> setCacheObject(String key, T value, Integer timeout, TimeUnit timeUnit) {
        ValueOperations<String, T> operation = redisTemplate.opsForValue();
        operation.set(key, value, timeout, timeUnit);
        return operation;
    }

    /**
     * Fetch a cached object.
     *
     * @param key cache key
     * @return the cached value, or null when absent
     */
    @SuppressWarnings("unchecked")
    public <T> T getCacheObject(String key) {
        ValueOperations<String, T> operation = redisTemplate.opsForValue();
        return operation.get(key);
    }

    /**
     * Delete a single key.
     *
     * @param key cache key
     */
    public void deleteObject(String key) {
        redisTemplate.delete(key);
    }

    /**
     * Delete a collection of keys.
     *
     * @param collection keys to delete
     */
    @SuppressWarnings("unchecked")
    public void deleteObject(Collection collection) {
        // Robustness: skip the round trip for null/empty input.
        if (collection != null && !collection.isEmpty()) {
            redisTemplate.delete(collection);
        }
    }

    /**
     * Delete all keys matching a pattern.
     *
     * @param keyLike key pattern, e.g. "prefix*"
     */
    @SuppressWarnings("unchecked")
    public void deleteByLike(String keyLike) {
        if (keyLike != null) {
            Collection<String> matched = keys(keyLike);
            // BUG FIX: guard against a null/empty match set before calling delete.
            if (matched != null && !matched.isEmpty()) {
                redisTemplate.delete(matched);
            }
        }
    }

    /**
     * Cache a List (elements are left-pushed, so stored order is reversed).
     *
     * @param key      cache key
     * @param dataList list to cache; null is a no-op
     * @return the list operations handle used
     */
    @SuppressWarnings("unchecked")
    public <T> ListOperations<String, T> setCacheList(String key, List<T> dataList) {
        ListOperations listOperation = redisTemplate.opsForList();
        if (null != dataList) {
            for (T item : dataList) {
                listOperation.leftPush(key, item);
            }
        }
        return listOperation;
    }

    /**
     * Fetch a cached list.
     *
     * @param key cache key
     * @return elements at the key, empty list when the key is absent
     */
    public <T> List<T> getCacheList(String key) {
        List<T> dataList = new ArrayList<T>();
        ListOperations<String, T> listOperation = redisTemplate.opsForList();
        Long size = listOperation.size(key);
        // BUG FIX: size() can return null (e.g. in a pipeline/transaction);
        // the original unboxed it directly and could NPE.
        if (size == null) {
            return dataList;
        }
        for (int i = 0; i < size; i++) {
            dataList.add(listOperation.index(key, i));
        }
        return dataList;
    }

    /**
     * Multi-get: fetch the value of each key in the collection.
     *
     * @param collection keys to read
     * @return values in iteration order (absent keys yield null entries)
     */
    @SuppressWarnings("unchecked")
    public <T> List<T> mgetCacheList(Collection collection) {
        List<T> dataList = new ArrayList<T>();
        // BUG FIX: the original dereferenced collection.size() without a null check.
        if (collection == null || collection.isEmpty()) {
            return dataList;
        }
        ValueOperations<String, T> operation = redisTemplate.opsForValue();
        collection.forEach(item -> dataList.add(operation.get(item)));
        return dataList;
    }

    /**
     * Fetch the values of all keys matching a pattern.
     *
     * @param keyLike key pattern, e.g. "prefix*"
     * @return matching values
     */
    public <T> List<T> getCacheListByLike(String keyLike) {
        // Delegates to mgetCacheList, which handles null/empty key sets.
        return mgetCacheList(keys(keyLike));
    }

    /**
     * Cache a Set.
     *
     * @param key     cache key
     * @param dataSet elements to add; null is a no-op
     * @return the bound set operations handle used
     */
    public <T> BoundSetOperations<String, T> setCacheSet(String key, Set<T> dataSet) {
        BoundSetOperations<String, T> setOperation = redisTemplate.boundSetOps(key);
        // Robustness: the original NPEd on a null set.
        if (dataSet != null) {
            for (T item : dataSet) {
                setOperation.add(item);
            }
        }
        return setOperation;
    }

    /**
     * Fetch a cached set.
     *
     * @param key cache key
     * @return members at the key
     */
    public <T> Set<T> getCacheSet(String key) {
        BoundSetOperations<String, T> operation = redisTemplate.boundSetOps(key);
        return operation.members();
    }

    /**
     * Cache a Map as a Redis hash.
     *
     * @param key     cache key
     * @param dataMap entries to store; null is a no-op
     * @return the hash operations handle used
     */
    @SuppressWarnings("unchecked")
    public <T> HashOperations<String, String, T> setCacheMap(String key, Map<String, T> dataMap) {
        HashOperations hashOperations = redisTemplate.opsForHash();
        if (null != dataMap) {
            for (Map.Entry<String, T> entry : dataMap.entrySet()) {
                hashOperations.put(key, entry.getKey(), entry.getValue());
            }
        }
        return hashOperations;
    }

    /**
     * Fetch a cached hash as a Map.
     *
     * @param key cache key
     * @return all hash entries at the key
     */
    @SuppressWarnings("unchecked")
    public <T> Map<String, T> getCacheMap(String key) {
        return redisTemplate.opsForHash().entries(key);
    }

    /**
     * List keys matching a pattern.
     * NOTE(review): KEYS is O(N) over the whole keyspace; prefer SCAN in
     * production — TODO confirm usage sites can tolerate the cost.
     *
     * @param pattern key pattern
     * @return matching keys
     */
    @SuppressWarnings("unchecked")
    public Collection<String> keys(String pattern) {
        return redisTemplate.keys(pattern);
    }
}
(4)RedisConfig
import org.redisson.Redisson;
import org.redisson.api.RedissonClient;
import org.redisson.config.Config;
import org.springframework.cache.annotation.CachingConfigurerSupport;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
/**
 * Redis configuration: a RedisTemplate using FastJson value serialization and
 * a Redisson client built from a classpath YAML file.
 *
 * @author ruoyi
 */
@Configuration
@EnableCaching
public class RedisConfig extends CachingConfigurerSupport
{
    @Bean
    @SuppressWarnings(value = { "unchecked", "rawtypes" })
    public RedisTemplate<Object, Object> redisTemplate(RedisConnectionFactory connectionFactory)
    {
        RedisTemplate<Object, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(connectionFactory);
        // Values are (de)serialized by fastjson with embedded class names.
        FastJson2JsonRedisSerializer serializer = new FastJson2JsonRedisSerializer(Object.class);
        ObjectMapper mapper = new ObjectMapper();
        mapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
        // NOTE(review): enableDefaultTyping is deprecated and unsafe on untrusted
        // data (arbitrary-type deserialization). Recent Jackson versions use
        // activateDefaultTyping with a PolymorphicTypeValidator instead.
        mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
        // NOTE(review): the fastjson serializer stores but never uses this
        // ObjectMapper, so the configuration above has no runtime effect —
        // see FastJson2JsonRedisSerializer.
        serializer.setObjectMapper(mapper);
        template.setValueSerializer(serializer);
        // Keys are serialized/deserialized as plain strings.
        template.setKeySerializer(new StringRedisSerializer());
        template.afterPropertiesSet();
        return template;
    }

    /**
     * Builds the Redisson client from redisson-single.yml on the classpath;
     * the bean's shutdown() is invoked on context close.
     */
    @Bean(destroyMethod="shutdown")
    public RedissonClient redisson() throws IOException {
        return Redisson.create(
            Config.fromYAML(new ClassPathResource("redisson-single.yml").getInputStream()));
    }
}
(5)TaskLock
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a scheduled-task method that must run under a Redis distributed lock;
 * the value is the lock key (applied by TaskLockAop).
 *
 * @author:
 * @date: 2019-10-14 14:38
 * @version: v1.0
 */
@Documented
// BUG FIX: restrict the annotation to methods — the aspect only matches
// method executions, so any other placement was silently ignored.
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface TaskLock {
    /** The distributed-lock key. */
    String value();
}
(6)TaskLockAop
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
 * Aspect that serializes @TaskLock-annotated scheduled tasks across the
 * cluster via a Redisson distributed lock.
 *
 * @author lipo
 * @version v1.0
 * @date 2019-10-14 14:40
 */
@Component
@Aspect
@Slf4j
public class TaskLockAop {

    @Autowired
    private RedissonClient redissonClient;

    /**
     * Acquires the lock named by the @TaskLock value before running the task,
     * and releases it afterwards when still held.
     *
     * @param pjp the intercepted task method
     * @return the task method's return value
     * @throws Throwable if the lock cannot be acquired, or from the task itself
     */
    @Around("execution(@TaskLock * * (..))")
    public Object taskAround(ProceedingJoinPoint pjp) throws Throwable {
        TaskLock taskAnnotation =
                ((MethodSignature) pjp.getSignature()).getMethod().getAnnotation(TaskLock.class);
        String lockKey = taskAnnotation.value();
        RLock lock = redissonClient.getLock(lockKey);
        // Wait up to 20s for the lock; lease of 5s auto-unlocks a crashed holder.
        // (Synchronous tryLock replaces the original tryLockAsync + Future.get,
        // which blocked on get() anyway.)
        boolean acquired = lock.tryLock(20, 5, TimeUnit.SECONDS);
        if (!acquired) {
            throw new Exception("获取锁失败");
        }
        log.info("获得了锁");
        try {
            return pjp.proceed();
        } finally {
            // BUG FIX: the original unlocked in finally even when the lock was
            // never acquired (IllegalMonitorStateException) or after the lease
            // expired and another node took it (releasing someone else's lock).
            // Only unlock while this thread still holds it.
            if (lock.isHeldByCurrentThread()) {
                lock.unlock();
                log.info("释放了锁");
            }
        }
    }
}
(7)RedisConstant
/**
 * Redis key names shared across the project.
 *
 * NOTE(review): a constant interface is an anti-pattern (Effective Java,
 * "prefer a noninstantiable utility class"); kept as an interface here to
 * avoid breaking any types that implement it.
 *
 * @author
 * @version v1.0
 * @date 2019-11-13 10:43
 */
public interface RedisConstant {
    String TOPIC = "TOPIC";
    String MQ_LIST = "MQ_LIST";
    String MQ_ZSET = "MQ_ZSET";
    String STATIS_UV = "STATIS_UV";
    // Lock key for the list-consuming scheduled task (see RedisConsumeTask).
    String CONSUME_REDIS_LIST_TASK = "CONSUME_REDIS_LIST_TASK";
    // Lock key for a zset-consuming task — presumably used by a sibling task; verify against callers.
    String CONSUME_REDIS_ZSET_TASK = "CONSUME_REDIS_ZSET_TASK";
}
(8)RedisConsumeTask
import com.gyt.dlsb.common.base.RedisConstant;
import com.gyt.dlsb.common.core.aop.TaskLock;
import com.gyt.dlsb.service.impl.RedisService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
/**
 * Scheduled task that consumes the Redis message list every 5 seconds.
 * The @TaskLock annotation makes TaskLockAop hold a cluster-wide Redisson
 * lock around each run, so only one instance executes at a time.
 */
@Component
public class RedisConsumeTask {
    @Autowired
    private RedisService redisService;

    // Lock key guarding this task across all application instances.
    @TaskLock(RedisConstant.CONSUME_REDIS_LIST_TASK)
    // cron "0/5 * * * * *": fire every 5 seconds.
    @Scheduled(cron = "0/5 * * * * *")
    public void consumeMqList() {
        System.out.println("开始");
        // Delegates the actual draining of the list to the service layer.
        redisService.consumeMqList();
        System.out.println("结束");
    }
}
大功告成。测试分布式锁是否生效时,可以把等待时间设置得短一些、自动解锁时间设置得长一些,然后在两个端口分别启动项目,并在TaskLockAop类中打上断点:当一个实例持有锁时,另一个实例再进来是获取不到锁的。