基于redis的分布式锁的实现,说明一下,该实现的redis连接基于spring的template,连接不一样则需适当修改代码
编程不只关注于具体固定的代码,为了不误导读者,追本溯源,这里先贴上我实现分布式锁时所基于的几点考虑:
1.应设置锁超时,避免某条线程崩溃情况下永远无法释放锁。
2.在锁超时机制下,产生两大问题之一,C0超时,C1,C2先后检测到C0的超时,C1执行移除原锁获得新锁,C2又执行移除原锁获得新锁(此时C1,C2同时获得锁,并且C2移除了redis中C1的锁),应避免C1、C2同时获得锁,因此应该有检测是否真的获得锁的操作(移除原锁获取新锁时,应同时获得原锁,判断是否被其他线程先一步取得锁,如果被先一步取得则进入等待)
3.在锁超时机制下,产生两大问题之二,C0超时,C1获得锁,C0执行完删除了C1的锁,(错误解决方法,C0删除锁之前获取当前锁情况比对,C0比对当前锁是自己的执行释放,该解决方法可能会出现C0设置锁,C0锁超时,C0删除锁之前获取当前锁情况比对,C1删除了C0锁设置了新锁,C0比对当前锁是自己的执行释放锁,但此时释放的是C1的锁)
接下来直接上代码吧
关于template的配置,之前为了方便在redis中肉眼看键名,把key的序列化类给修改了
<bean id="serializer" class="org.springframework.data.redis.serializer.StringRedisSerializer"></bean>
<bean id="redisTemplate" class="org.springframework.data.redis.core.RedisTemplate">
<property name="connectionFactory" ref="connectionFactory" />
<property name="keySerializer" ref="serializer"></property>
<property name="hashKeySerializer" ref="serializer"></property>
</bean>
锁代码,由于项目基于spring,注入时本身属于单例,有需要单例的读者,把构造函数修改下即可
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.RedisSerializer;
/**
 * Redis-backed distributed lock built on Spring's {@link RedisTemplate}.
 *
 * <p>The lock is stored as a Redis hash under {@code lockKey}; each holder writes
 * one hash entry whose field is a random id and whose value is the deadline
 * (Redis server time + expire, in ms). A holder whose deadline has passed is
 * treated as crashed and its entry may be removed by a contender.
 *
 * <p>NOTE(review): the two pipelined round trips are not atomic, so a narrow
 * race window remains (the author's own 2017-10-18 note suggests WATCH/MULTI
 * as the cleaner fix) — the post-write re-read below only narrows, not closes,
 * that window. Verify before relying on this for strict mutual exclusion.
 *
 * <p>When injected by Spring the bean is already a singleton; adapt the
 * constructor if you need singleton semantics outside a container.
 */
public class DistributedConcurrent {

    private final RedisTemplate<String, Object> template2;
    private final RedisSerializer<String> keySerializer;
    private final RedisSerializer<Object> valueSerializer;
    private final String lockKey;
    /** Lock lifetime in milliseconds; a holder exceeding it is considered dead. */
    private long expire = 2000;

    /**
     * @param template2 configured template; its key/value serializers are reused
     *                  so stored fields match what the rest of the app sees
     * @param lockKey   Redis key of the hash that represents this lock
     */
    @SuppressWarnings("unchecked")
    public DistributedConcurrent(RedisTemplate<String, Object> template2, String lockKey) {
        this.template2 = template2;
        this.lockKey = lockKey;
        this.keySerializer = (RedisSerializer<String>) template2.getKeySerializer();
        this.valueSerializer = (RedisSerializer<Object>) template2.getValueSerializer();
    }

    /**
     * Blocks until the lock is acquired, retrying every 100 ms.
     *
     * @return a handle that must later be passed to {@link #releaseLock}
     */
    public DistributedLock lock() {
        DistributedLock lock = requireLock();
        while (lock == null) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can still observe it.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
            lock = requireLock();
        }
        return lock;
    }

    /**
     * Performs a single acquisition attempt.
     *
     * @return the acquired lock, or {@code null} if another client holds it
     */
    @SuppressWarnings("unchecked")
    private DistributedLock requireLock() {
        final byte[] lockKeyBytes = keySerializer.serialize(lockKey);
        long t1 = System.currentTimeMillis();
        // Round trip 1: read every current holder plus the Redis server clock.
        // Using server time avoids depending on synchronized client clocks.
        List<Object> pipeline = template2.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection conn) throws DataAccessException {
                conn.hGetAll(lockKeyBytes);
                conn.time();
                return null;
            }
        });
        // Guard BEFORE indexing: the original dereferenced get(1) ahead of its
        // own null/size check, making that check dead code.
        if (pipeline == null || pipeline.size() < 2) {
            return null;
        }
        long cur = (long) pipeline.get(1);
        Set<String> keysList = new HashSet<>();
        List<byte[]> timeouts = new ArrayList<>();
        Map<String, Long> holders = (Map<String, Long>) pipeline.get(0);
        if (holders != null) {
            for (Entry<String, Long> holder : holders.entrySet()) {
                keysList.add(holder.getKey());
                if (cur > holder.getValue()) {
                    // Deadline passed: this holder is presumed crashed.
                    timeouts.add(keySerializer.serialize(holder.getKey()));
                }
            }
        }
        if (keysList.size() != timeouts.size()) {
            return null; // at least one live holder exists
        }
        // No holder, or every holder has timed out: try to take the lock.
        String uuid = UuidUtil.getUuid();
        final byte[] hashKeyBytes = keySerializer.serialize(uuid);
        final byte[][] expiredFields = timeouts.toArray(new byte[timeouts.size()][]);
        final long pass = System.currentTimeMillis() - t1;
        // Round trip 2: re-read holders, write our own entry (deadline is the
        // server time adjusted by local elapsed time), drop the expired ones.
        List<Object> pipelined2 = template2.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection conn) throws DataAccessException {
                conn.hGetAll(lockKeyBytes);
                conn.hSet(lockKeyBytes, hashKeyBytes, valueSerializer.serialize(cur + pass + expire));
                if (expiredFields.length > 0) {
                    conn.hDel(lockKeyBytes, expiredFields);
                }
                return null;
            }
        });
        // If the re-read shows any field we did not see in round trip 1,
        // another client raced us between the two pipelines and got there first.
        boolean locked = true;
        Map<String, Long> recheck = (Map<String, Long>) pipelined2.get(0);
        for (Entry<String, Long> entry : recheck.entrySet()) {
            if (!keysList.contains(entry.getKey())) {
                locked = false;
            }
        }
        if (locked) {
            DistributedLock lock = new DistributedLock();
            lock.lockKey = lockKey;
            lock.lockHashKey = uuid;
            return lock;
        }
        // Lost the race: remove our own entry so it cannot shadow the winner.
        template2.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection conn) throws DataAccessException {
                conn.hDel(lockKeyBytes, hashKeyBytes);
                return null;
            }
        });
        return null;
    }

    /** Releases a previously acquired lock by deleting its hash entry. */
    public void releaseLock(final DistributedLock lock) {
        template2.executePipelined(new RedisCallback<Object>() {
            @Override
            public Object doInRedis(RedisConnection conn) throws DataAccessException {
                conn.hDel(keySerializer.serialize(lock.lockKey),
                        keySerializer.serialize(lock.lockHashKey));
                return null;
            }
        });
    }

    /**
     * Handle returned by {@link #lock()}; identifies this holder's hash entry.
     * Static nested (rather than inner) so it carries no hidden reference to
     * the enclosing instance.
     */
    public static class DistributedLock {
        public String lockKey;
        public String lockHashKey;
    }

    /** @return lock lifetime in milliseconds */
    public long getExpire() {
        return expire;
    }

    /** @param expire lock lifetime in milliseconds */
    public void setExpire(long expire) {
        this.expire = expire;
    }
}
import java.util.UUID;
public class UuidUtil {
private static final char[] _UU64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
.toCharArray();
public static String getUuid(){
UUID uu = UUID.randomUUID();
long L = uu.getMostSignificantBits();
long R = uu.getLeastSignificantBits();
char[] cs = new char[22];
int hex;
// 从L64位取10次,每次取6位, 0-9位
long cur = L;
for (int i = 9, off = 4; i >= 0; --i) {
cur = cur >>> off;
hex = ((int) cur & 63);
cs[i] = _UU64[hex];
off = 6;
}
// 从R64位取10次,每次取6位, 11-20位
cur = R;
for (int i = 20, off = 2; i >= 11; --i) {
cur = cur >>> off;
hex = ((int) cur & 63);
cs[i] = _UU64[hex];
off = 6;
}
// 从L64位取最后的4位 + R64位头2位拼上, 10位
hex = (((int) L & 15) << 2) | (((int) cur >>> 6) & 3);
cs[10] = _UU64[hex];
hex = ((int) R & 3) << 4;
// 剩下的两位最后取, 21位
cs[21] = _UU64[hex];
// 返回字符串
return new String(cs);
}
}
——————————2017-10-18——————————
解决问题2, C1,C2同时获得锁的问题,可以使用redis的watch与multi事务操作,这样子实现分布式锁的代码就简单多了