package com.wlzx;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;

import redis.clients.jedis.BinaryJedisCluster;
import redis.clients.jedis.Client;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.JedisClusterConnectionHandler;
import redis.clients.jedis.JedisClusterInfoCache;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.JedisSlotBasedConnectionHandler;
import redis.clients.jedis.PipelineBase;
import redis.clients.jedis.exceptions.JedisMovedDataException;
import redis.clients.jedis.exceptions.JedisRedirectionException;
import redis.clients.util.JedisClusterCRC16;
import redis.clients.util.SafeEncoder;
/**
 * Pipeline support for a Jedis cluster. Jedis 2.x has no native cluster
 * pipeline, so this class reflects the private connection handler and slot
 * cache out of a {@link JedisCluster}, routes each command to the node that
 * owns the key's slot, and buffers the issuing {@link Client}s so that
 * {@link #sync()} can drain all responses in one pass.
 *
 * <p>Not thread-safe: each thread must obtain its own instance via
 * {@link #pipelined(JedisCluster)}.
 */
public class RedisClusterUtil extends PipelineBase implements Closeable {

    /** Slot-based handler reflected out of the JedisCluster instance. */
    private JedisSlotBasedConnectionHandler connectionHandler;

    /** Slot -> pool cache reflected out of the connection handler. */
    private JedisClusterInfoCache clusterInfoCache;

    /** Clients with queued commands, in issue order; drained by sync(). */
    private final Queue<Client> clients = new LinkedList<>();

    /** One borrowed Jedis per pool so commands for the same node share a connection. */
    private final Map<JedisPool, Jedis> jedisMap = new HashMap<>();

    /** True while at least one command has been queued since the last sync/close. */
    private boolean hasDataInBuf = false;

    public RedisClusterUtil() {
    }

    /**
     * Static factory: builds a pipeline helper bound to the given cluster.
     *
     * @param jedisCluster the cluster whose connections will be pipelined
     * @return a ready-to-use pipeline utility
     * @throws IllegalStateException if the cluster internals cannot be reached
     */
    public static RedisClusterUtil pipelined(JedisCluster jedisCluster) {
        RedisClusterUtil rcu = new RedisClusterUtil();
        rcu.initJedisCluster(jedisCluster);
        return rcu;
    }

    /**
     * Reflects the private connection handler and slot cache out of the
     * cluster object (Jedis 2.x exposes no public accessors for them).
     *
     * <p>Fails fast instead of swallowing the error: a half-initialized
     * instance would otherwise throw a NullPointerException on first use.
     */
    private void initJedisCluster(JedisCluster jedisCluster) {
        try {
            Field conn = BinaryJedisCluster.class.getDeclaredField("connectionHandler");
            conn.setAccessible(true);
            connectionHandler = (JedisSlotBasedConnectionHandler) conn.get(jedisCluster);
            Field clusterInfo = JedisClusterConnectionHandler.class.getDeclaredField("cache");
            clusterInfo.setAccessible(true);
            clusterInfoCache = (JedisClusterInfoCache) clusterInfo.get(connectionHandler);
        } catch (ReflectiveOperationException | IllegalArgumentException | SecurityException e) {
            throw new IllegalStateException(
                    "Unable to access JedisCluster internals via reflection", e);
        }
    }

    /** Re-reads the slot-to-node mapping, e.g. after a MOVED redirection. */
    private void refreshCluster() {
        connectionHandler.renewSlotCache();
    }

    /**
     * Best-effort drain of any responses still sitting in the connection's
     * read buffer, so the connection is clean before being returned to its
     * pool. Errors are deliberately ignored: the connection is about to be
     * recycled anyway.
     */
    private void flushCachedData(Jedis jedis) {
        try {
            jedis.getClient().getAll();
        } catch (RuntimeException ignored) {
            // Intentionally swallowed — see method comment.
        }
    }

    /**
     * Reads back every queued response in issue order, then releases all
     * borrowed connections.
     *
     * @throws JedisRedirectionException if the cluster redirected a command;
     *         on MOVED the local slot cache is refreshed before rethrowing
     */
    public void sync() {
        try {
            for (Client client : clients) {
                // generateResponse(...).get() raises the command's error, if any.
                generateResponse(client.getOne()).get();
            }
        } catch (JedisRedirectionException jre) {
            if (jre instanceof JedisMovedDataException) {
                // Slot moved to another node — refresh the mapping for next time.
                refreshCluster();
            }
            throw jre;
        } finally {
            for (Jedis jedis : jedisMap.values()) {
                flushCachedData(jedis);
            }
            // Cleared before close() so close() does not flush a second time.
            hasDataInBuf = false;
            try {
                close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Releases every borrowed connection back to its pool, flushing any
     * unread responses first, and resets all pipeline state.
     */
    @Override
    public void close() throws IOException {
        clean();
        clients.clear();
        for (Jedis jedis : jedisMap.values()) {
            if (hasDataInBuf) {
                flushCachedData(jedis);
            }
            jedis.close();
        }
        jedisMap.clear();
        hasDataInBuf = false;
    }

    /** {@inheritDoc} Routes by the CRC16 slot of the UTF-8 encoded key. */
    @Override
    protected Client getClient(String key) {
        return getClient(SafeEncoder.encode(key));
    }

    /** {@inheritDoc} Routes by the CRC16 slot of the raw key bytes. */
    @Override
    protected Client getClient(byte[] key) {
        Jedis jedis = getJedis(JedisClusterCRC16.getSlot(key));
        Client client = jedis.getClient();
        clients.add(client);
        return client;
    }

    /**
     * Returns the (cached) connection for the node that owns {@code slot},
     * borrowing one from the slot's pool on first use.
     */
    private Jedis getJedis(int slot) {
        JedisPool pool = clusterInfoCache.getSlotPool(slot);
        Jedis jedis = jedisMap.get(pool);
        if (null == jedis) {
            jedis = pool.getResource();
            jedisMap.put(pool, jedis);
        }
        hasDataInBuf = true;
        return jedis;
    }

    /**
     * All nodes of the cluster.
     *
     * @return the seed host/port set; TODO fill in your own cluster nodes
     */
    public static Set<HostAndPort> getRedisClusterNodes() {
        Set<HostAndPort> jedisClusterNodes = new HashSet<>();
        // Add your own cluster nodes here.
        return jedisClusterNodes;
    }

    /**
     * Benchmark driver: reads a batch size from stdin, loads data-1000.txt
     * and writes each line via the pipeline, syncing every batch-size keys.
     */
    public static void main(String[] args) throws Exception {
        final int batchSize;
        try (Scanner scanner = new Scanner(System.in)) {
            batchSize = scanner.nextInt();
        }
        long start = System.currentTimeMillis();
        // Read the input file before opening cluster resources so a read
        // failure cannot leak connections.
        Path path = Paths.get("data-1000.txt");
        List<String> list = Files.readAllLines(path, StandardCharsets.UTF_8);
        // NOTE(review): hard-coded credential — load from configuration instead.
        JedisCluster cluster =
                new JedisCluster(getRedisClusterNodes(), 5000, 3000, 3, "1qazXSW@", new JedisPoolConfig());
        RedisClusterUtil rc = RedisClusterUtil.pipelined(cluster);
        int count = 1;
        try {
            for (String str : list) {
                rc.setex("chenne_" + count, 300, str);
                if (count % batchSize == 0) {
                    rc.sync(); // drain responses every batchSize commands
                }
                count++;
            }
            rc.sync(); // drain the final partial batch
        } finally {
            rc.close();
            cluster.close();
        }
        long end = System.currentTimeMillis();
        System.out.println(end - start);
    }
}
// Benchmark note: with the code above, reading 10 million lines and writing
// them took only about 30 seconds. Of course, this also depends on the
// performance of the cluster servers.