TODO:不同的业务环境下,需要不同的配置。我的生产,适合几乎不变的大key,即便是变了也不影响。
结论:
refreshAfterWrite:可以不重写reload。如果缓存过期,则一个线程去获取数据,其他线程返回老数据。
refreshAfterWrite + CacheLoader.asyncReloading:第一次加载数据是同步的,以后所有加载数据都是异步的。如果当前数据过期则异步线程去获取新的数据,所有线程都先返回老数据。
expireAfterWrite:如果缓存过期,则阻塞所有线程,一个线程去获取数据。
expireAfterAccess:一段时间内没有访问则过期,如果有多个线程同时查询,则阻塞所有线程,一个线程去获取数据。
可以尝试使用 Map&lt;String, Supplier&lt;String&gt;&gt; cacheLoadFunctionMap 来封装一个工具类。
生产代码
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

import lombok.extern.slf4j.Slf4j;

import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@Component
@Slf4j
public class AsyncCacheExample {
private static final Executor DEFAULT_REOLAD_EXECUTOR =
new ThreadPoolExecutor(1, // 没必要多个
4,
60,
TimeUnit.MILLISECONDS,
new LinkedBlockingDeque<>(10), // 队列长度+非核心线程数>key的数量
new ThreadFactoryBuilder().setNameFormat("default-reload-cache-task-pool-%d").build());
private final LoadingCache<String, String> cache;
public AsyncCacheExample() {
this.cache = CacheBuilder.newBuilder()
.maximumSize(10) // 数量不超过key的数量
.refreshAfterWrite(10, TimeUnit.MINUTES)
.recordStats()// 记录缓存统计信息
.concurrencyLevel(4)// 并发级别默认是4,控制并发写的数量。(没感觉,似乎没啥用,影响并发写线程数量的是CacheLoader.asyncReloading,这个concurrencyLevel似乎是控制的槽数量?)
.build(CacheLoader.asyncReloading(new CacheLoader<String, String>() {
@Override
public String load(String key) {
try {
return loadData(key);
}catch (Exception e){
log.error(e.getMessage());
// 异常处理
}
}
}, DEFAULT_REOLAD_EXECUTOR));
}
private static String loadData(String key) {
// 模拟从数据库加载数据
return "json";
}
public String get(String key){
try {
return cache.get(key);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
}
生产代码2
/**
 * Factory for Guava LoadingCaches that refresh entries asynchronously after a write TTL.
 * All caches built here share one single-thread reload executor.
 */
public final class LocalCacheUtils {

    private LocalCacheUtils() {
        // utility class — no instances
    }

    /** Shared background-reload executor for every cache built by this class. */
    private static final Executor DEFAULT_RELOAD_EXECUTOR =
            new ThreadPoolExecutor(1,
                    1,
                    60,
                    TimeUnit.SECONDS, // was MILLISECONDS; moot while core==max, but correct if the pool ever grows
                    new LinkedBlockingDeque<>(1000),
                    new ThreadFactoryBuilder().setNameFormat("default-reload-cache-task-pool-%d").build());

    /**
     * Builds a LoadingCache whose entries are reloaded asynchronously after {@code duration}.
     * Stale values are served to callers while the background reload runs.
     *
     * @param duration refresh interval after a write
     * @param unit     time unit of {@code duration}
     * @param loader   function that loads the value for a key; must not return null
     * @return a cache holding at most 50 entries (keep the key set below that)
     */
    public static <K, V> LoadingCache<K, V> buildAsyncReloadingCache(long duration, TimeUnit unit, Function<K, V> loader) {
        return CacheBuilder.newBuilder()
                .maximumSize(50) // should be >= number of distinct keys
                .refreshAfterWrite(duration, unit)
                .build(CacheLoader.asyncReloading(
                        new CacheLoader<K, V>() {
                            @Override
                            public V load(K key) {
                                return loader.apply(key);
                            }
                        }, DEFAULT_RELOAD_EXECUTOR));
    }
}
// 使用:
private final LoadingCache&lt;String, Map&lt;String, List&lt;String&gt;&gt;&gt; xxxyyyListMap =
        LocalCacheUtils.buildAsyncReloadingCache(5, TimeUnit.MINUTES, k -&gt; getData());

private Map&lt;String, List&lt;String&gt;&gt; getData() {
    return ...; // 从数据库等数据源加载
}
测试expireAfterWrite:
package com.tangxz.cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Demonstrates expireAfterWrite: when an entry expires, ALL readers of that key
 * block while a single thread reloads the value.
 */
public class CacheMultiThreadTest {
    public static void main(String[] args) {
        AtomicInteger integer = new AtomicInteger(0);
        LoadingCache<String, String> cache = CacheBuilder.newBuilder()
                .expireAfterWrite(1, TimeUnit.SECONDS)
                .build(new CacheLoader<String, String>() {
                    @Override
                    public String load(String key) throws Exception {
                        // Simulating loading data from database
                        System.out.println("Loading data for key: " + key);
                        return "Data for key: " + key + ":" + integer.getAndIncrement();
                    }
                });
        // Two worker threads so reads overlap and contend on the same key
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        // Submit tasks to the thread pool
        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            executorService.submit(() -> {
                try {
                    String data = cache.get("Key");
                    System.out.println("Task " + taskId + " - Data: " + data);
                    Thread.sleep(600); // keep each thread busy past the 1 s TTL
                } catch (ExecutionException e) {
                    e.printStackTrace();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status
                    e.printStackTrace();
                }
            });
        }
        // Stop accepting tasks; queued tasks still run to completion
        executorService.shutdown();
    }
}
测试refreshAfterWrite:
package com.tangxz.cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Demonstrates refreshAfterWrite: after the refresh interval, the next reader
 * triggers a reload while stale values keep being served without blocking.
 */
public class CacheMultiThreadTest {
    public static void main(String[] args) {
        AtomicInteger integer = new AtomicInteger(0);
        LoadingCache<String, String> cache = CacheBuilder.newBuilder()
                .refreshAfterWrite(1, TimeUnit.SECONDS)
                .build(new CacheLoader<String, String>() {
                    @Override
                    public String load(String key) throws Exception {
                        // Simulating loading data from database
                        System.out.println("Loading data for key: " + key);
                        return "Data for key: " + key + ":" + integer.getAndIncrement();
                    }
                });
        // Two worker threads so reads overlap and contend on the same key
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        // Submit tasks to the thread pool
        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            executorService.submit(() -> {
                try {
                    String data = cache.get("Key");
                    System.out.println("Task " + taskId + " - Data: " + data);
                    Thread.sleep(600); // keep each thread busy past the 1 s refresh interval
                } catch (ExecutionException e) {
                    e.printStackTrace();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status
                    e.printStackTrace();
                }
            });
        }
        // Stop accepting tasks; queued tasks still run to completion
        executorService.shutdown();
    }
}