背景:数据导入基于线程池执行,需要获取每个任务的返回值以记录导入结果;采用 EasyExcel 读取 Excel 文件并写入数据库。
Callable 任务实现:返回执行结果。注意:该类不由 Spring 管理,无法使用注解注入,所需依赖通过构造器传入。
package org.xxx.interfaces.infra.utils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.Assert;
import org.xxx.interfaces.api.ImportJsonController.ImportHistoryJsonDto;
import org.xxx.interfaces.api.ImportJsonController.ImportHistoryJsonService;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
@Slf4j
public class MyCallable implements Callable {
    // NOTE(review): left as raw Callable on purpose — the caller submits this to a
    // CompletionService<String>; changing to Callable<Map<String, Object>> here would
    // break that call site unless both are updated together.

    /** Service performing the actual import; supplied via constructor because this class is not Spring-managed. */
    private final ImportHistoryJsonService importHistoryJsonService;
    /** One Excel row of import data. */
    private final ImportHistoryJsonDto importJsonDto;
    /** Request parameters; must contain key "interfaceFlag" selecting which service method to invoke. */
    private final Map<String, String> map;

    /**
     * 构造器注入需要的对象 — dependencies are passed in explicitly since this class
     * is instantiated per task and cannot use Spring injection.
     */
    public MyCallable(ImportHistoryJsonDto importJsonDto, Map<String, String> map,
                      ImportHistoryJsonService importHistoryJsonService) {
        this.importJsonDto = importJsonDto;
        this.map = map;
        this.importHistoryJsonService = importHistoryJsonService;
    }

    /**
     * Executes the import for one row and returns a result map with keys
     * "importJsonDto" (the row DTO, with useTime filled in) and "result"
     * (the raw service response string).
     *
     * @throws Exception propagated from the underlying service call
     */
    @Override
    public Map<String, Object> call() throws Exception {
        long startMillis = System.currentTimeMillis();
        String interfaceFlag = map.get("interfaceFlag");
        // Fix: isNotBlank is the single-argument check; isNoneBlank is the varargs form
        // (behavior identical for one argument, but this states the intent correctly).
        Assert.isTrue(StringUtils.isNotBlank(interfaceFlag), "接口标识不能为空");
        String result;
        // 多个接口根据传入的参数来区分走哪个逻辑 — dispatch on the interface flag.
        if ("poRestData".equals(interfaceFlag)) {
            result = importHistoryJsonService.poRestData(importJsonDto.getRequestData());
        } else {
            result = importHistoryJsonService.rcvTrxHeaderService(importJsonDto.getRequestData());
        }
        // 记录程序耗时 — record elapsed time on the DTO for the import log.
        importJsonDto.setUseTime(System.currentTimeMillis() - startMillis);
        // 返回的结果记录起来,跟踪导入记录 — package DTO + raw result for the caller to persist.
        Map<String, Object> resultMap = new HashMap<>(4);
        resultMap.put("importJsonDto", importJsonDto);
        resultMap.put("result", result);
        return resultMap;
    }
}
EasyExcel + 线程池实现调用:每次请求都会创建一个新的线程池和读取监听器。
package org.xxx.interfaces.api.ImportJsonController;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.read.listener.ReadListener;
import com.alibaba.excel.util.ListUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.Assert;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import org.xxx.interfaces.infra.mapper.EmployeeMapper;
import org.xxx.interfaces.infra.utils.MyCallable;
import org.xxx.interfaces.infra.utils.Task;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.*;
/**
 * 期初历史数据导入,期初用一次,后面不再启用,所以代码冗余在一起
 *
 * <p>One-off controller for importing opening/historical data: reads rows from an
 * uploaded Excel file with EasyExcel, fans each row out to a thread pool as a
 * {@link MyCallable} task, drains results through a {@link CompletionService},
 * and persists one import-log row per Excel row.</p>
 */
@RestController("importHistoryJsonController.v1")
@Slf4j
public class ImportHistoryJsonController {

    // 临时借用一下 — mapper borrowed only to persist import-log rows.
    // Fix: field was declared "EmployeeMapper EmployeeMapper" but referenced as
    // "employeeMapper" below, which does not compile; use lowerCamelCase.
    @Autowired
    private EmployeeMapper employeeMapper;

    @Autowired
    private ProfileClient profileClient;

    @Autowired
    private ImportHistoryJsonService importHistoryJsonService;

    /**
     * 文件上传 — Excel upload endpoint.
     *
     * <p>Rows are buffered in batches of {@code batchCount}; each batch is submitted to
     * the pool, then exactly that many results are taken back (completion order) and
     * logged. Batching bounds in-flight work instead of relying on a rejection policy.</p>
     *
     * @param file uploaded Excel file (first sheet is read)
     * @param map  request parameters forwarded to each task; must contain "interfaceFlag"
     * @return "success" when the whole file has been processed
     * @throws IOException if the upload stream cannot be opened
     */
    @PostMapping("/updateJson/poHeaderNewServiceUpload")
    public String poHeaderNewServiceUpload(MultipartFile file, @RequestParam Map<String, String> map) throws IOException {
        // 单次缓存的数据量,控制线程执行量 — batch size read from profile config.
        int batchCount = Integer.parseInt(profileClient.getProfileValueByOptions("IMPORT_JSON_BATCH_COUNT"));
        // 等待超时间,单位秒 — per-result wait timeout in seconds.
        int timeOutSeconds = Integer.parseInt(profileClient.getProfileValueByOptions("IMPORT_JSON_TIME_OUT"));
        ThreadPoolExecutor myThreadPool = new ThreadPoolExecutor(
                Runtime.getRuntime().availableProcessors() - 1,
                Runtime.getRuntime().availableProcessors() * 2,
                timeOutSeconds * 2L,
                TimeUnit.SECONDS,
                new ArrayBlockingQueue<>(1024));
        // Tasks actually return Map<String, Object> (see MyCallable#call); the original
        // CompletionService<String> only "worked" because MyCallable is a raw Callable.
        CompletionService<Map<String, Object>> cs = new ExecutorCompletionService<>(myThreadPool);
        log.info("---xxx---线程池信息---{}", myThreadPool);
        try {
            // Anonymous listener: EasyExcel calls invoke() once per row, then doAfterAllAnalysed().
            // The input stream is closed automatically by EasyExcel.
            EasyExcel.read(file.getInputStream(), ImportHistoryJsonDto.class, new ReadListener<ImportHistoryJsonDto>() {
                /** 临时存储 — rows buffered until the batch is full. */
                private List<ImportHistoryJsonDto> cachedDataList = ListUtils.newArrayListWithExpectedSize(batchCount);
                /** 临时存储future — futures of the current batch; only its size is used when draining. */
                private List<Future<Map<String, Object>>> cachedFutureList = ListUtils.newArrayListWithExpectedSize(batchCount);

                @Override
                public void invoke(ImportHistoryJsonDto data, AnalysisContext context) {
                    Integer row = context.readRowHolder().getRowIndex();
                    // Fix: validate BEFORE dereferencing — the original called data.setFileName()
                    // first, so a null row threw NPE before the assertion could report it.
                    Assert.isTrue(data != null, "导入excel行数据为null,请检查excel文件,行号=" + row);
                    Assert.isTrue(data.toString().length() > 10, "导入excel行数据长度小于10,请检查excel文件,行号=" + row);
                    data.setFileName(file.getOriginalFilename());
                    data.setRow(row);
                    cachedDataList.add(data);
                    if (cachedDataList.size() >= batchCount) {
                        saveData();
                        // 存储完成清理 list — reset both buffers for the next batch.
                        cachedDataList = ListUtils.newArrayListWithExpectedSize(batchCount);
                        cachedFutureList = ListUtils.newArrayListWithExpectedSize(batchCount);
                    }
                }

                // 最后一次保存走这里 — flush the final (possibly partial) batch.
                @Override
                public void doAfterAllAnalysed(AnalysisContext context) {
                    log.info("---xxx---最后一次保存");
                    saveData();
                }

                /**
                 * Submits the buffered batch to the pool, then drains exactly as many
                 * results (in completion order) and persists one log row per result.
                 */
                private void saveData() {
                    // 循环缓存的list数据,让线程去执行 — submit every buffered row.
                    for (int i = 0; i < cachedDataList.size(); i++) {
                        log.info("---xxx--{}---开始准备提交给线程处理", i + 1);
                        MyCallable myCallable = new MyCallable(cachedDataList.get(i), map, importHistoryJsonService);
                        Future<Map<String, Object>> future = cs.submit(myCallable);
                        cachedFutureList.add(future);
                        log.info("---xxx--{}---结束提交给线程处理", i + 1);
                    }
                    // 循环异步拿到线程执行的结果,处理异常保存记录 — drain completion queue.
                    // Fix: local was named "map", shadowing the @RequestParam map above.
                    for (int i = 0; i < cachedFutureList.size(); i++) {
                        Map<String, Object> resultMap = new HashMap<>();
                        ImportHistoryJsonDto importJsonDto;
                        try {
                            log.info("---xxx--{}---开始获取返回结果", i + 1);
                            // take() blocks until ANY task finishes; results arrive in completion order.
                            Map<String, Object> taskResult = cs.take().get(timeOutSeconds, TimeUnit.SECONDS);
                            resultMap = JSONObject.parseObject(JSONObject.toJSONString(taskResult), Map.class);
                            log.info("---xxx--{}---结束获取返回结果", i + 1);
                            String s = resultMap.get("result").toString();
                            importJsonDto = JSON.parseObject(JSON.toJSONString(resultMap.get("importJsonDto")), ImportHistoryJsonDto.class);
                            if (s.contains("ERROR") || s.contains("FAILED") || s.contains("PART")) {
                                importJsonDto.setErrMsg(s);
                                importJsonDto.setImportState("error");
                            }
                        } catch (Exception e) {
                            // Fix: log via SLF4J instead of printStackTrace().
                            log.error("---xxx---获取导入结果失败", e);
                            if (e instanceof InterruptedException) {
                                // Restore interrupt status so the container can see it.
                                Thread.currentThread().interrupt();
                            }
                            importJsonDto = JSON.parseObject(JSON.toJSONString(resultMap.get("importJsonDto")), ImportHistoryJsonDto.class);
                            if (importJsonDto == null) {
                                // resultMap is empty when take()/get() itself failed (e.g. timeout),
                                // so parse yields null; fall back to a bare log record instead of NPE.
                                // NOTE(review): assumes ImportHistoryJsonDto has a no-arg constructor — confirm.
                                importJsonDto = new ImportHistoryJsonDto();
                                importJsonDto.setFileName(file.getOriginalFilename());
                            }
                            // Fix: original tested Objects.isNull(e), which is never true inside a
                            // catch block; the intent was to special-case NullPointerException.
                            importJsonDto.setErrMsg(e instanceof NullPointerException ? "json导入,空指针异常!" : e.toString());
                            importJsonDto.setImportState("error");
                        }
                        employeeMapper.insertImportHistoryJsonLog(importJsonDto);
                    }
                }
            }).sheet().doRead();
        } finally {
            // jdk的线程池需要手动关闭 — shut down even if reading throws, or the threads leak.
            myThreadPool.shutdown();
        }
        log.info("---xxx---json数据导入---主线程运行结束");
        return "success";
    }

    /**
     * Demo endpoint: Future.get() returns results in SUBMISSION order, so the
     * slowest first task gates all subsequent results.
     */
    @PostMapping("/updateJson/poHeaderNewServiceUpload1")
    public String poHeaderNewServiceUpload1() throws Exception {
        ThreadPoolExecutor executor = new ThreadPoolExecutor(3, 3, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(10));
        try {
            Future<String> future1 = executor.submit(new Task(3));
            Future<String> future2 = executor.submit(new Task(2));
            Future<String> future3 = executor.submit(new Task(1));
            String result1 = future1.get();
            String result2 = future2.get();
            String result3 = future3.get();
            System.out.println("result1:" + result1 + "\t" + "result2:" + result2 + "\t" + "result3:" + result3);
        } finally {
            // Fix: shut down in finally so a failed get() does not leak the pool.
            executor.shutdown();
        }
        return "success";
    }

    /**
     * Demo endpoint: CompletionService.take() returns results in COMPLETION order.
     */
    @PostMapping("/updateJson/poHeaderNewServiceUpload2")
    public String poHeaderNewServiceUpload2() throws Exception {
        ThreadPoolExecutor pool = new ThreadPoolExecutor(3, 3, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(10));
        try {
            CompletionService<String> cs = new ExecutorCompletionService<>(pool);
            cs.submit(new Task(3, "name" + 3));
            cs.submit(new Task(1, "name" + 1));
            cs.submit(new Task(2, "name" + 2));
            for (int i = 0; i < 3; i++) {
                // Fix: the original called cs.take().get() twice per iteration — it consumed
                // two results on i=0, the third on i=1, then blocked forever on a fourth
                // take() that could never complete. Take once, print once.
                String s = cs.take().get();
                System.out.println(s);
            }
        } finally {
            // Fix: shut down in finally so an exception does not leak the pool.
            pool.shutdown();
        }
        return "success";
    }
}