package com.XX.XX.util.easyexcel;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.ExcelWriter;
import com.alibaba.excel.write.metadata.WriteSheet;
import com.alibaba.excel.write.style.column.LongestMatchColumnWidthStyleStrategy;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.lang.reflect.ParameterizedType;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
/**
* AbstractExcelImportExecutor
*
* @author wangchen
* @date 2022/8/4 19:53
*/
@Slf4j
public abstract class AbstractExcelImportExecutor<T extends BaseImportModel> {
public String handle(MultipartFile file) {
Class<T> clazz = (Class<T>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0];
ExcelImportContext context = new ExcelImportContext();
context.setFileName(Objects.requireNonNull(file.getOriginalFilename()).substring(0, file.getOriginalFilename().lastIndexOf(".")));
UploadDataListener<T> listener = new UploadDataListener<>(this, context);
try {
// 读取Excel数据
EasyExcel.read(file.getInputStream(), clazz, listener).sheet().doRead();
} catch (IOException e) {
log.error("", e);
}
return context.getExportFileName();
}
/**
* 数据处理
*
* @param dataList 解析的数据
* @param context 上下文
*/
protected abstract void execute(List<T> dataList, ExcelImportContext context);
/**
* 错误文件提示下载
*
* @param dataList 错误数据
* @param context 上下文
* @param isCache 是否缓存错误文件多次下载
* @author hao.chen
* @time 2023/2/10 13:55
*/
protected void exceptionHandle(List<T> dataList, ExcelImportContext context, boolean isCache) {
if (CollectionUtils.isEmpty(dataList)) {
return;
}
Class<T> clazz = (Class<T>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0];
String fileName = context.getFileName();
Consumer<ExcelWriter> consumer = excelWriter -> {
WriteSheet writeSheet = EasyExcel.writerSheet()
.head(clazz)
.registerWriteHandler(new LongestMatchColumnWidthStyleStrategy())
.build();
excelWriter.write(dataList, writeSheet);
};
if (isCache) {
context.setExportFileName(ExcelExportUtils.excelExportTemporary(fileName, consumer));
} else {
ExcelExportUtils.excelExport(fileName, consumer);
}
}
}
package com.XX.XX.util.easyexcel;
import com.alibaba.excel.annotation.ExcelProperty;
import lombok.Data;
/**
* BaseImportModel
*
* @author wangchen
* @date 2022/8/4 19:55
*/
@Data
public class BaseImportModel {
    /**
     * Per-row error message; exported under the "errorMsg" column header when
     * failed rows are written back to the error-report file.
     */
    @ExcelProperty(value = "errorMsg")
    private String errorMessage;
}
package com.XX.XX.util.easyexcel;
import lombok.Data;
/**
* ExcelImportContext
*
* @author wangchen
* @date 2022/10/19 18:24
*/
@Data
public class ExcelImportContext {
    /** Uploaded file's name with its extension stripped; reused as the error-report base name. */
    private String fileName;
    /** True once the final batch has been handed to the executor (set by the upload listener). */
    private boolean isComplete;
    /** Name of the generated (cached) error-report file, if one was produced; otherwise null. */
    private String exportFileName;
}
package com.XX.XX.util.easyexcel;
import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Async;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* UploadDataListener
*
* @author wangchen
* @date 2022/8/4 19:59
*/
@Slf4j
public class UploadDataListener<T extends BaseImportModel> extends AnalysisEventListener<T> {
    /** Flush every 1000 rows so large sheets never accumulate fully in memory (OOM guard). */
    private static final int BATCH_COUNT = 1000;
    /** Rows buffered since the last flush; cleared after each batch is dispatched. */
    private final List<T> cachedDataList = new ArrayList<>(BATCH_COUNT);
    /** Executor that receives each batch. */
    private final AbstractExcelImportExecutor<T> abstractExcelImportExecutor;
    /** Import context shared across all batches of this upload. */
    private final ExcelImportContext context;

    public UploadDataListener(AbstractExcelImportExecutor<T> abstractExcelImportExecutor, ExcelImportContext context) {
        this.abstractExcelImportExecutor = abstractExcelImportExecutor;
        this.context = context;
    }

    /**
     * Called by EasyExcel for each parsed row; buffers it and flushes a batch
     * once {@link #BATCH_COUNT} rows have accumulated.
     *
     * @param data    the parsed row
     * @param context EasyExcel analysis context
     */
    @Override
    public void invoke(T data, AnalysisContext context) {
        // Fix: per-row JSON serialization at INFO level is an accidental O(n) cost on
        // large imports — only serialize when DEBUG logging is actually enabled.
        if (log.isDebugEnabled()) {
            log.debug("解析到一条数据:{}", JSON.toJSONString(data));
        }
        cachedDataList.add(data);
        if (cachedDataList.size() >= BATCH_COUNT) {
            saveData(false);
            // Safe to clear: saveData hands the executor a defensive copy.
            cachedDataList.clear();
        }
    }

    /**
     * Called once the sheet is fully read; flushes the final (possibly partial) batch.
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        saveData(true);
        log.info("所有数据解析完成!");
    }

    @Override
    public void invokeHeadMap(Map<Integer, String> headMap, AnalysisContext context) {
        log.info("表头数据 excelHead= {}", headMap);
    }

    /**
     * Dispatches the buffered rows to the executor. Batches are delivered in sheet
     * order, synchronously on the parsing thread.
     *
     * <p>Fix: removed the former {@code @Async} annotation — it was a no-op (this
     * listener is created with {@code new}, never a Spring proxy, and the call is a
     * self-invocation), and had it actually run asynchronously it would have raced
     * with {@code cachedDataList.clear()} in {@link #invoke}.
     *
     * @param isComplete whether this is the final batch of the upload
     */
    public void saveData(boolean isComplete) {
        log.info("{}条数据,开始处理!", cachedDataList.size());
        context.setComplete(isComplete);
        // Fix: pass a copy — the caller clears cachedDataList immediately after this
        // returns, which would corrupt the batch if the executor retains the reference.
        abstractExcelImportExecutor.execute(new ArrayList<>(cachedDataList), context);
        log.info("处理完成!");
    }
}