官方示例的导入方式是每个 sheet 建一个监听器:单个 sheet 导入很简单,但多个 sheet 就要建多个监听器,简单倒是简单,就是需要重复建很多个。
多个sheet,使用一个监听器实现导入并保存数据库
service
@Slf4j
@Service
public class ExcelFileSystemServiceImpl implements ExcelFileSystemService {
private static final int NUM_LISTENERS = 8;
@SneakyThrows
@Override
public void uploadFile(MultipartFile file){
try (ExcelReader excelReader = EasyExcelFactory.read(file.getInputStream()).build()) {
excelReader.read(listenersAndReadSheets());
}
}
public List<ReadSheet> listenersAndReadSheets() {
List<ReadSheet> readSheets = new ArrayList<>(NUM_LISTENERS);
for (int i = 0; i < NUM_LISTENERS; i++) {
try {
NoModelDataListener< ?, ? > listener = new NoModelDataListener<>(DaoClassEnum.getDaoClass().get(i));
ReadSheet readSheet = createReadSheet(i, listener, Class.forName(EntityClassEnum.getEntityClass().get(i)));
readSheets.add(readSheet);
} catch (Exception e) {
// 处理可能的异常,例如日志记录或错误回报
log.error("Error initializing listener/readSheet for " + EntityClassEnum.getEntityClass().get(i));
e.printStackTrace();
}
}
return readSheets;
}
private ReadSheet createReadSheet(int sheetIndex, NoModelDataListener<?,?> listener, Class<?> entityClass) {
return EasyExcelFactory.readSheet(sheetIndex)
.headRowNumber(2)
.head(entityClass)
.registerReadListener(listener)
.build();
}
}
NoModelDataListener
批量写数据库可以自己实现
@Slf4j
public class NoModelDataListener<X extends BaseAreaEntity, Y extends BatchBaseMapper<X>> extends AnalysisEventListener<X> {

    /** Flush threshold: rows are persisted and the cache reset every BATCH_COUNT rows. */
    private static final int BATCH_COUNT = 10000;

    /** Mapper class used to look up the Spring bean that persists the cached rows. */
    private final Class<Y> yClass;

    /** Rows buffered since the last flush; replaced (not cleared) after each save. */
    private List<X> cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);

    /**
     * @param yClass the mapper class for this sheet's entity type; the cast is
     *               safe as long as callers pass a {@code BatchBaseMapper} subtype
     */
    @SuppressWarnings("unchecked")
    public NoModelDataListener(Class<?> yClass) {
        this.yClass = (Class<Y>) yClass;
    }

    /** Called once per parsed row; flushes to the database when the cache is full. */
    @Override
    public void invoke(X data, AnalysisContext context) {
        cachedDataList.add(data);
        if (cachedDataList.size() >= BATCH_COUNT) {
            saveData();
            cachedDataList = ListUtils.newArrayListWithExpectedSize(BATCH_COUNT);
        }
    }

    /** Flushes any remaining buffered rows once the sheet is fully parsed. */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        saveData();
        log.info("所有数据解析完成!");
    }

    /**
     * Persists the cached rows through the mapper bean resolved from the Spring context.
     * Skips the call entirely when the cache is empty (empty sheet, or a final flush
     * right after an exact-multiple batch) — batch inserts with an empty list would
     * otherwise generate invalid SQL.
     */
    private void saveData() {
        if (cachedDataList.isEmpty()) {
            return;
        }
        log.info("{}条数据,开始存储数据库!", cachedDataList.size());
        Y y = SpringContentUtils.getBean(yClass);
        y.insertBatch(cachedDataList, BATCH_COUNT / 100);
        log.info("存储数据库成功!");
    }
}