前端上传 MultipartFile文件,后端读取文件数据转为对象集合手写sql批量插入数据
话不多说,上代码
导入pom
<dependency>
<groupId>com.univocity</groupId>
<artifactId>univocity-parsers</artifactId>
<version>2.9.1</version>
</dependency>
Controller层
/**
 * Imports CSV data into the table identified by tableName/tableType.
 *
 * @param file      uploaded CSV file
 * @param tableName target table name
 * @param tableType table type selector (1 or 2), picks which DTO/mapper is used
 * @return number of imported rows wrapped in the common result type
 */
@Log(title = "导入CSV数据", businessType = BusinessType.OTHER)
@ApiOperation(value = "导入CSV数据")
@PostMapping("/importCSV")
public R<Integer> importCsv(@RequestParam("file") MultipartFile file,
                            @RequestParam("tableName") String tableName,
                            @RequestParam("tableType") Integer tableType) {
    // BUG FIX: tableName/tableType were annotated with MyBatis' @Param, which Spring MVC
    // ignores for request binding (binding then only works by parameter-name reflection).
    // @RequestParam is the correct web-layer annotation and makes the contract explicit.
    return R.ok(dataCommonOperationService.importCSVData(file, tableName, tableType));
}
ServiceImpl实现类层
/**
 * Imports CSV file data into the table named {@code tableName}.
 *
 * @param file      uploaded CSV file
 * @param tableName target table name
 * @param tableType table type selector: 1 -> ImportCsvDataOneDto, 2 -> ImportCsvDataTwoDto
 * @return number of rows inserted
 * @throws ServiceException when the insert affects no rows or tableType is unknown
 */
@Override
public int importCSVData(MultipartFile file, String tableName, Integer tableType){
    int resultNum = 0;
    // tableType decides which DTO the CSV rows are parsed into and which mapper inserts them
    switch (tableType){
        case 1:
            List<ImportCsvDataOneDto> listOne = CSVUtils.read(file, ImportCsvDataOneDto.class);
            // mark every imported row as "audit in progress" before persisting
            // (removed the System.out debug print that was left in the production path)
            listOne.forEach(one -> one.setAuditStatus(DataAuditStatusEnum.AUDIT_ING.getCode()));
            resultNum = dataCommonOperationMapper.batchDataList(tableName, listOne);
            if(resultNum <= 0){
                throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getCode());
            }
            break;
        case 2:
            List<ImportCsvDataTwoDto> listTwo = CSVUtils.read(file, ImportCsvDataTwoDto.class);
            listTwo.forEach(two -> two.setAuditStatus(DataAuditStatusEnum.AUDIT_ING.getCode()));
            resultNum = dataCommonOperationNlgMapper.batchNlgDataList(tableName, listTwo);
            if(resultNum <= 0){
                throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getCode());
            }
            break;
        default:
            // BUG FIX: an unknown tableType used to fall through silently and return 0,
            // which callers could misread as a successful-but-empty import; fail fast instead.
            throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getCode());
    }
    return resultNum;
}
ps：如果一次上传的数据量较大，上述写法会把全部数据拼成一条超长的多行 insert 语句，容易超出数据库单条 SQL 的长度限制（如 MySQL 的 max_allowed_packet），且内存占用较高
可以分批处理数据，每批固定行数地插入数据库表内，如下
/**
 * Imports CSV file data into the table named {@code tableName}, detecting the file's
 * charset first so files with different encodings (GBK, UTF-8, ...) all parse correctly.
 *
 * @param file      uploaded CSV file
 * @param tableName target table name
 * @param tableType table type selector: 1 -> ImportCsvDataOneDto, 2 -> ImportCsvDataTwoDto
 * @return number of rows inserted
 * @throws IOException      when the upload stream cannot be read
 * @throws ServiceException when the file is empty, the insert fails, or tableType is unknown
 */
@Override
@Transactional
public int importCSVData(MultipartFile file, String tableName, Integer tableType) throws IOException {
    String charset = CSVUtils.getCsvCharset(file.getInputStream());
    // BUG FIX: UniversalDetector returns null when it cannot identify the encoding;
    // passing null into InputStreamReader inside CSVUtils.read would throw an NPE.
    if (charset == null) {
        charset = "UTF-8";
    }
    // NOTE(review): getCsvCharset consumes one stream and read() opens a second via
    // file.getInputStream() — assumes the MultipartFile impl allows repeated reads; confirm.
    int resultNum = 0;
    // tableType decides which DTO the CSV rows are parsed into and which mapper inserts them
    switch (tableType){
        case 1:
            List<ImportCsvDataOneDto> listOne = CSVUtils.read(file, ImportCsvDataOneDto.class, charset);
            if(listOne.isEmpty()){
                throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ZERO_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ZERO_ERROR.getCode());
            }
            // mark every imported row as "audit in progress" before persisting
            listOne.forEach(one -> one.setAuditStatus(DataAuditStatusEnum.AUDIT_ING.getCode()));
            resultNum = insert(tableName, listOne);
            if(resultNum <= 0){
                throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getCode());
            }
            break;
        case 2:
            List<ImportCsvDataTwoDto> listTwo = CSVUtils.read(file, ImportCsvDataTwoDto.class, charset);
            if(listTwo.isEmpty()){
                throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ZERO_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ZERO_ERROR.getCode());
            }
            listTwo.forEach(two -> two.setAuditStatus(DataAuditStatusEnum.AUDIT_ING.getCode()));
            resultNum = insertNlg(tableName, listTwo);
            if(resultNum <= 0){
                throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getCode());
            }
            break;
        default:
            // BUG FIX: an unknown tableType used to fall through silently and return 0,
            // which callers could misread as a successful-but-empty import; fail fast instead.
            throw new ServiceException(BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getMessage(), BaseErrorCodeEnum.CODE_ERROR_DATA_IMPORT_ERROR.getCode());
    }
    return resultNum;
}
/**
 * Inserts the parsed rows into {@code tableName} in fixed-size batches so a large
 * upload does not produce one oversized multi-row INSERT statement.
 *
 * @param tableName target table name
 * @param list      rows to insert (already validated non-empty by the caller)
 * @return total number of rows handed to the mapper (list size)
 */
@Transactional
public int insert(String tableName, List<ImportCsvDataOneDto> list) {
    // NOTE(review): this method is invoked directly from importCSVData in the same class;
    // Spring's proxy does not intercept self-invocation, so this @Transactional likely has
    // no effect on its own — the outer importCSVData's @Transactional governs. Confirm.
    // rows per INSERT statement
    final int batchSize = 2000;
    // Simplified from the original batch/lastSize arithmetic: one loop covers both the
    // full batches and the trailing partial batch, with identical insert boundaries.
    for (int from = 0, size = list.size(); from < size; from += batchSize) {
        int to = Math.min(from + batchSize, size);
        dataCommonOperationMapper.batchDataList(tableName, list.subList(from, to));
    }
    return list.size();
}
CSVUtils工具类:(获取对应的编码格式,进行处理,因为csv文件的编码格式不固定,采用了动态获取的办法)
package com.geb.common.utils;
import com.univocity.parsers.common.processor.BeanListProcessor;
import com.univocity.parsers.csv.CsvFormat;
import com.univocity.parsers.csv.CsvParser;
import com.univocity.parsers.csv.CsvParserSettings;
import lombok.extern.slf4j.Slf4j;
import org.mozilla.universalchardet.UniversalDetector;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.List;
/**
 * @author: Jxc
 * @create: 2023-12-27
 * @description: CSV utility — reads a CSV file into a bean list and detects the file's
 * charset, since uploaded CSV files do not have a fixed encoding.
 */
@Component
@Slf4j
public class CSVUtils {

    /** Utility class; not meant to be instantiated (kept @Component for existing wiring). */
    private CSVUtils() {
    }

    /**
     * Reads a CSV file and converts each row into a bean of the given type.
     *
     * @param file    uploaded CSV file
     * @param clazz   target bean class (univocity-annotated)
     * @param charset charset name used to decode the file; falls back to UTF-8 when null/blank
     * @return list of parsed beans; empty list when parsing fails (best-effort contract)
     */
    public static <T> List<T> read(MultipartFile file, Class<T> clazz, String charset) {
        List<T> result = Collections.emptyList();
        // charset detection can fail upstream and hand us null — default to UTF-8
        String effectiveCharset = (charset == null || charset.isEmpty()) ? "UTF-8" : charset;
        BeanListProcessor<T> rowProcessor = new BeanListProcessor<>(clazz);
        CsvParserSettings settings = new CsvParserSettings();
        // BUG FIX: the original called settings.setFormat(new CsvFormat()) AFTER configuring
        // the line separator, replacing the format object and silently discarding the
        // setLineSeparator("\n") setting. Configure the existing format only.
        settings.getFormat().setLineSeparator("\n");
        settings.setProcessor(rowProcessor);
        CsvParser parser = new CsvParser(settings);
        // BUG FIX: try-with-resources — the original never closed the stream/reader (leak)
        try (InputStream in = file.getInputStream();
             InputStreamReader reader = new InputStreamReader(in, effectiveCharset)) {
            parser.parse(reader);
            result = rowProcessor.getBeans();
        } catch (Exception e) {
            // keep the original best-effort contract: log the failure, return an empty list
            log.error("Import csv file failed. message: ", e);
        }
        return result;
    }

    /**
     * Detects the charset of a CSV input stream via juniversalchardet.
     * Note: this consumes the stream; callers must obtain a fresh stream to read the data.
     *
     * @param inputStream CSV content stream (consumed by detection)
     * @return detected charset name, or {@code null} when detection fails
     * @throws IOException when the stream cannot be read
     */
    public static String getCsvCharset(InputStream inputStream) throws IOException {
        byte[] buf = new byte[4096];
        UniversalDetector detector = new UniversalDetector(null);
        int nread;
        // feed the detector until it is confident or the stream ends
        while ((nread = inputStream.read(buf)) > 0 && !detector.isDone()) {
            detector.handleData(buf, 0, nread);
        }
        detector.dataEnd();
        String encoding = detector.getDetectedCharset();
        detector.reset();
        return encoding;
    }
}
Mapper层:
/**
 * Batch-inserts the given DTO rows into the table named {@code tableName}
 * via a single multi-row INSERT (see batchDataList in the mapper XML).
 *
 * @param tableName   target table name (interpolated with ${} in the XML — must be trusted)
 * @param dataOneList rows to insert, bound as "oneList" in the foreach
 * @return number of affected rows as reported by the driver
 */
int batchDataList(@Param("tableName") String tableName, @Param("oneList") List<ImportCsvDataOneDto> dataOneList);
Mapper.xml层:
<!-- 批量插入数据 -->
<!-- NOTE(review): ${tableName} is string-interpolated, not parameter-bound — SQL injection
     risk if tableName can come from untrusted input; validate it against a whitelist in the
     service layer before calling this statement. -->
<insert id="batchDataList">
insert into ${tableName}(space, device, scene, position, query, reply, audit_status, create_time)
values
<foreach item="item" index="index" collection="oneList" separator=",">
<!-- NOTE(review): #{item.model_return} and #{item.create_date} use snake_case property names,
     unlike #{item.auditStatus}; MyBatis resolves them via getModel_return()/getCreate_date().
     Confirm ImportCsvDataOneDto really declares those getters — otherwise these should be the
     camelCase properties matching the reply/create_time columns. -->
(#{item.space}, #{item.device}, #{item.scene}, #{item.position}, #{item.query},
#{item.model_return}, #{item.auditStatus}, #{item.create_date})
</foreach>
</insert>
以上就是全部的csv数据读取导入至数据库,如有补充请留言~