1.第一次失败:数据库查询超时(该查询设置了超时时间 timeout=30 秒,配置见下面的 MyBatis 语句)
<select id="selectOrdId" resultType="com.huifu.louis.monitor.application.dto.PostransDetailDto" timeout="30">
SELECT
id,ord_id as ordId
FROM
postrans_ord_log
WHERE
update_time between date_format(#{startDateStr,jdbcType=VARCHAR},'%Y-%m-%d %H:%i:%s')
and date_format(#{endDateStr,jdbcType=VARCHAR},'%Y-%m-%d %H:%i:%s')
order by update_time asc limit #{size}, #{limit}
</select>
2.第二次失败:数据库查询超时。原因是 WHERE 按 create_time 过滤却按 id 排序,优化器无法用同一索引同时完成过滤和排序,导致全表扫描(索引失效)。修改查询语句为 where create_time between 'x' and 'x' order by create_time asc,使过滤和排序走同一个索引。
3.第三次失败:java.lang.OutOfMemoryError: Java heap space
在JVM中如果约98%的时间用于GC且每次GC回收的可用Heap不足2%时将抛出OutOfMemoryError(这种情形对应的提示通常是 GC overhead limit exceeded;而 Java heap space 表示堆已无法满足新的内存分配请求,两者都说明堆内存耗尽)。
JVM堆的设置是指java程序运行过程中JVM可以调配使用的内存空间的设置.JVM在启动的时候会自动设置Heap size的值,其初始空间(即-Xms)是物理内存的1/64,最大空间(-Xmx)是物理内存的1/4。可以利用JVM提供的-Xmn -Xms -Xmx等选项可进行设置。
在测试环境增加170万数据,jvm参数增加到3g,执行两个小时左右还是内存溢出。
考虑
1.线程数太多频繁切换上下文导致频繁的GC
2.存在内存泄露
优化:
1.由原来的40个线程减少到8个线程
2.不要频繁的创建对象
PostransOrdLogExample postransOrdLogExample = new PostransOrdLogExample();
PosOrdLogExtendExample posOrdLogExtendExample = new PosOrdLogExtendExample();
for (String ordId : ordIds) {
List<String> details = new ArrayList<>();
postransOrdLogExample.clear();
// postransOrdLogExample对象就创建一次,通过clear来设置查询条件
postransOrdLogExample.createCriteria().andOrdIdEqualTo(ordId);
List<PostransOrdLog> postransOrdLogs = postransOrdLogMapper.selectByExample(postransOrdLogExample);
if (CollectionUtils.isEmpty(postransOrdLogs)) {
break;
}
3.由原来的将所有数据写入内存流改为每500条数据写入文件,清空内存数据,所有数据写入内存完成,上传jfile文件,删除本地文件。
4.cat.enable.sql=false,查询sql太多了,关掉cat监控
修改之后功能代码:
public void process(String msg) throws IOException {
// 删除过期文件
delExpireFile();
Date starDate = DateUtil.getStartOfDay(DateUtils.addDays(new Date(), -1));
Date endDate = DateUtil.getStartOfDay(new Date());
SimpleDateFormat sf = new SimpleDateFormat(Pattern);
String startDateStr = sf.format(starDate);
String endDateStr = sf.format(endDate);
List<String> ordIdList = new ArrayList<>();
// 根据创建时间分页查ordId
Integer limit = PyxisConfig.getInstance().getIntProperty("ord.detail.limit", 100000);
Integer size = 0;
while (true) {
List<PostransDetailDto> postransDetailDtos = postransOrdLogMapper.selectOrdId(startDateStr, endDateStr, size, limit);
if (CollectionUtils.isEmpty(postransDetailDtos)) {
break;
}
ordIdList.addAll(postransDetailDtos.stream().
map(postransDetailDto -> postransDetailDto.getOrdId()).
collect(Collectors.toList()));
size = ordIdList.size();
}
log.info("{}日订单数量:{}", startDateStr, ordIdList.size());
if (CollectionUtils.isEmpty(ordIdList)) {
return;
}
int count = ordIdList.size();
Integer threshold = PyxisConfig.getInstance().getIntProperty("multiThread.thred", 1000);
FileUtils.deleteQuietly(new File(CSV_NAME));
CSVFormat format = CSVFormat.DEFAULT;
try (OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(CSV_NAME));
CSVPrinter csvFilePrinter = new CSVPrinter(out,
format)) {
out.write(new String(new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF}));
// 保存csv数据
saveCsvHeader(csvFilePrinter);
if (count > threshold) {
// 超过1000条,用任务线程进行并发处理。
// 每个线程处理量
Integer threadNum = PyxisConfig.getInstance().getIntProperty("thread.num", 5);
CountDownLatch countDown = new CountDownLatch(threadNum);
int countPerThread = count / threadNum;
int startIndex = 0;
int endIndex = 0;
for (int i = 0; i < threadNum; i++) {
startIndex = endIndex;
if (i == threadNum - 1) {
endIndex = count;
} else {
endIndex = startIndex + countPerThread;
}
List<String> subOrdId = ordIdList.subList(startIndex, endIndex);
String traceNo = LogUtil.getTraceNo();
final CSVPrinter csvFilePrinter1 = csvFilePrinter;
executor.execute(new Runnable() {
List<String> subOrdIds = subOrdId;
String traceno = traceNo;
@Override
public void run() {
try {
LogUtil.setTraceNo(traceno);
log.info(String.format("写入csv订单的数据量:%s", subOrdIds.size()));
writeOrd(subOrdIds, csvFilePrinter1);
} catch (Exception e) {
log.error("写入csv订单出现异常", e);
} finally {
countDown.countDown();
}
}
});
}
try {
countDown.await();
} catch (Exception e) {
log.error("写入csv订单出现异常", e);
}
} else {
log.info("写入csv订单:{}", count);
try {
writeOrd(ordIdList, csvFilePrinter);
} catch (Exception e) {
log.error("写入csv订单出现异常", e);
}
}
} catch (Exception e) {
log.error("失败", e);
}
// 上传jfile文件并发送
pushJfile(startDateStr);
}
private void writeOrd(List<String> ordIds, CSVPrinter csvFilePrinter) {
try {
int j = 0;
List<List<String>> postransOrdDetails = new ArrayList<>();
PostransOrdLogExample postransOrdLogExample = new PostransOrdLogExample();
PosOrdLogExtendExample posOrdLogExtendExample = new PosOrdLogExtendExample();
for (String ordId : ordIds) {
List<String> details = new ArrayList<>();
postransOrdLogExample.clear();
postransOrdLogExample.createCriteria().andOrdIdEqualTo(ordId);
List<PostransOrdLog> postransOrdLogs = postransOrdLogMapper.selectByExample(postransOrdLogExample);
if (CollectionUtils.isEmpty(postransOrdLogs)) {
break;
}
PostransOrdLog postransOrdLog = postransOrdLogs.get(0);
details.add(postransOrdLog.getProductId());
details.add(postransOrdLog.getOemId());
details.add(postransOrdLog.getOrdId());
details.add(postransOrdLog.getTransDate());
details.add(postransOrdLog.getTransCode());
details.add(postransOrdLog.getTransType());
details.add(postransOrdLog.getCloudPayFlag());
details.add(postransOrdLog.getDoubleExempt());
details.add(postransOrdLog.getTransStat());
details.add(postransOrdLog.getAcctStat());
details.add(postransOrdLog.getTransAmt().toString());
details.add(postransOrdLog.getFeeAmt().toPlainString());
details.add(postransOrdLog.getAgentId());
details.add(postransOrdLog.getBagentId());
posOrdLogExtendExample.clear();
posOrdLogExtendExample.createCriteria().andOrdIdEqualTo(postransOrdLog.getOrdId());
List<PosOrdLogExtend> posOrdLogExtends = posOrdLogExtendMapper.selectByExample(posOrdLogExtendExample);
if (CollectionUtils.isNotEmpty(posOrdLogExtends)) {
PosOrdLogExtend posOrdLogExtend = posOrdLogExtends.get(0);
try {
JSONObject attachInfo = null;
if (StringUtils.isNotBlank(posOrdLogExtend.getAttachInfo())) {
attachInfo = JSON.parseObject(posOrdLogExtend.getAttachInfo());
details.add(attachInfo.getString("tpBagtId"));
} else {
details.add("");
}
} catch (Exception e) {
log.error("订单{}attchInfo解析失败", posOrdLogExtend.getOrdId(), e);
details.add("");
}
}
details.add(postransOrdLog.getMerId());
details.add(postransOrdLog.getPttFlag());
details.add(postransOrdLog.getIsSim());
details.add(postransOrdLog.getIsFirst());
details.add(postransOrdLog.getTsCashStat());
details.add(postransOrdLog.getOrgOrdId());
details.add(postransOrdLog.getCashAmtFee().toPlainString());
details.add(postransOrdLog.getDevId());
details.add(postransOrdLog.getIsActive());
postransOrdDetails.add(details);
j++;
if (j % 500 == 0 || ordIds.size() == j) {
//输出到CSV文件,并清空LIST
synchronized (this) {
for (List<String> detail : postransOrdDetails) {
csvFilePrinter.printRecord(detail);
}
csvFilePrinter.flush();
postransOrdDetails.clear();
}
}
}
} catch (IOException e) {
log.error("保存csv内容失败", e);
}
}
/**
* 保存csv头部信息
*/
private void saveCsvHeader(CSVPrinter printer) {
List<String> csvHeaders = Arrays.asList(Constant.PRODUCT, Constant.OEM, Constant.ORD, Constant.TRANS_DATE,
Constant.TRANS_CODE, Constant.TRANS_TYPE, Constant.CLOUD_PAY_FLAG, Constant.DOUBLE_EXEMPT,
Constant.TRANS_STAT, Constant.ACCT_STAT, Constant.TRANS_AMT, Constant.FEE_AMT,
Constant.AGENT_ID, Constant.BAGENT_ID, Constant.TOP_BAGENT_ID, Constant.MER,
Constant.PTT_FLAG, Constant.IS_SIM, Constant.IS_FIRST, Constant.TS_CASH_STAT,
Constant.ORG_ORD, Constant.CASH_FEE_AMT, Constant.DEV, Constant.IS_ACTIVE);
try {
printer.printRecord(csvHeaders);
printer.flush();
} catch (IOException e) {
log.error("生成csv文件头部失败", e);
}
}
private void pushJfile(String startDateStr) {
String jfileId = "0";
String transDate = null;
try {
if (StringUtils.isNotBlank(startDateStr)) {
transDate = startDateStr.substring(0, 8);
}
String fileName = Constant.ORD_DETAIL + transDate + Constant.FILE_SUFFIX;
log.info("上送jfile开始,文件名:{}", fileName);
jfileId = JFileUtil.uploadFile(FileUtils.readFileToByteArray(new File(CSV_NAME)), fileName);
log.info("交易日期{}的订单摘要上送完成,返回值:{}", transDate, jfileId);
} catch (Exception e) {
log.error("上传jfile文件失败", e);
}
String pushJfileChannel = PyxisConfig.getInstance().getProperty("pushJfile", "C_LOUIS_MONITOR_PUSH_JFILE");
PushJFileBO pushJFileBO = new PushJFileBO();
pushJFileBO.setJfileId(jfileId);
pushJFileBO.setTransDate(transDate);
pushJFileBO.setJfileAppToken(PyxisConfig.getInstance().getProperty("jfile.appToken"));
pushJFileBO.setJfileAppKey(PyxisConfig.getInstance().getProperty("jfile.appKey"));
pushJFileBO.setJfileEnv(PyxisConfig.getInstance().getProperty("jfile.env"));
pushJFileBO.setJfileServerUrl(PyxisConfig.getInstance().getProperty("jfile.serverUrl"));
log.info("发送jfile信息:{}", JSON.toJSONString(pushJFileBO));
sender.sendMessage(pushJFileBO, pushJfileChannel);
JfileInfo jfileInfo = new JfileInfo();
jfileInfo.setJfileId(jfileId);
jfileInfo.setTransDate(transDate);
jfileInfoMapper.insertSelective(jfileInfo);
FileUtils.deleteQuietly(new File(CSV_NAME));
}
public void delExpireFile() {
Date date = com.huifu.module.common.time.DateUtils.addDays(new Date(), -30);
SimpleDateFormat sf = new SimpleDateFormat(YYYY_MM_DD_PATTERN);
String transDate = sf.format(date);
List<JfileInfo> jfileInfoList = jfileInfoMapper.selectByTransDate(transDate);
if(CollectionUtils.isEmpty(jfileInfoList)) {
log.info("jfile文件信息为空,不处理");
return;
}
String appKey = PyxisConfig.getInstance().getProperty("jfile.appKey");
String appToken = PyxisConfig.getInstance().getProperty("jfile.appToken");
String serverUrl = PyxisConfig.getInstance().getProperty("jfile.serverUrl");
String env = PyxisConfig.getInstance().getProperty("jfile.env");
JFileConfig jFileConfig = new JFileConfig(appKey, appToken, serverUrl, env);
JfileClient jfileClient = new JfileClient(jFileConfig.getAppKey(), jFileConfig.getAppToken(), jFileConfig.getServerUrl(), jFileConfig.getEnv());
for(JfileInfo jfileInfo:jfileInfoList) {
JfileResult jfileResult = jfileClient.delete(jfileInfo.getJfileId());
if(!Constant.JFILE_RESP_CODE.equals(jfileResult.getCode())) {
log.error("删除jfileId{}文件失败", jfileInfo.getJfileId());
continue;
}
JfileInfoExample delJfileInfoExample = new JfileInfoExample();
delJfileInfoExample.createCriteria().andJfileIdEqualTo(jfileInfo.getJfileId());
jfileInfoMapper.deleteByExample(delJfileInfoExample);
}
}
错误版本-将数据写入内存流版本:
public void process(String msg) {
Date starDate = DateUtil.getStartOfDay(DateUtils.addDays(new Date(), -1));
Date endDate = DateUtil.getStartOfDay(new Date());
SimpleDateFormat sf = new SimpleDateFormat(Pattern);
String startDateStr = sf.format(starDate);
String endDateStr = sf.format(endDate);
List<String> ordIdList = new ArrayList<>();
// 根据创建时间分页查ordId
Integer limit = PyxisConfig.getInstance().getIntProperty("ord.detail.limit", 100000);
Integer size = 0;
while (true) {
List<PostransDetailDto> postransDetailDtos = postransOrdLogMapper.selectOrdId(startDateStr, endDateStr, size, limit);
if (CollectionUtils.isEmpty(postransDetailDtos)) {
break;
}
ordIdList.addAll(postransDetailDtos.stream().
map(postransDetailDto -> postransDetailDto.getOrdId()).
collect(Collectors.toList()));
size = ordIdList.size();
}
log.info("{}日订单数量:{}", startDateStr, ordIdList.size());
if (CollectionUtils.isEmpty(ordIdList)) {
return;
}
int count = ordIdList.size();
Integer threshold = PyxisConfig.getInstance().getIntProperty("multiThread.thred", 1000);
ByteArrayOutputStream csvOut = null;
// 保存csv数据
csvOut = saveCsvHeader(csvOut);
if (count > threshold) {
// 超过1000条,用任务线程进行并发处理。
// 每个线程处理量
Integer threadNum = PyxisConfig.getInstance().getIntProperty("thread.num", 5);
CountDownLatch countDown = new CountDownLatch(threadNum);
int countPerThread = count / threadNum;
int startIndex = 0;
int endIndex = 0;
for (int i = 0; i < threadNum; i++) {
startIndex = endIndex;
if (i == threadNum - 1) {
endIndex = count;
} else {
endIndex = startIndex + countPerThread;
}
List<String> subOrdId = ordIdList.subList(startIndex, endIndex);
String traceNo = LogUtil.getTraceNo();
ByteArrayOutputStream finalCsvOut = csvOut;
executor.execute(new Runnable() {
List<String> subOrdIds = subOrdId;
String traceno = traceNo;
@Override
public void run() {
try {
LogUtil.setTraceNo(traceno);
log.info(String.format("写入csv订单的数据量:%s", subOrdIds.size()));
writeOrd(subOrdIds, finalCsvOut);
} catch (Exception e) {
log.error("写入csv订单出现异常", e);
} finally {
countDown.countDown();
}
}
});
}
try {
countDown.await();
} catch (Exception e) {
log.error("写入csv订单出现异常", e);
}
} else {
log.info("写入csv订单:{}", count);
try {
writeOrd(ordIdList, csvOut);
} catch (Exception e) {
log.error("写入csv订单出现异常", e);
}
}
// 上传jfile文件并发送
pushJfile(startDateStr, csvOut);
}
private void writeOrd(List<String> ordIds, ByteArrayOutputStream csvOut) {
CSVPrinter csvFilePrinter = null;
try {
List<List<String>> postransOrdDetails = new ArrayList<>();
for (String ordId : ordIds) {
List<String> details = new ArrayList<>();
PostransOrdLogExample postransOrdLogExample = new PostransOrdLogExample();
postransOrdLogExample.createCriteria().andOrdIdEqualTo(ordId);
List<PostransOrdLog> postransOrdLogs = postransOrdLogMapper.selectByExample(postransOrdLogExample);
if (CollectionUtils.isEmpty(postransOrdLogs)) {
break;
}
PostransOrdLog postransOrdLog = postransOrdLogs.get(0);
details.add(postransOrdLog.getProductId());
details.add(postransOrdLog.getOemId());
details.add(postransOrdLog.getOrdId());
details.add(postransOrdLog.getTransDate());
details.add(postransOrdLog.getTransCode());
details.add(postransOrdLog.getTransType());
details.add(postransOrdLog.getCloudPayFlag());
details.add(postransOrdLog.getDoubleExempt());
details.add(postransOrdLog.getTransStat());
details.add(postransOrdLog.getAcctStat());
details.add(postransOrdLog.getTransAmt().toString());
details.add(postransOrdLog.getFeeAmt().toPlainString());
details.add(postransOrdLog.getAgentId());
details.add(postransOrdLog.getBagentId());
PosOrdLogExtendExample posOrdLogExtendExample = new PosOrdLogExtendExample();
posOrdLogExtendExample.createCriteria().andOrdIdEqualTo(postransOrdLog.getOrdId());
List<PosOrdLogExtend> posOrdLogExtends = posOrdLogExtendMapper.selectByExample(posOrdLogExtendExample);
if (CollectionUtils.isNotEmpty(posOrdLogExtends)) {
PosOrdLogExtend posOrdLogExtend = posOrdLogExtends.get(0);
try {
JSONObject attachInfo = null;
if (StringUtils.isNotBlank(posOrdLogExtend.getAttachInfo())) {
attachInfo = JSON.parseObject(posOrdLogExtend.getAttachInfo());
details.add(attachInfo.getString("tpBagtId"));
} else {
details.add("");
}
} catch (Exception e) {
log.error("订单{}attchInfo解析失败", posOrdLogExtend.getOrdId(), e);
details.add("");
}
}
details.add(postransOrdLog.getMerId());
details.add(postransOrdLog.getPttFlag());
details.add(postransOrdLog.getIsSim());
details.add(postransOrdLog.getIsFirst());
details.add(postransOrdLog.getTsCashStat());
details.add(postransOrdLog.getOrgOrdId());
details.add(postransOrdLog.getCashAmtFee().toPlainString());
details.add(postransOrdLog.getDevId());
details.add(postransOrdLog.getIsActive());
postransOrdDetails.add(details);
}
CSVFormat formator = CSVFormat.DEFAULT;
ByteArrayOutputStream bout = new ByteArrayOutputStream();
OutputStreamWriter out = new OutputStreamWriter(bout);
csvFilePrinter = new CSVPrinter(out,
formator);
for (List<String> details : postransOrdDetails) {
csvFilePrinter.printRecord(details);
}
csvFilePrinter.flush();
synchronized (this) {
csvOut.write(bout.toByteArray());
bout.close();
}
} catch (IOException e) {
log.error("保存csv内容失败", e);
} finally {
try {
if (csvFilePrinter != null) {
csvFilePrinter.close();
}
} catch (Exception ex) {
log.error("流关闭失败", ex);
}
}
}