springbatch 批量读写demo

这两天做到一个需求,需要一次性将一张数据表里的所有资源数据中的位置信息录入到redis中,表中的数据有几百万起步,采用的是java定时任务,循环每次查出一万条数据存入redis,测试时发现插入效率非常慢,经过排查发现是通过mybatis查询数据的耗时超过了总耗时的99%,我这台机器上是每查询一万条数据返回耗时87s左右。
解决方法:
在mapper.xml中的查询语句(select标签)中加入fetchSize参数,并将其设置得大一些,如下:
(此处原文为配置示例截图,图片未能保留)
pageSize 每次分页大小配置

/*
 * Copyright yangjunxiong 2021 Wicrenet, Inc. All rights reserved.
 */
package com.aegonthtf.fate.service.batch;

import com.aegonthtf.fate.constant.CommonConstant;
import com.aegonthtf.fate.constant.EventTypeEnum;
import com.aegonthtf.fate.entity.esales.AthActExport;
import com.aegonthtf.fate.entity.user.FateSalesAct;
import com.aegonthtf.fate.service.batch.listener.StartJobExecutionListener;
import com.aegonthtf.fate.service.batch.processor.AthActExportItemProcessor;
import com.google.common.collect.ImmutableMap;
import org.joda.time.LocalDateTime;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.database.*;
import org.springframework.batch.item.database.builder.JdbcPagingItemReaderBuilder;
import org.springframework.batch.item.database.support.MySqlPagingQueryProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * Spring Batch job configuration that copies rows from the ATH_ACT_EXPORT
 * source table (read with a paging JDBC reader ordered by ID) into the
 * corresponding target sales-activity table (written with a batched
 * named-parameter JDBC insert), transforming each row through
 * {@link AthActExportItemProcessor}.
 *
 * NOTE(review): the event type is hard-coded to {@code EventTypeEnum.E1}
 * below, so only E1 rows' table is processed even though the source table
 * also holds E2/E3/E7 — confirm this is intentional.
 */
@Configuration
public class AthActExportBatchJob {
    // Factory for building the Job definition
    @Autowired
    private       JobBuilderFactory         jobBuilderFactory;
    // Factory for building the Step definition
    @Autowired
    private       StepBuilderFactory        stepBuilderFactory;
    // Project-specific listener attached to the job; its behavior is not visible in this file
    @Autowired
    private       StartJobExecutionListener startJobExecutionListener;
    // Qualified because the application context presumably defines multiple DataSources — verify
    @Autowired
    @Qualifier("ThirdDataSource")
    private       DataSource                dataSource;
    private final EventTypeEnum             eventTypeEnum = EventTypeEnum.E1;// ATH_ACT_EXPORT contains E1, E2, E3, E7; this job is pinned to E1
    @Autowired
    private       AthActExportItemProcessor athActExportItemProcessor;

    /**
     * Builds a one-step chunk-oriented Job: read {@code AthActExport} rows,
     * process them into {@code FateSalesAct} items, and batch-insert them.
     * The timestamp is appended to the job/step names so each invocation
     * gets a distinct JobInstance and can be re-run.
     *
     * NOTE(review): parameter name "staratDateTime" is a typo for
     * "startDateTime" (left unchanged here to keep the code byte-identical).
     * NOTE(review): the Job name uses a ":" separator before the date but the
     * Step name does not — confirm whether that asymmetry is intentional.
     *
     * @param staratDateTime wall-clock time used to make the job/step names unique
     * @return the assembled Job, ready to be launched
     */
    //启动器 -> Job entry point / launcher
    public Job itemReaderFromDBJob(LocalDateTime staratDateTime) {
        String date = staratDateTime.toString(CommonConstant.yyyyMMddHHmmss);
        return jobBuilderFactory.get(eventTypeEnum.getiTableName() + ":Job" + ":" + date)
                .start(stepBuilderFactory.get(eventTypeEnum.getiTableName() + ":Step" + date)
                        // Chunk size controls how many items are processed per transaction
                        .<AthActExport, FateSalesAct>chunk(CommonConstant.BEHAVIOR_BATCH_SIZE)
                        // Direct call goes through the @Configuration CGLIB proxy; the
                        // argument is a placeholder — the real value is injected at step
                        // scope via the @Value SpEL expression on the reader bean.
                        .reader(itemReaderFromDB(CommonConstant.NULL))
                        .processor(athActExportItemProcessor)
                        .writer(writer())
                        .build()
                )
                .listener(startJobExecutionListener)
                .build();
    }

    /**
     * Step-scoped paging reader over the source table. Pages of size
     * BEHAVIOR_BATCH_SIZE are fetched with a keyset-style MySQL paging query
     * (see {@link #articleProvider()}), filtered by DEL_FLAG.
     *
     * NOTE(review): the {@code map} job parameter is injected but never used
     * in this method body — presumably it only forces @StepScope late
     * binding; confirm and document upstream, or remove the parameter.
     * NOTE(review): the rowMapper assumes CommonConstant._1.._21 are the
     * 1-based JDBC column indexes matching the SELECT column order — TODO
     * confirm against CommonConstant.
     *
     * @param map raw job parameter value (unused in the body; see note above)
     * @return a configured JdbcPagingItemReader for AthActExport rows
     */
    @Bean
    @StepScope
    public JdbcPagingItemReader<AthActExport> itemReaderFromDB(@Value("#{jobParameters[map]}") String map) {
        return new JdbcPagingItemReaderBuilder<AthActExport>()
                .dataSource(dataSource)
                .name(eventTypeEnum.getiTableName())
                // Large fetchSize hints the JDBC driver to stream more rows per
                // round trip — this was the fix for the slow-read problem
                .fetchSize(CommonConstant.FETCH_SIZE)
                // Bound to the :delFlag placeholder in the WHERE clause
                .parameterValues(ImmutableMap.of("delFlag", CommonConstant._1))
                .pageSize(CommonConstant.BEHAVIOR_BATCH_SIZE)
                // Maps each ResultSet row to an AthActExport entity, column by column
                .rowMapper((resultSet, i) -> {
                            AthActExport entity = new AthActExport();
                            entity.setId(resultSet.getInt(CommonConstant._1));            //ID
                            entity.setInfoKind(resultSet.getString(CommonConstant._2));   //INFO_KIND
                            entity.setEventType(resultSet.getString(CommonConstant._3));  //EVENT_TYPE
                            entity.setTransId(resultSet.getString(CommonConstant._4));    //TRANS_ID
                            entity.setTransDate(resultSet.getString(CommonConstant._5));  //TRANS_DATE   (YYYYMMDD)
                            entity.setTransTime(resultSet.getDate(CommonConstant._6));    //TRANS_TIME    (datetime)
                            entity.setAgentCode(resultSet.getString(CommonConstant._7));  //AGENT_CODE
                            entity.setAgentName(resultSet.getString(CommonConstant._8));  //AGENT_NAME
                            entity.setCustOpenid(resultSet.getString(CommonConstant._9)); //CUST_OPENID
                            entity.setOpenTime(resultSet.getDate(CommonConstant._10));    //OPEN_TIME     (datetime)
                            entity.setSubTime(resultSet.getDate(CommonConstant._11));     //SUB_TIME      (datetime)
                            entity.setAthId(resultSet.getString(CommonConstant._12));     //ATH_ID
                            entity.setCustName(resultSet.getString(CommonConstant._13));  //CUST_NAME
                            entity.setCustPhone(resultSet.getString(CommonConstant._14)); //CUST_PHONE
                            entity.setCustGender(resultSet.getString(CommonConstant._15));//CUST_GENDER
                            entity.setInfoTitle(resultSet.getString(CommonConstant._16)); //INFO_TITLE
                            entity.setInfoSource(resultSet.getString(CommonConstant._17));//INFO_SOURCE
                            entity.setCreatetime(resultSet.getDate(CommonConstant._18));  //CREATETIME
                            entity.setCreater(resultSet.getString(CommonConstant._19));   //CREATER
                            entity.setUpdatetime(resultSet.getDate(CommonConstant._20));  //UPDATETIME
                            entity.setDelFlag(resultSet.getString(CommonConstant._21));   //DEL_FLAG
                            return entity;
                        }
                )
                .queryProvider(articleProvider())
                .build();
    }

    /**
     * Builds the MySQL paging query used by the reader: explicit column list,
     * table chosen by the configured event type, DEL_FLAG filter, sorted by
     * ID ascending (the sort key is what makes paging stable).
     */
    private PagingQueryProvider articleProvider() {
        // NOTE(review): CommonConstant._1 is reused here as a HashMap initial
        // capacity — presumably an int constant 1; confirm.
        Map<String, Order> sortKeys = new HashMap<>(CommonConstant._1);
        sortKeys.put("ID", Order.ASCENDING);
        MySqlPagingQueryProvider provider = new MySqlPagingQueryProvider();
        provider.setSelectClause("ID, INFO_KIND, EVENT_TYPE, TRANS_ID, TRANS_DATE, TRANS_TIME, AGENT_CODE, AGENT_NAME, CUST_OPENID, OPEN_TIME, SUB_TIME, ATH_ID, CUST_NAME, CUST_PHONE, CUST_GENDER, INFO_TITLE, INFO_SOURCE, CREATETIME, CREATER, UPDATETIME, DEL_FLAG");
        provider.setFromClause("FROM " + eventTypeEnum.getiTableName());// source table per event type (E1/E2/E3/E7)
        // Earlier time-window filter, kept for reference:
//        provider.setWhereClause("event_occurred_time >= :startTime AND event_occurred_time < :stopTime");
        provider.setWhereClause("DEL_FLAG = :delFlag");
        provider.setSortKeys(sortKeys);
        return provider;
    }

    /**
     * Batched insert writer for the target table. Named parameters in the SQL
     * are resolved from FateSalesAct bean properties via
     * BeanPropertyItemSqlParameterSourceProvider (e.g. :custName -> getCustName()).
     */
    @Bean
    public JdbcBatchItemWriter<FateSalesAct> writer() {
        JdbcBatchItemWriter<FateSalesAct> writer = new JdbcBatchItemWriter<>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
        writer.setSql("INSERT INTO " + eventTypeEnum.getoTableName() + " (ID, INFO_KIND, INFO_SOURCE, INFO_TITLE, EVENT_TYPE, TRANS_ID, TRANS_DATE, TRANS_TIME, AGENT_CODE, AGENT_NAME, AGENT_OPENID, CUST_OPENID, OPEN_DATE, OPEN_TIME, SUB_DATE, SUB_TIME, ATH_ID, SIS_ID, CUST_ID, CUST_NAME, CUST_PHONE, CUST_GENDER, CUST_AGE, CUST_OPR, CUST_EMAIL, RISK_NAME, FATE_APE, PAY_TIME, FATE_FYP, CREATEDATE, CREATETIME, CREATER, UPDATATIME, DEL_FLAG, POLICY_NO) VALUES (" +
                ":id          ,\n" +
                ":infoKind    ,\n" +
                ":infoSource  ,\n" +
                ":infoTitle   ,\n" +
                ":eventType   ,\n" +
                ":transId     ,\n" +
                ":transDate   ,\n" +
                ":transTime   ,\n" +
                ":agentCode   ,\n" +
                ":agentName   ,\n" +
                ":agentOpenid ,\n" +
                ":custOpenid  ,\n" +
                ":openDate    ,\n" +
                ":openTime    ,\n" +
                ":subDate     ,\n" +
                ":subTime     ,\n" +
                ":athId       ,\n" +
                ":sisId       ,\n" +
                ":custId      ,\n" +
                ":custName    ,\n" +
                ":custPhone   ,\n" +
                ":custGender  ,\n" +
                ":custAge     ,\n" +
                ":custOpr     ,\n" +
                ":custEmail   ,\n" +
                ":riskName    ,\n" +
                ":fateApe     ,\n" +
                ":payTime     ,\n" +
                ":fateFyp     ,\n" +
                ":createdate  ,\n" +
                ":createtime  ,\n" +
                ":creater     ,\n" +
                ":updatatime,\n" +
                ":delFlag,\n" +
                ":policyNo\n)");
        writer.setDataSource(dataSource);
        return writer;
    }
}
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 2
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值