The source code has been uploaded to Gitee; you can download it from https://gitee.com/heaven_light/batch-bank.git and take a look.
1. The Maven dependencies whose versions need to be pinned are as follows:
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
    <java.version>1.7</java.version>
    <mybatis.starter.version>1.3.1</mybatis.starter.version>
    <mapper.starter.version>1.1.3</mapper.starter.version>
    <pagehelper.starter.version>1.1.1</pagehelper.starter.version>
    <druid.version>1.0.27</druid.version>
</properties>
You also need the spring-boot-starter-batch, mysql-connector-java, and spring-boot-starter-web dependencies.
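For reference, the matching <dependencies> section would look roughly like the sketch below. The coordinates are the standard ones for these starters, but check the pom.xml in the Gitee repository for the authoritative version; the Spring Boot starters take their versions from the parent POM.

<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-batch</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
    </dependency>
    <dependency>
        <groupId>org.mybatis.spring.boot</groupId>
        <artifactId>mybatis-spring-boot-starter</artifactId>
        <version>${mybatis.starter.version}</version>
    </dependency>
    <dependency>
        <groupId>tk.mybatis</groupId>
        <artifactId>mapper-spring-boot-starter</artifactId>
        <version>${mapper.starter.version}</version>
    </dependency>
    <dependency>
        <groupId>com.github.pagehelper</groupId>
        <artifactId>pagehelper-spring-boot-starter</artifactId>
        <version>${pagehelper.starter.version}</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>druid</artifactId>
        <version>${druid.version}</version>
    </dependency>
</dependencies>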
2. The most important piece, the application.properties configuration file, is as follows:
spring.application.name=batch-bank
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.url=jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=utf-8&autoReconnect=true&failOverReadOnly=false
spring.datasource.username=root
spring.datasource.password=123456
spring.datasource.driverClassName=com.mysql.jdbc.Driver
spring.datasource.initialSize=5
spring.datasource.minIdle=5
spring.datasource.maxActive=20
spring.datasource.maxWait=60000
spring.datasource.timeBetweenEvictionRunsMillis=60000
spring.datasource.minEvictableIdleTimeMillis=300000
spring.datasource.validationQuery=SELECT 1 FROM DUAL
spring.datasource.testWhileIdle=true
spring.datasource.testOnBorrow=false
spring.datasource.testOnReturn=false
spring.datasource.poolPreparedStatements=true
spring.datasource.maxPoolPreparedStatementPerConnectionSize=20
spring.datasource.filters=stat,wall,log4j
spring.datasource.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
mybatis.type-aliases-package=com.net.lnk.batch.bank.model
mapper.mappers=tk.mybatis.mapper.common.Mapper
mapper.not-empty=false
mapper.identity=MYSQL
pagehelper.helperDialect=mysql
pagehelper.reasonable=true
pagehelper.supportMethodsArguments=true
pagehelper.params=count=countSql
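Depending on the Spring Boot version, the Druid-specific keys above (initialSize, maxActive, filters, ...) may not be bound automatically by spring.datasource.type alone. A common way to make sure they reach the DruidDataSource setters is to declare the pool bean yourself; the sketch below shows that pattern (an assumption on my part — the repository may rely on Boot's generic binding instead, and the package name is hypothetical):

package com.net.lnk.batch.bank.config; // hypothetical location

import javax.sql.DataSource;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.alibaba.druid.pool.DruidDataSource;

@Configuration
public class DataSourceConfig {

    // Binds every spring.datasource.* key (initialSize, maxActive, filters, ...)
    // onto the matching DruidDataSource setter via relaxed binding.
    @Bean
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource dataSource() {
        return new DruidDataSource();
    }
}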
3. Here is the Spring Batch related configuration from the startup class BatchBankApplication:
package com.net.lnk.batch.bank;
import javax.sql.DataSource;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.support.DatabaseType;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.util.StringUtils;
import com.net.lnk.batch.bank.batch.BankcardImportJobListener;
import com.net.lnk.batch.bank.batch.BankcardItemProcessor;
import com.net.lnk.batch.bank.batch.BankcardItemWriter;
import com.net.lnk.batch.bank.dto.BankcardDTO;
import com.net.lnk.batch.bank.model.BankcardModel;
@SpringBootApplication
@MapperScan({ "com.net.lnk.batch.bank.dao" })
@EnableBatchProcessing
public class BatchBankApplication {
    public static void main(String[] args) {
        SpringApplication.run(BatchBankApplication.class, args);
    }

    // Not annotated with @Bean: with @EnableBatchProcessing the JobRepository is
    // auto-configured; this method only shows how a MySQL-backed repository is built.
    public JobRepository jobRepository(DataSource dataSource, PlatformTransactionManager transactionManager)
            throws Exception {
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setDataSource(dataSource);
        jobRepositoryFactoryBean.setTransactionManager(transactionManager);
        jobRepositoryFactoryBean.setDatabaseType(DatabaseType.MYSQL.name());
        return jobRepositoryFactoryBean.getObject();
    }

    // Likewise auto-configured by @EnableBatchProcessing; shown here for reference only.
    public SimpleJobLauncher jobLauncher(DataSource dataSource, PlatformTransactionManager transactionManager)
            throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository(dataSource, transactionManager));
        return jobLauncher;
    }

    @Bean
    public BankcardImportJobListener bankcardImportJobListener() {
        return new BankcardImportJobListener();
    }

    @Bean
    public Job importJob(JobBuilderFactory jobs, Step step) {
        return jobs.get("importBankcardJob").incrementer(new RunIdIncrementer()).flow(step) // assign the Step to the Job
                .end().listener(bankcardImportJobListener()) // bind the listener
                .build();
    }

    @Bean
    @StepScope
    public FlatFileItemReader<BankcardDTO> bankcardItemReader(
            @Value("#{jobParameters['input.file.name']}") String filePath) {
        // Use a FlatFileItemReader to read the file
        FlatFileItemReader<BankcardDTO> reader = new FlatFileItemReader<BankcardDTO>();
        if (StringUtils.isEmpty(filePath)) {
            // fall back to the bundled bankcard.csv when no job parameter is given
            reader.setResource(new ClassPathResource("bankcard.csv"));
        } else {
            reader.setResource(new ClassPathResource(filePath));
        }
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setNames(new String[] { "bankName", "cardName", "cardNoLength", "cardNoSample", "binLength", "bin",
                "cardType" });
        BeanWrapperFieldSetMapper<BankcardDTO> fieldSetMapper = new BeanWrapperFieldSetMapper<BankcardDTO>();
        fieldSetMapper.setTargetType(BankcardDTO.class);
        DefaultLineMapper<BankcardDTO> lineMapper = new DefaultLineMapper<BankcardDTO>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        reader.setLineMapper(lineMapper);
        return reader;
    }

    @Bean
    public ItemProcessor<BankcardDTO, BankcardModel> bankcardItemProcessor() {
        return new BankcardItemProcessor();
    }

    @Bean
    public ItemWriter<BankcardModel> bankcardItemWriter() {
        return new BankcardItemWriter();
    }

    @Bean
    public Step bankcardImportStep(StepBuilderFactory stepBuilderFactory, ItemReader<BankcardDTO> reader,
            ItemProcessor<BankcardDTO, BankcardModel> processor, ItemWriter<BankcardModel> writer) {
        return stepBuilderFactory.get("bankcardImportStep").<BankcardDTO, BankcardModel>chunk(200) // commit every 200 items
                .reader(reader) // bind the reader to the step
                .processor(processor) // bind the processor to the step
                .writer(writer) // bind the writer to the step
                .build();
    }
}
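BankcardImportJobListener, BankcardItemProcessor, and BankcardItemWriter live under com.net.lnk.batch.bank.batch and are not listed here. As a rough sketch of the processor (the field mapping and the DTO/model accessors are assumptions; see the repository for the real class), it simply converts each parsed DTO into the persistent model:

package com.net.lnk.batch.bank.batch;

import org.springframework.batch.item.ItemProcessor;

import com.net.lnk.batch.bank.dto.BankcardDTO;
import com.net.lnk.batch.bank.model.BankcardModel;

public class BankcardItemProcessor implements ItemProcessor<BankcardDTO, BankcardModel> {

    @Override
    public BankcardModel process(BankcardDTO item) throws Exception {
        // Copy the CSV fields onto the model; returning null would skip the item.
        BankcardModel model = new BankcardModel();
        model.setBankName(item.getBankName()); // assumed getters/setters
        model.setCardName(item.getCardName());
        model.setBin(item.getBin());
        model.setCardType(item.getCardType());
        return model;
    }
}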
4. All files the project needs can be found under the resources directory.
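In particular, each line of bankcard.csv must follow the tokenizer's field order above (bankName, cardName, cardNoLength, cardNoSample, binLength, bin, cardType). A hypothetical row (values invented for illustration, not taken from the repository) would look like:

中国工商银行,样例借记卡,19,6212050000000000000,6,621205,DC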
Start the project. Visiting http://localhost:8080/import/bankcard?fileName=bankcard1 manually triggers the import job; http://localhost:8080/bankcard/621205 queries a single imported record; http://localhost:8080/bankcard/list?bankCode=01020000 queries imported records in bulk; and http://localhost:8080/bankcard/pageable?bankCode=01020000&pageNum=2&pageSize=10 queries imported records page by page.
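The controller behind these URLs is not shown above. A minimal sketch of the import endpoint (class name, package, and the ".csv" suffix handling are assumptions; the real controller is in the repository) illustrates how the fileName request parameter becomes the input.file.name job parameter read by the @StepScope reader:

package com.net.lnk.batch.bank.controller; // hypothetical location

import java.util.Date;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class BankcardImportController {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job importJob; // the importBankcardJob defined above

    @GetMapping("/import/bankcard")
    public String importBankcard(@RequestParam String fileName) throws Exception {
        // The date parameter makes each parameter set unique, so the job
        // can be launched repeatedly as a new job instance.
        jobLauncher.run(importJob, new JobParametersBuilder()
                .addString("input.file.name", fileName + ".csv") // assumption: suffix appended here
                .addDate("date", new Date())
                .toJobParameters());
        return "import started";
    }
}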
By default, Spring Batch creates tables prefixed with BATCH_ in the database to record how import jobs execute. If a job fails with an exception, the failure is recorded in those tables, and the next run with the same parameters resumes from the point of failure, so data that was already imported is not imported again. However, once a job has completed successfully, running it again imports the same data a second time, because each launch with fresh job parameters creates a new job instance. For that reason, it is better to drive the batch import with a scheduled task combined with manual intervention on failure; that will be the topic of the next post.