spring batch入门笔记

spring batch入门笔记(1)

springboot 整合springbatch
准备:springboot基础架构项目
添加springbatch相关依赖

<!--  spring batch -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-batch</artifactId>
</dependency>
<!-- hibernate validator校验器 -->
<dependency>
    <groupId>org.hibernate</groupId>
    <artifactId>hibernate-validator</artifactId>
    <version>6.0.7.Final</version>
</dependency>
<!-- mysql connector-->
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.35</version>
</dependency>
<!-- alibaba dataSource -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.1.12</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-test</artifactId>
</dependency>

properties配置

#数据库连接(依赖中未引入p6spy,故使用原生mysql驱动URL)
spring.datasource.url=jdbc:mysql://localhost:3306/pan?useSSL=false&serverTimezone=UTC
spring.datasource.username=root
spring.datasource.password=root
#DruidDBConfig中通过@Value读取,必须在此定义驱动类
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
#config druid
#初始化连接数
spring.datasource.initialSize=1
#最小连接数
spring.datasource.minIdle=5
#最大连接数
spring.datasource.maxActive=20
#客户获取的等待时间
spring.datasource.maxWait=60000
spring.datasource.timeBetweenEvictionRunsMillis=60000
spring.datasource.minEvictableIdleTimeMillis=300000
spring.datasource.validationQuery=SELECT 1 FROM DUAL

# batch
# 默认自动执行定义的Job(true),改为false,需要jobLauncher.run执行
spring.batch.job.enabled=false
# spring batch在数据库里面创建默认的数据表,如果不是always则会提示相关表不存在
spring.batch.initialize-schema=always
# 设置batch表的前缀
#spring.batch.table-prefix=csv-batch

注册DBConfig配置类

/**
 * @author 
 * @dete 
 * @description 自定义DataSource
 *
 */
@SuppressWarnings("all")
@Configuration
public class DruidDBConfig {

    private Logger logger = LoggerFactory.getLogger(DruidDBConfig.class);

    @Value("${spring.datasource.url}")
    private String dbUrl;

    @Value("${spring.datasource.username}")
    private String username;

    @Value("${spring.datasource.password}")
    private String password;

    @Value("${spring.datasource.driver-class-name}")
    private String driverClassName;

   /* @Value("${spring.datasource.initialSize}")
    private int initialSize;

    @Value("${spring.datasource.minIdle}")
    private int minIdle;

    @Value("${spring.datasource.maxActive}")
    private int maxActive;

    @Value("${spring.datasource.maxWait}")
    private int maxWait;

    @Value("${spring.datasource.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;

    @Value("${spring.datasource.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;

    @Value("${spring.datasource.validationQuery}")
    private String validationQuery;

    @Value("${spring.datasource.testWhileIdle}")
    private boolean testWhileIdle;

    @Value("${spring.datasource.testOnBorrow}")
    private boolean testOnBorrow;

    @Value("${spring.datasource.testOnReturn}")
    private boolean testOnReturn;

    @Value("${spring.datasource.poolPreparedStatements}")
    private boolean poolPreparedStatements;

    @Value("${spring.datasource.maxPoolPreparedStatementPerConnectionSize}")
    private int maxPoolPreparedStatementPerConnectionSize;

    @Value("${spring.datasource.filters}")
    private String filters;

    @Value("{spring.datasource.connectionProperties}")
    private String connectionProperties;*/

    @Bean
    @Primary  // 被注入的优先级最高
    public DruidDataSource dataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        logger.info("-------->dataSource[url="+dbUrl+" ,username="+username+"]");
        dataSource.setUrl(dbUrl);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        dataSource.setDriverClassName(driverClassName);

        /*  //configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            logger.error("druid configuration initialization filter", e);
        }
        datasource.setConnectionProperties(connectionProperties);*/

        return  dataSource;
    }

    @Bean
    public ServletRegistrationBean druidServletRegistrationBean() {
        ServletRegistrationBean servletRegistrationBean = new ServletRegistrationBean();
        servletRegistrationBean.setServlet(new StatViewServlet());
        servletRegistrationBean.addUrlMappings("/druid/*");
        return servletRegistrationBean;
    }

    /**
     * 注册DruidFilter拦截
     *
     * @return
     */
    @Bean
    public FilterRegistrationBean duridFilterRegistrationBean() {
        FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean();
        filterRegistrationBean.setFilter(new WebStatFilter());
        Map<String, String> initParams = new HashMap<String, String>();
        //设置忽略请求
        initParams.put("exclusions", "*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*");
        filterRegistrationBean.setInitParameters(initParams);
        filterRegistrationBean.addUrlPatterns("/*");
        return filterRegistrationBean;
    }
}

编写batch配置类


/**
 * Spring Batch configuration for the CSV import job.
 *
 * Wires together the standard Spring Batch components:
 * 1) JobRepository  - stores job metadata
 * 2) JobLauncher    - starts jobs
 * 3) Job            - the executable unit, made of one or more Steps
 * 4) Step           - reader -> processor -> writer pipeline
 * 5) ItemReader     - reads records (here: from a CSV file)
 * 6) ItemProcessor  - validates/transforms records
 * 7) ItemWriter     - writes records (here: batch insert into MySQL)
 */
@Configuration
@EnableBatchProcessing // enable Spring Batch infrastructure
@Import(DruidDBConfig.class) // bring in the Druid DataSource
public class CsvBatchConfig {

    private static final Logger logger = LoggerFactory.getLogger(CsvBatchConfig.class);

    /**
     * ItemReader: reads the CSV file line by line and maps each line to a
     * Person bean by column name.
     *
     * @return the configured flat-file reader
     */
    @Bean
    public ItemReader<Person> reader(){
        FlatFileItemReader<Person> reader = new FlatFileItemReader<>();
        // Path is relative to the working directory; the file lives under
        // src/main/resources/cvs.
        reader.setResource(new PathResource("src/main/resources/cvs/person.csv"));

        // Explicit wiring instead of double-brace initialization (which
        // creates anonymous subclasses holding a reference to the config).
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setNames(new String[]{"id", "name", "age", "gender"});

        BeanWrapperFieldSetMapper<Person> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
        fieldSetMapper.setTargetType(Person.class);

        DefaultLineMapper<Person> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        reader.setLineMapper(lineMapper);
        return reader;
    }

    /**
     * ItemProcessor: transforms each record and runs JSR-303 validation.
     *
     * @return the processor with the bean validator attached
     */
    @Bean
    public ItemProcessor<Person, Person> processor(){
        CvsItemProcessor cvsItemProcessor = new CvsItemProcessor();
        cvsItemProcessor.setValidator(csvBeanValidator());
        return cvsItemProcessor;
    }

    /**
     * JSR-303 bean validator used by the processor.
     *
     * @return a typed validator for Person (was a raw type)
     */
    @Bean
    public CsvBeanValidator<Person> csvBeanValidator(){
        return new CsvBeanValidator<Person>();
    }

    /**
     * ItemWriter: batch-inserts Person records via named-parameter SQL.
     *
     * @param dataSource the pooled DataSource to write through
     * @return the JDBC batch writer
     */
    @Bean
    public ItemWriter<Person> writer(DruidDataSource dataSource){
        JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<>();
        // Named parameters (:id, :name, ...) are bound from Person properties.
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
        String sql = "insert into person values(:id,:name,:age,:gender)";
        writer.setSql(sql);
        writer.setDataSource(dataSource);
        return writer;
    }

    /**
     * JobRepository backed by the MySQL metadata tables.
     *
     * @param dataSource         the batch metadata DataSource
     * @param transactionManager the transaction manager
     * @return the initialized JobRepository
     * @throws Exception if the factory fails to initialize
     */
    @Bean
    public JobRepository cvsJobRepository(DataSource dataSource, PlatformTransactionManager transactionManager) throws Exception{
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setDatabaseType("mysql");
        jobRepositoryFactoryBean.setTransactionManager(transactionManager);
        jobRepositoryFactoryBean.setDataSource(dataSource);
        // BUG FIX: the factory is constructed manually, so its lifecycle
        // callback must be invoked before getObject() is used.
        jobRepositoryFactoryBean.afterPropertiesSet();
        return jobRepositoryFactoryBean.getObject();
    }

    /**
     * JobLauncher used to start the import job programmatically
     * (spring.batch.job.enabled=false disables auto-run).
     *
     * @param dataSource         the batch metadata DataSource
     * @param transactionManager the transaction manager
     * @return the initialized launcher
     * @throws Exception if initialization fails
     */
    @Bean
    public SimpleJobLauncher csvJobLauncher(DataSource dataSource, PlatformTransactionManager transactionManager) throws Exception{
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(cvsJobRepository(dataSource, transactionManager));
        // BUG FIX: validate the launcher's mandatory properties, as Spring
        // would for a container-managed afterPropertiesSet().
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }

    /**
     * The import job: a single-step flow with a run-id incrementer (so the
     * same job can be launched repeatedly) and a timing listener.
     *
     * @param jobs job builder factory
     * @param step the single processing step
     * @return the job
     */
    @Bean
    public Job importJob(JobBuilderFactory jobs, Step step){
        return jobs.get("importCsvJob")
                .incrementer(new RunIdIncrementer())
                .flow(step)
                .end()
                .listener(csvJobListener())
                .build();
    }

    /**
     * Job listener that logs start/end and elapsed time.
     *
     * @return the listener
     */
    @Bean
    public CsvJobListener csvJobListener(){
        return new CsvJobListener();
    }

    /**
     * Step: read -> process/validate -> write, committing in chunks.
     * Items are read one at a time and accumulated; once the chunk size is
     * reached they are handed to the writer in a single transaction.
     *
     * @param stepBuilderFactory step builder factory
     * @param reader             CSV reader
     * @param writer             JDBC writer
     * @param processor          validator/transformer
     * @return the step
     */
    @Bean
    public Step step(StepBuilderFactory stepBuilderFactory, ItemReader<Person> reader,
                     ItemWriter<Person> writer, ItemProcessor<Person, Person> processor){
        return stepBuilderFactory
                .get("step")
                .<Person, Person>chunk(65000)
                .reader(reader)
                .processor(processor)
                .writer(writer)
                .build();
    }
}

定义处理器
继承ValidatingItemProcessor并重写process方法,输入的参数是从ItemReader读取到的数据,返回的数据给ItemWriter

/**
 * CSV record processor: runs JSR-303 validation on each Person (via the
 * validator injected by CsvBatchConfig), then normalizes the gender field.
 * Receives items from the ItemReader; its return value goes to the ItemWriter.
 */
public class CvsItemProcessor extends ValidatingItemProcessor<Person> {

    // static final: one logger per class, not per instance.
    private static final Logger logger = LoggerFactory.getLogger(CvsItemProcessor.class);

    /**
     * Validates and transforms one record.
     * super.process(item) triggers the configured validator and throws
     * ValidationException on failure, aborting the chunk.
     *
     * NOTE(review): every value other than "男" is mapped to "F" — including
     * inputs that are already "M". Confirm the source data only ever
     * contains 男/女 before relying on this mapping.
     *
     * @param item the record read from the CSV file
     * @return the same item with gender normalized to "M"/"F"
     * @throws ValidationException if JSR-303 validation fails
     */
    @Override
    public Person process(Person item) throws ValidationException {
        logger.info("processor start validating...");
        super.process(item);

        // Normalize Chinese gender to a single letter for the varchar(2) column.
        if ("男".equals(item.getGender())) {
            item.setGender("M");
        } else {
            item.setGender("F");
        }
        logger.info("processor end validating...");
        return item;
    }
}

定义校验器:使用JSR-303(hibernate-validator)注解,来校验ItemReader读取到的数据是否满足要求。如不满足则不会进行接下来的批处理任务。这个主要是用来校验参数

/**
 * Spring Batch Validator backed by JSR-303 (hibernate-validator).
 * Checks the annotations on the bean read by the ItemReader; if any
 * constraint is violated the batch run is stopped.
 *
 * @param <T> the bean type to validate
 */
public class CsvBeanValidator<T> implements Validator<T>, InitializingBean {

    // Fully-qualified to avoid clashing with the Spring Batch Validator interface.
    private javax.validation.Validator validator;

    /**
     * Initializes the JSR-303 validator once this bean's properties are set.
     *
     * @throws Exception if the default validator factory cannot be built
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        validator = Validation.buildDefaultValidatorFactory().getValidator();
    }

    /**
     * Validates the value; all violation messages are aggregated (one per
     * line, each followed by a newline) into a single exception.
     *
     * @param value the bean to validate
     * @throws ValidationException if any constraint is violated
     */
    @Override
    public void validate(T value) throws ValidationException {
        Set<ConstraintViolation<T>> violations = validator.validate(value);
        if (!violations.isEmpty()) {
            StringBuilder message = new StringBuilder();
            for (ConstraintViolation<T> violation : violations) {
                message.append(violation.getMessage()).append('\n');
            }
            throw new ValidationException(message.toString());
        }
    }
}

监听Job执行情况,则定义一个类实现JobExecutionListener,并在定义Job的Bean上绑定该监听器


/**
 * Job execution listener: implements JobExecutionListener and is bound to
 * the Job bean; logs the job's start/end and its wall-clock elapsed time.
 */
public class CsvJobListener implements JobExecutionListener {

    // static final: one logger per class, not per instance.
    private static final Logger logger = LoggerFactory.getLogger(CsvJobListener.class);
    private long startTime;
    private long endTime;

    /**
     * Records the start timestamp before the job runs.
     */
    @Override
    public void beforeJob(JobExecution jobExecution) {
        startTime = System.currentTimeMillis();
        logger.info("job process start...");
    }

    /**
     * Records the end timestamp and logs the elapsed time.
     */
    @Override
    public void afterJob(JobExecution jobExecution) {
        endTime = System.currentTimeMillis();
        logger.info("job process end...");
        // Parameterized logging avoids eager string concatenation.
        logger.info("elapsed time: {}ms", endTime - startTime);
    }
}

测试
丢一个.csv文件到resources下
csv文件是以逗号为分隔的数据表示字段,回车表示一行(条)数据记录

1,Zhangsan,21,
2,Lisi,22,
3,Wangwu,23,
4,Zhaoliu,24,
5,Zhouqi,25,

实体类
person.csv中的字段与之对应,并在该实体中可以添加校验注解,如@Size表示该字段的长度范围,如果超过规定。则会被校验检测到,批处理将不会进行!

/**
 * Entity mapped from the columns of person.csv (id, name, age, gender).
 * Bean-validation annotations here (e.g. @Size) are checked by
 * CsvBeanValidator; a violation stops the batch run.
 */
public class Person implements Serializable {

    // BUG FIX: serialVersionUID must be static — a non-static field is
    // ignored by Java serialization and defeats the purpose of declaring it.
    private static final long serialVersionUID = 1L;

    private String id;
    // Name length must be within [2, 8] or validation fails.
    @Size(min = 2, max = 8)
    private String name;
    private int age;
    private String gender;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public String getGender() {
        return gender;
    }

    public void setGender(String gender) {
        this.gender = gender;
    }

    @Override
    public String toString() {
        return "Person{" +
                "id='" + id + '\'' +
                ", name='" + name + '\'' +
                ", age=" + age +
                ", gender='" + gender + '\'' +
                '}';
    }
}

数据表

CREATE TABLE `person` (
  `id` int(11) NOT NULL,
  `name` varchar(10) DEFAULT NULL,
  `age` int(11) DEFAULT NULL,
  `gender` varchar(2) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1

测试类
需要注入发布器,与job任务。同时可以使用后置参数灵活处理,最后调用JobLauncher.run方法执行批处理任务

@RunWith(SpringRunner.class)
@SpringBootTest
@Slf4j
@SuppressWarnings("all")
public class BatchTest {

    @Autowired
    SimpleJobLauncher jobLauncher;

    @Autowired
    Job importJob;

    @Test
    public void test() throws Exception{
        // 后置参数:使用JobParameters中绑定参数
        JobParameters jobParameters = new JobParametersBuilder().addLong("time", System.currentTimeMillis())
                .toJobParameters();
        jobLauncher.run(importJob, jobParameters);
    }
}

在这里插入图片描述

结束
参考原作:https://www.cnblogs.com/jian0110/p/10838744.html
略作修改

路漫漫其修远兮,吾将上下而求索
在这里插入图片描述

  • 3
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值