ES-JOB Distributed Scheduled Tasks, Advanced Usage: Modifying Jobs from the Console

For basic usage of distributed scheduled tasks, see: ES-JOB——分布式定时任务基础使用_择业的博客-CSDN博客

The code below extends the project from the linked post; compile it on top of that code.

Distributed scheduled tasks are executed by sharding: the same jar is deployed to several servers, and a single scheduled job is split across them according to its sharding items (a minimal sketch of such a sharded job follows below).

The idea behind the advanced usage:

A job is created in the back end, and the console then configures and manages the scheduled task (existing jobs can be modified, but new ones cannot be added from the console).

Alternatively, a custom @job annotation can be used, with the console configuring, creating, and managing the scheduled tasks.
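
To make sharded execution concrete, here is a minimal sketch of what a SimpleJob implementation such as MySimpleJob (used by the configuration in step 2 but not shown in this post, so its body here is an assumption) might look like:

package com.bfxy.esjob.task;

import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;

public class MySimpleJob implements SimpleJob {

    @Override
    public void execute(ShardingContext shardingContext) {
        // Each server instance only receives the sharding items assigned to it,
        // so the same jar deployed on several machines splits the work.
        System.err.println("MySimpleJob, sharding item: " + shardingContext.getShardingItem()
                + ", sharding parameter: " + shardingContext.getShardingParameter());
    }
}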

1. Associate the data source with the jobs: JobEventConfig


package com.bfxy.esjob.config;

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.dangdang.ddframe.job.event.JobEventConfiguration;
import com.dangdang.ddframe.job.event.rdb.JobEventRdbConfiguration;

@Configuration
public class JobEventConfig {

    @Autowired
    private DataSource dataSource;

    /**
     * Persists job execution and status-trace events to the relational database
     * (elastic-job creates the event log tables automatically); the console reads
     * these records to display the job history.
     */
    @Bean
    public JobEventConfiguration jobEventConfiguration() {
        return new JobEventRdbConfiguration(dataSource);
    }
}
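
The DataSource injected above has to be configured in the project itself. Assuming a standard Spring Boot setup with MySQL (the URL, credentials, and driver below are placeholders, not the original project's values), application.properties could contain something like:

spring.datasource.url=jdbc:mysql://127.0.0.1:3306/es_job_event?useUnicode=true&characterEncoding=utf8
spring.datasource.username=root
spring.datasource.password=root
spring.datasource.driver-class-name=com.mysql.jdbc.Driver

With this in place, JobEventRdbConfiguration writes each job execution and status change into that database, and the elastic-job console can show the job history from there.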

2. Inject the JobEventConfiguration into MySimpleJobConfig

package com.bfxy.esjob.config;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.bfxy.esjob.listener.SimpleJobListener;
import com.bfxy.esjob.task.MySimpleJob;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
import com.dangdang.ddframe.job.event.JobEventConfiguration;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;

@Configuration
public class MySimpleJobConfig {

	@Autowired
	private ZookeeperRegistryCenter registryCenter;
	
	@Autowired
	private JobEventConfiguration jobEventConfiguration;
	
	@Bean
	public SimpleJob simpleJob() {
		return new MySimpleJob();
	}
	/**
	 * Creates the scheduler for the simple job; every job setting is read from the
	 * configuration file through the @Value placeholders below.
	 * @param simpleJob the job instance to schedule
	 * @return the job scheduler (its init() method is invoked via initMethod = "init")
	 */
	@Bean(initMethod = "init")
	public JobScheduler simpleJobScheduler(final SimpleJob simpleJob,
			@Value("${simpleJob.cron}") final String cron,
			@Value("${simpleJob.shardingTotalCount}") final int shardingTotalCount,
			@Value("${simpleJob.shardingItemParameters}") final String shardingItemParameters,
			@Value("${simpleJob.jobParameter}") final String jobParameter,
			@Value("${simpleJob.failover}") final boolean failover,
			@Value("${simpleJob.monitorExecution}") final boolean monitorExecution,
			@Value("${simpleJob.monitorPort}") final int monitorPort,
			@Value("${simpleJob.maxTimeDiffSeconds}") final int maxTimeDiffSeconds,
			@Value("${simpleJob.jobShardingStrategyClass}") final String jobShardingStrategyClass) {
		
		return new SpringJobScheduler(simpleJob,
				registryCenter,
				getLiteJobConfiguration(simpleJob.getClass(),
						cron,
						shardingTotalCount,
						shardingItemParameters,
						jobParameter,
						failover,
						monitorExecution,
						monitorPort,
						maxTimeDiffSeconds,
						jobShardingStrategyClass),
				jobEventConfiguration,
				new SimpleJobListener());
		
	}
	
	
	private LiteJobConfiguration getLiteJobConfiguration(Class<? extends SimpleJob> jobClass, String cron,
			int shardingTotalCount, String shardingItemParameters, String jobParameter, boolean failover,
			boolean monitorExecution, int monitorPort, int maxTimeDiffSeconds, String jobShardingStrategyClass) {

		JobCoreConfiguration jobCoreConfiguration = JobCoreConfiguration
				.newBuilder(jobClass.getName(), cron, shardingTotalCount)
				.misfire(true)
				.failover(failover)
				.jobParameter(jobParameter)
				.shardingItemParameters(shardingItemParameters)
				.build();
		
		SimpleJobConfiguration simpleJobConfiguration = new SimpleJobConfiguration(jobCoreConfiguration, jobClass.getCanonicalName());
		
		LiteJobConfiguration liteJobConfiguration = LiteJobConfiguration.newBuilder(simpleJobConfiguration)
				.jobShardingStrategyClass(jobShardingStrategyClass)
				.monitorExecution(monitorExecution)
				.monitorPort(monitorPort)
				.maxTimeDiffSeconds(maxTimeDiffSeconds)
				// Do not let the local configuration overwrite ZooKeeper on startup:
				// the configuration stored in the registry (which the console edits) stays authoritative.
				.overwrite(false)
				.build();
		
		return liteJobConfiguration;
	}
}
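
The @Value placeholders above are resolved from the application configuration. A sketch of the corresponding entries (the concrete values, such as the cron expression and sharding parameters, are illustrative assumptions rather than the original project's settings):

simpleJob.cron=0/10 * * * * ?
simpleJob.shardingTotalCount=2
simpleJob.shardingItemParameters=0=A,1=B
simpleJob.jobParameter=test
simpleJob.failover=true
simpleJob.monitorExecution=true
simpleJob.monitorPort=9888
simpleJob.maxTimeDiffSeconds=10
simpleJob.jobShardingStrategyClass=

Because the LiteJobConfiguration is built with overwrite(false), these local settings only seed the job the first time it is registered; afterwards the configuration stored in ZooKeeper is authoritative, which is exactly what lets the console modify the job (cron, sharding count, parameters) without the change being wiped out on the next deployment.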

3. Streaming job, DataflowJobConfig: the scheduled task runs in a loop, triggered every ten seconds

/*
 * Copyright 1999-2015 dangdang.com.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * </p>
 */

package com.bfxy.esjob.config;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.bfxy.esjob.task.SpringDataflowJob;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;
import com.dangdang.ddframe.job.config.JobCoreConfiguration;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.event.JobEventConfiguration;
import com.dangdang.ddframe.job.lite.api.JobScheduler;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.spring.api.SpringJobScheduler;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperRegistryCenter;

@Configuration
public class DataflowJobConfig {
    
	@Autowired
    private ZookeeperRegistryCenter regCenter;
    
    @Autowired
    private JobEventConfiguration jobEventConfiguration;
    
    @Bean
    public DataflowJob dataflowJob() {
        return new SpringDataflowJob();
    }
    
    @Bean(initMethod = "init")
    public JobScheduler dataflowJobScheduler(final DataflowJob dataflowJob,
                                             @Value("${dataflowJob.cron}") final String cron,
                                             @Value("${dataflowJob.shardingTotalCount}") final int shardingTotalCount,
                                             @Value("${dataflowJob.shardingItemParameters}") final String shardingItemParameters) {
        // init() is invoked through initMethod = "init", so it is not called here.
        return new SpringJobScheduler(dataflowJob, regCenter,
                getLiteJobConfiguration(dataflowJob.getClass(), cron, shardingTotalCount, shardingItemParameters),
                jobEventConfiguration);
    }

    private LiteJobConfiguration getLiteJobConfiguration(final Class<? extends DataflowJob> jobClass, final String cron,
            final int shardingTotalCount, final String shardingItemParameters) {
        return LiteJobConfiguration.newBuilder(
                new DataflowJobConfiguration(
                        JobCoreConfiguration.newBuilder(jobClass.getName(), cron, shardingTotalCount)
                                .shardingItemParameters(shardingItemParameters)
                                .build(),
                        jobClass.getCanonicalName(),
                        true))           // streamingProcess = true: fetchData/processData repeat until no data is returned
                .overwrite(true)         // local config overwrites ZooKeeper on startup, so console changes to this job are reset at restart
                .build();
    }
}
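
The matching configuration entries for this job (values again illustrative; the ten-second cron matches the description above):

dataflowJob.cron=0/10 * * * * ?
dataflowJob.shardingTotalCount=2
dataflowJob.shardingItemParameters=0=A,1=B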

The related entity class:

package com.bfxy.esjob.entity;

public class Foo {

	private String id;
	private String name;
	
	public Foo() {
	}
	public Foo(String id, String name) {
		super();
		this.id = id;
		this.name = name;
	}
	public String getId() {
		return id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getName() {
		return name;
	}
	public void setName(String name) {
		this.name = name;
	}
}

The concrete job implementation, SpringDataflowJob:

/*
 * Copyright 1999-2015 dangdang.com.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * </p>
 */

package com.bfxy.esjob.task;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.bfxy.esjob.entity.Foo;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;


public class SpringDataflowJob implements DataflowJob<Foo> {
	
    private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataflowJob.class);
    
    @Override
    public List<Foo> fetchData(final ShardingContext shardingContext) {
    	System.err.println("--------------@@@@@@@@@@ fetching data... --------------");
        // With streamingProcess = true, returning null (or an empty list) ends the
        // current streaming round; otherwise fetchData/processData keep alternating.
        return null;
    }

    @Override
    public void processData(final ShardingContext shardingContext, final List<Foo> data) {
    	System.err.println("--------------@@@@@@@@@ processing data... --------------");
    }
}
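
The class above is only a skeleton: fetchData returns null, so processData is never invoked. As a purely hypothetical illustration of how the streaming loop behaves (the class name and the in-memory queue below are assumptions, not part of the original project), a filled-in dataflow job might look like this:

package com.bfxy.esjob.task;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

import com.bfxy.esjob.entity.Foo;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.dataflow.DataflowJob;

/** Hypothetical example, for illustration only. */
public class InMemoryFooDataflowJob implements DataflowJob<Foo> {

    // Stand-in for a real data store; in practice fetchData would query
    // pending records belonging to the current sharding item.
    private final Queue<Foo> pending = new ConcurrentLinkedQueue<>(
            Arrays.asList(new Foo("1", "first"), new Foo("2", "second")));

    @Override
    public List<Foo> fetchData(final ShardingContext shardingContext) {
        Foo next = pending.poll();
        // An empty result ends the current streaming round; the cron triggers the job again later.
        return next == null ? Collections.<Foo>emptyList() : Collections.singletonList(next);
    }

    @Override
    public void processData(final ShardingContext shardingContext, final List<Foo> data) {
        for (Foo foo : data) {
            System.err.println("sharding item " + shardingContext.getShardingItem()
                    + " processed Foo " + foo.getId() + "/" + foo.getName());
        }
    }
}

Each trigger drains the queue one element at a time until fetchData returns an empty list, which ends that streaming round.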
