xxl-job 定时任务(多线程执行)获取任务执行进度
具体思路
- 前提:需要将任务按照一定条件进行分组,并处理成列表的形式(任务与任务之间不存在交叉关系,保持列表中每一项的唯一性)
- 在使用 xxl-job 进行多线程任务调度时,获取任务的具体执行进度:首先将需要多线程处理的数据按分组条件取出,组成一个 list 对象;然后对该 list 进行分页,划分为多个批次(多页),从而实现任务的分批处理。
完整工具代码
import java.util.Map;
import java.util.concurrent.Future;
/**
 * Body of one job task: processes a single page of the overall workload.
 * Implemented by the caller, typically as a lambda submitted to {@code JobHelp}.
 */
@FunctionalInterface
public interface JobExecutor<T> {
    /**
     * Executes one page of the job.
     *
     * @param curr  1-based page number of the page to process
     * @param limit page size (number of records in this page)
     * @param args  job parameters (time tag, map key, ...) for this task
     * @return a future yielding the task's result (e.g. a log message)
     * @throws Exception if the page cannot be processed
     */
    Future<T> execute(int curr, int limit, Job args) throws Exception;
}
- JobExecutorEnd任务执行完成后,自定义的回调方法,定义任务结束后,要进行的下一步操作、打印日志之类的
/**
 * Callback fired exactly once after every page of a job has completed,
 * e.g. to trigger a follow-up action or emit a summary log line.
 *
 * <p>NOTE(review): the type parameter {@code T} is currently unused by the
 * single method; it is kept so existing declarations such as
 * {@code JobExecutorEnd<String>} keep compiling.
 */
@FunctionalInterface
public interface JobExecutorEnd<T> {
    /** Invoked once after the whole job has finished. */
    void executorEnd();
}
- 用于传递执行任务过程中需要常用的参数,可以自定义添加参数,需要在JobHelp类的startJob、startMapJob 方法中进行自定义参数的处理逻辑,也可以在具体的任务处理中进行修改
// Parameter DTO handed to every job task. Add further fields as needed;
// custom fields must then be propagated in JobHelp.startJob / startMapJob
// (or mutated inside the task itself). Lombok generates the accessors.
@Setter
@Getter
public class Job {
    // group key of the current page in "map" mode (filled in by JobHelp.startMapJob)
    private String mapKey;
    // business time tag supplied by the caller, e.g. formatted as yyyyMMdd
    private String time;
}
/**
 * Drives a paged job in batches of {@code step} concurrent tasks and publishes
 * the cumulative progress (a fraction formatted to 6 decimals) to Redis after
 * each page finishes. One JobHelp instance drives one job run; the instance
 * itself is not thread-safe.
 */
@Setter
@Getter
@Slf4j
public class JobHelp<T> {
    // page size: number of records handled by one task
    private int limit = 500;
    // total number of records (or group keys in map mode)
    private long total;
    // number of pages = ceil(total / limit)
    private long pages;
    // progress contributed by one finished page (1 / pages)
    private double pro;
    // count of pages finished so far
    private int progress = 0;
    // how many pages are submitted concurrently per batch
    private Integer step = 5;
    // group keys, one per page, used by the startMapJob variants
    private List<String> mapIndex;
    // optional callback invoked once after all pages have finished
    private JobExecutorEnd<T> jobExecutorEnd;
    // suffix of the Redis key the progress value is written under
    private String progressRedisKey;
    // shared Redis accessor, installed once per JVM (first caller wins)
    private static RedisRepository redisRepository;

    /** Installs the Redis accessor on first call; subsequent calls are no-ops. */
    public void setRedisRepository(RedisRepository redisRepository) {
        if (JobHelp.redisRepository == null){
            JobHelp.redisRepository = redisRepository;
        }
    }

    /** @param total total record count; uses the default page size of 500 */
    public JobHelp(long total) {
        this.total = total;
        initPaging();
    }

    /**
     * @param limit page size
     * @param total total record count
     */
    public JobHelp(int limit, int total) {
        this.limit = limit;
        this.total = total;
        initPaging();
    }

    /** Map mode: one page per group key, page size fixed at 1. */
    public JobHelp(Set<String> set){
        this.mapIndex = new ArrayList<String>(set);
        this.limit = 1;
        this.total = set.size();
        initPaging();
    }

    // Derives pages and per-page progress from total/limit.
    // Guards total == 0, which previously produced pro = Infinity and a NaN in Redis.
    private void initPaging() {
        this.pages = total % limit == 0 ? total / limit : (total / limit) + 1;
        this.pro = pages == 0 ? 0.0 : 1.00 / pages;
    }

    /** Runs the job, passing the same caller-supplied args object to every task. */
    public void startJob(Job args, JobExecutor<T> jobExecutor) throws Exception{
        runBatches(jobExecutor, args, false);
    }

    /** Runs the job with a fresh, empty Job per task. */
    public void startJob(JobExecutor<T> jobExecutor) throws Exception{
        runBatches(jobExecutor, null, false);
    }

    /**
     * Map mode: each task receives a Job whose mapKey is that page's group key.
     * The caller-supplied template's fields (e.g. time) are copied onto a fresh
     * per-task Job — the previous implementation overwrote mapKey on ONE shared
     * Job while earlier tasks were still executing, racing across threads.
     */
    public void startMapJob(Job args, JobExecutor<T> jobExecutor) throws Exception{
        runBatches(jobExecutor, args, true);
    }

    /** Map mode without a template: each task gets a Job carrying only its mapKey. */
    public void startMapJob(JobExecutor<T> jobExecutor) throws Exception{
        runBatches(jobExecutor, null, true);
    }

    // Shared driver for all four start variants (they were near-identical copies).
    // Submits up to `step` pages at a time, waits for the batch, records progress
    // after each finished page, then fires jobEnd().
    private void runBatches(JobExecutor<T> jobExecutor, Job template, boolean mapMode) throws Exception {
        for (int i = 0; i < pages; i += step) {
            List<Future<T>> taskList = new ArrayList<>();
            long last = Math.min(i + step, pages);
            for (int j = i; j < last; j++) {
                int curr = j + 1; // executor pages are 1-based
                Job args;
                if (mapMode) {
                    // fresh Job per task so concurrent tasks never share mutable state
                    args = new Job();
                    if (template != null) {
                        args.setTime(template.getTime());
                    }
                    args.setMapKey(mapIndex.get(j));
                } else {
                    // non-map mode keeps the original contract: shared template if given
                    args = template != null ? template : new Job();
                }
                taskList.add(jobExecutor.execute(curr, limit, args));
            }
            for (Future<T> item : taskList) {
                // String.valueOf replaces the old (String) cast, which threw
                // ClassCastException whenever T was not String
                log.info(String.valueOf(item.get()));
                progress++;
                recordProgress();
            }
        }
        jobEnd();
    }

    // Writes the cumulative progress fraction to Redis; no-op if Redis was never installed
    // (previously this threw a NullPointerException).
    private void recordProgress() {
        if (redisRepository != null) {
            redisRepository.set("system:job:" + progressRedisKey + ":progress",
                    String.format("%.6f", progress * pro));
        }
    }

    /** Fires the optional completion callback. */
    public void jobEnd() {
        if (jobExecutorEnd != null){
            jobExecutorEnd.executorEnd();
        }
    }
}
使用例子
@Autowired
private UserMapper userMapper;
@Autowired
private UserBlackMapper userBlackMapper;
@Autowired
private RedisRepository redisRepository;
// Select today's new users; the same wrapper is reused for every page query below.
LambdaQueryWrapper<User> queryWrapper = new LambdaQueryWrapper<>();
LocalDate now = LocalDate.now();
String time = now.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
queryWrapper.ge(User::getCjsj, now);
// First page query is only used to obtain the total row count for paging.
Page<User> resPage = userMapper.selectPage(new Page<>(1, 500), queryWrapper);
Job args = new Job();
args.setTime(time);
// Page size defaults to 500, matching the probe query above.
JobHelp<String> jobHelp = new JobHelp<>(resPage.getTotal());
jobHelp.setRedisRepository(redisRepository);
// Progress is written under "system:job:user:<yyyyMMdd>:progress".
jobHelp.setProgressRedisKey("user:" + time);
try {
// Lambda parameter "map" is the Job args object (curr = 1-based page number).
jobHelp.startJob(args, (curr, limit, map) -> {
long execStartTime = System.currentTimeMillis();
Page<User> page = userMapper.selectPage(new Page<>(curr, limit), queryWrapper);
List<User> insertList = page.getRecords();
userBlackMapper.saveBatch(insertList);
long execEndTime = System.currentTimeMillis();
// Returned message is logged by JobHelp after the future completes.
return new AsyncResult<String>("每日新建用户备份" + curr + "页结束,总耗时" + (execEndTime - execStartTime) / 1000 + "s");
});
} catch (Exception e) {
e.printStackTrace();
throw e;
}