自己写一个任务队列
今天遇到一个BUG,说是有一个提交的操作,响应速度慢,这个操作包含了一个上传图片的功能。
之后进行断点追踪,发现了一个耗时操作,就是为已上传的图片添加一个水印。用的是jdk自带的BufferedImage进行绘制操作。遇到这个问题,我的第一反应不是想如何优化IO,而是想到了两种方法,一种是换成非阻塞IO流,另外一种是使用队列进行异步处理。
说干就干,上百度上面查找是否有rabbitmq消息队列的关键代码,或者rabbitmq生产和处理消息的机制,发现这方面的资料太少,后面参考了一位开发人员写的代码,进行了改造,成为了现在这个版本。
UML类图:
JobDetail.java
package com.esinhai.epm.job.abstractEntiry;
import lombok.Getter;
@Getter
public class JobDetail {

    /** Opaque parameter payload handed to the job when it executes; immutable after construction. */
    private final Object param1;

    /**
     * @param param1 the value the job implementation will receive in {@code execute} (may be null)
     */
    public JobDetail(Object param1) {
        this.param1 = param1;
    }

    /**
     * Returns the job parameter. Explicit getter: Lombok's {@code @Getter}
     * skips generation when a matching method already exists, so this stays
     * backward compatible with callers of {@code getParam1()}.
     */
    public Object getParam1() {
        return param1;
    }
}
JobInterface.java
package com.esinhai.epm.job.abstractEntiry;
/**
 * Body of an asynchronous job. Implementations are instantiated reflectively
 * by JobFactory and run on the consumer's worker pool via JobWrapper.
 * Marked {@code @FunctionalInterface}: exactly one abstract method, so jobs
 * may also be written as lambdas (purely additive, no caller impact).
 */
@FunctionalInterface
public interface JobInterface {

    /**
     * Executes the job with the supplied parameters.
     *
     * @param jobDetail the parameter holder created by the producer
     */
    void execute(JobDetail jobDetail);
}
JobWrapper.java
package com.esinhai.epm.job.abstractEntiry;
/**
 * Adapter that binds a job body to its parameters so the pair can be
 * submitted to an {@code ExecutorService} as a plain {@link Runnable}.
 */
public class JobWrapper implements Runnable {

    /** The job body; may be null, in which case {@link #run()} is a no-op. */
    private final JobInterface jobInterface;
    /** Parameters passed to the job at execution time. */
    private final JobDetail jobDetail;

    public JobWrapper(JobInterface jobInterface, JobDetail jobDetail) {
        this.jobInterface = jobInterface;
        this.jobDetail = jobDetail;
    }

    /** Runs on a worker-pool thread; delegates to the wrapped job. */
    @Override
    public void run() {
        if (jobInterface != null) {
            jobInterface.execute(jobDetail);
        }
    }
}
JobCustomer.java
package com.esinhai.epm.job.customer;
import com.esinhai.common.utils.WriteLogUtil;
import com.esinhai.epm.job.abstractEntiry.JobWrapper;
import com.esinhai.epm.job.factory.QueueFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
public class JobCustomer extends Thread {
private static final int maxPool = 3;
private static final int maxQueueNum = 3;
private volatile boolean isPause = false;
private static ReentrantLock lock = new ReentrantLock();
private static Condition condition = lock.newCondition();
private final ExecutorService service;
public JobCustomer(){
service = Executors.newFixedThreadPool(maxPool);
}
@Override
public void run(){
while (true){
doWait();
try {
Thread.sleep(100); //让线程有时间排队
doTask();
} catch (InterruptedException e) {
WriteLogUtil.write("job-1-wait-error",e.getMessage());
}
}
}
private void doTask() throws InterruptedException {
BlockingQueue<JobWrapper> blockingDeque = QueueFactory.getSingleBlockQueue();
List<Runnable> list = new ArrayList<>();
for(int i = 0;i < maxQueueNum; i++){
if(!blockingDeque.isEmpty()){
list.add(blockingDeque.take());
}
}
if(list.size() > 0){
for(Runnable runnable : list){
service.submit(runnable);
}
}
}
private void doWait() {
if(isPause) {
lock.lock();
try {
while (isPause) {
condition.await();
}
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
lock.unlock();
}
}
}
public void shutdown(){
service.shutdown();
}
public void pauseJob(){
lock.lock();
isPause = true;
lock.unlock();
}
public void resumeJob(){
lock.lock();
isPause = false;
condition.signal();
lock.unlock();
}
}
JobFactory.java
package com.esinhai.epm.job.factory;
import com.esinhai.epm.job.abstractEntiry.JobInterface;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
@Component
public class JobFactory implements ApplicationContextAware {
private ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
private JobInterface createJobInterface(Class<? extends JobInterface> c) throws Exception {
if(c != null) return c.newInstance();
else throw new Exception("未找到任务类");
}
public JobInterface createJob(Class<? extends JobInterface> c) throws Exception {
JobInterface jobInterface = this.createJobInterface(c);
this.applicationContext.getAutowireCapableBeanFactory().autowireBean(jobInterface);
return jobInterface;
}
}
QueueFactory.java
package com.esinhai.epm.job.factory;
import com.esinhai.epm.job.abstractEntiry.JobWrapper;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
/**
 * Holder of the single shared job queue used by producers and the consumer.
 */
public class QueueFactory {

    /** The one process-wide queue instance; created eagerly, thread-safe. */
    private static final BlockingDeque<JobWrapper> blockingDeque = new LinkedBlockingDeque<>();

    /** Utility class — not instantiable. */
    private QueueFactory() {
    }

    /**
     * Returns the shared queue. The return type is now parameterized
     * ({@code BlockingDeque<JobWrapper>} instead of the raw type) so callers
     * get compile-time element checking; existing call sites still compile.
     */
    public static BlockingDeque<JobWrapper> getSingleBlockQueue() {
        return blockingDeque;
    }
}
JobProducer.java
package com.esinhai.epm.job.producer;
import com.esinhai.epm.job.abstractEntiry.JobDetail;
import com.esinhai.epm.job.abstractEntiry.JobInterface;
import com.esinhai.epm.job.abstractEntiry.JobWrapper;
import com.esinhai.epm.job.factory.JobFactory;
import com.esinhai.epm.job.factory.QueueFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class JobProducer {
@Autowired
private JobFactory jobFactory;
public void createJob(Class<? extends JobInterface> c, JobDetail jobDetail) throws Exception {
JobInterface jobInterface = jobFactory.createJob(c);
JobWrapper jobWrapper = new JobWrapper(jobInterface,jobDetail);
QueueFactory.getSingleBlockQueue().put(jobWrapper);
}
}
DownLoadJob.java
package com.esinhai.epm.job.task;
import com.esinhai.common.utils.CompressUtil;
import com.esinhai.common.utils.FTPUtil;
import com.esinhai.common.utils.ResourcePathUtil;
import com.esinhai.common.utils.WriteLogUtil;
import com.esinhai.epm.api.resp.FtpFileParams;
import com.esinhai.epm.job.abstractEntiry.JobDetail;
import com.esinhai.epm.job.abstractEntiry.JobInterface;
import java.util.List;
public class DownLoadJob implements JobInterface {
@Override
public void execute(JobDetail jobDetail) {
try{
//TODO 执行异步逻辑
}catch (Exception e){
}
}
}
ServletListener.java
package com.esinhai.common.config;
import com.esinhai.epm.job.customer.JobCustomer;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
@WebListener
public class ServletListener implements ServletContextListener {
private JobCustomer jobCustomer;
@Override
public void contextDestroyed(ServletContextEvent arg0) {
if(jobCustomer != null){
jobCustomer.shutdown();
}
}
@Override
public void contextInitialized(ServletContextEvent arg0) {
if(jobCustomer == null){
jobCustomer = new JobCustomer();
jobCustomer.start();
}
}
}
主要看一下JobCustomer这个类,这个类首先是继承Thread,说明此类是一个任务类,所以需要伴随着系统启动,后续新增的任务是通过此类进行调用。
private static final int maxPool = 3; //执行线程的最大数量
private static final int maxQueueNum = 3; //队列最大数量,要大于等于并发的线程数
private volatile boolean isPause = false; //是否暂停。加volatile是为了保证该变量的可见性:
//对它的写happen-before于后续的读,写入会刷新到主内存,其他线程读取时总能看到最新值。
//注意volatile只保证可见性和有序性,并不保证复合操作(如i++)的原子性。
下面看锁的定义
private static ReentrantLock lock = new ReentrantLock();
private static Condition condition = lock.newCondition();
看下面代码可知ReentrantLock是一个非公平锁
public ReentrantLock() {
sync = new NonfairSync();
}
并且是用AQS框架来定义锁的获取和释放
abstract static class Sync extends AbstractQueuedSynchronizer {
//......
}
看这段代码(nonfairTryAcquire),可以看出ReentrantLock默认是非公平锁,并且是互斥锁,也是可重入锁
final boolean nonfairTryAcquire(int acquires) {
final Thread current = Thread.currentThread();
int c = getState();
if (c == 0) {
if (compareAndSetState(0, acquires)) {
setExclusiveOwnerThread(current);
return true;
}
}
else if (current == getExclusiveOwnerThread()) {
int nextc = c + acquires;
if (nextc < 0) // overflow
throw new Error("Maximum lock count exceeded");
setState(nextc);
return true;
}
return false;
}
Condition维护了一个等待队列,执行await方法时,创建一个新节点,存入自己的等待队列中,并且释放AQS中的相同线程的锁,然后阻塞当前线程
public final void await() throws InterruptedException {
if (Thread.interrupted())
throw new InterruptedException();
Node node = addConditionWaiter();
int savedState = fullyRelease(node);
int interruptMode = 0;
while (!isOnSyncQueue(node)) {
LockSupport.park(this);
if ((interruptMode = checkInterruptWhileWaiting(node)) != 0)
break;
}
if (acquireQueued(node, savedState) && interruptMode != THROW_IE)
interruptMode = REINTERRUPT;
if (node.nextWaiter != null) // clean up if cancelled
unlinkCancelledWaiters();
if (interruptMode != 0)
reportInterruptAfterWait(interruptMode);
}
Condition通过signalAll方法恢复所有因执行await而阻塞的线程
public final void signalAll() {
if (!isHeldExclusively())
throw new IllegalMonitorStateException();
Node first = firstWaiter;
if (first != null)
doSignalAll(first);
}
为什么判断isPause需要用while而不是if?因为await可能发生虚假唤醒(spurious wakeup),而且线程被signal唤醒、重新拿到锁之后,isPause可能又被别的线程改回true,所以醒来后必须重新检查条件;volatile只保证可见性,代替不了这个"醒来后重检查"的动作。
lock.lock();
try{
while(isPause){
condition.await();
}
}catch (Exception e){
e.printStackTrace();
}finally{
lock.unlock();
}
不断地从BlockingQueue中获取数据并执行,使用maxPool数量的线程并发执行。JobWrapper对象实现了Runnable接口,并且包含了JobInterface(封装执行实体)和JobDetail(封装执行参数)。
private void doTask() throws InterruptedException {
BlockingQueue<JobWrapper> blockingDeque = QueueFactory.getSingleBlockQueue();
List<Runnable> list = new ArrayList<>();
for(int i = 0;i < maxQueueNum; i++){
if(!blockingDeque.isEmpty()){
list.add(blockingDeque.take());
}
}
if(list.size() > 0){
for(Runnable runnable : list){
service.submit(runnable);
}
}
}
需要注意的一点是JobFactory类实现了ApplicationContextAware,这样可以将JobInterface进行装配,使之兼容spring。
public JobInterface createJob(Class<? extends JobInterface> c) throws Exception {
JobInterface jobInterface = this.createJobInterface(c);
this.applicationContext.getAutowireCapableBeanFactory().autowireBean(jobInterface);
return jobInterface;
}
这样一个任务队列基本符合我的要求了,但是还有很多需要完善的地方,比如项目运行过程中断电了,或者由于其他原因终止掉了,下次项目启动后还能继续执行未完成的任务。
最后感谢大家的阅读,希望有大神能够指出这篇文章的不足之处,以便我能够改进,不懂之处,也欢迎在评论区留言。