有开发经验的朋友应该都听过数据库连接池的概念,本节将简单的描述“池资源”。首先简单描述一下连接池中池化数据库连接资源的重要性:
- 频繁的创建和断开与数据库之间的连接是相当消耗性能的,增加了程序的内耗;
- 连接资源未释放或释放不合理导致数据库连接资源被长时间挂起进而影响数据库性能;
- 如果能在程序级别控制访问数据库的各种策略就再好不过了,当然数据库本身也支持此类配置;对于多个应用程序而言,如能够独立地维护自己的数据库连接资源是一种不错的选择。
- 其它,不可否认数据库连接池能给我们带来极大的方便,当然我们永远都记得任何东西自然有它不完美的存在。
笔者曾有一个幼稚的想法:假如我有若干资源(也许是数据库连接、JavaBean、自行车),我该如何使用上述“池化思想”来改善我对现有资源的不合理使用?第一想法当然是借鉴一下连接池的思想,于是遇到如下问题:
- 池化的资源该如何保存 Array ?,Map?,List?
- 池化的资源该如何监管?谁拿走了?还剩余多少?谁该还回来了?
- 最重要的如何标记一个资源被借走了?又如何标记一个资源被还回来?
此前没有此类的开发经验,理所当然的我认为自己需要从头开始写一个能满足上述需要的程序出来(为什么是程序不是框架,就因为水平太低,脑子里根本没有架构的概念)。到此为止上述的问题都要一一解决,当然问题肯定层出不穷,我自然会放弃!开发者永远不要禁锢自己,退一步讲,有更适合现在这种情况的选择,你只需要多问。果不其然,我打听到了。现在目标很清楚:有一个开源项目正好解决我的问题——commons-pool。不得不说,小伙子!不懂就要问!肯定有人会!不会了再说!
简单说一下,commons-pool 是一个开源项目,其目的就是为解决资源池化而生,对我而言完成任务是首要的,这也是还能写这篇文章的原因,学而不思则罔,今天简单回顾一下commons-pool (http://commons.apache.org/proper/commons-pool/)的用法:
- 顶级接口PooledObjectFactory,用来创建池中的对象
// Top-level commons-pool factory interface: the pool calls these hooks over a
// pooled object's lifecycle (create -> activate -> [borrowed] -> passivate -> validate -> destroy).
public interface PooledObjectFactory<T> {
    PooledObject<T> makeObject();                // create a new pooled object
    void activateObject(PooledObject<T> obj);    // activate an object before it is borrowed
    void passivateObject(PooledObject<T> obj);   // passivate an object after it is returned
    boolean validateObject(PooledObject<T> obj); // check the object is still usable
    void destroyObject(PooledObject<T> obj);     // destroy an object being evicted from the pool
}
- 顶级接口ObjectPool,代表一个池对象
readerPool = new GenericObjectPool<MyReader>(new ReaderObjectFactory(tool), config);
以下将贴出一段开发中的案例供参考:
- PooledObjectFactory实现
/**
 * {@code PoolableObjectFactory} (commons-pool 1.x) implementation that creates
 * {@link MyReader} instances, each wrapping an {@code ArchiveReader} obtained
 * from the shared {@code ArchiveTool}.
 */
public class ReaderObjectFactory implements PoolableObjectFactory<MyReader> {

    private ArchiveTool tool;

    public ReaderObjectFactory(ArchiveTool tool) {
        this.setTool(tool);
    }

    /**
     * Creates a new pooled reader.
     *
     * Fix: the original caught the creation exception, printed it, and still
     * returned the {@code MyReader} — leaving a half-initialized object (null
     * inner reader, {@code isValid} never set) in the pool. A pool factory must
     * let {@code makeObject} throw so {@code borrowObject} propagates the
     * failure instead of handing out a broken object.
     *
     * @return a fully initialized, valid reader
     * @throws Exception if the underlying ArchiveReader cannot be created
     */
    public MyReader makeObject() throws Exception {
        MyReader reader = new MyReader();
        ArchiveReader ar = tool.createArchiveReader();
        reader.setIsValid(true);
        reader.setReader(ar);
        return reader;
    }

    /** Best-effort release of the native reader; failures are logged, not rethrown. */
    public void destroyObject(MyReader obj) throws Exception {
        try {
            obj.getReader().release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** An object is valid while its {@code isValid} flag is set; any error counts as invalid. */
    public boolean validateObject(MyReader obj) {
        try {
            return obj.getIsValid();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return false;
    }

    /** No activation step is needed for this resource. */
    public void activateObject(MyReader obj) throws Exception {
    }

    /** No passivation step is needed for this resource. */
    public void passivateObject(MyReader obj) throws Exception {
    }

    public ArchiveTool getTool() {
        return tool;
    }

    public void setTool(ArchiveTool tool) {
        this.tool = tool;
    }
}
-
资源使用案例
-
/**
 * Static facade over {@code ArchiveTool} for an archive/DFS store: one-time
 * initialization ({@link #init}), file upload/download, date-named bucket
 * management, a commons-pool {@code GenericObjectPool} of {@code MyReader}s for
 * downloads, and a per-thread LRU cache ({@code ThreadLocal<LRUAri<ArchiveWriter>>})
 * of writers keyed by bucket for uploads.
 *
 * NOTE(review) — issues spotted while documenting; code left untouched:
 * <ul>
 * <li>{@code init}: double-checked locking on a non-volatile {@code tool} field is
 *     not safe under the Java memory model; worse, a thread that blocks on the
 *     lock while another thread completes initialization falls through to the
 *     trailing {@code return false} even though init actually succeeded.</li>
 * <li>{@code init} seeds the writer {@code ThreadLocal} only for the thread that
 *     ran init; {@code getWriter} already handles the null case, so that seeding
 *     only benefits one thread.</li>
 * <li>{@code downloadFile}: if {@code readerPool.borrowObject()} throws, {@code reader}
 *     is still null, so the catch block's {@code reader.setIsValid(false)} throws
 *     NPE, and the finally block calls {@code returnObject(null)}.</li>
 * <li>{@code downloadFile} read loop passes the TOTAL length {@code len} (not the
 *     remaining count) to {@code ar.read(buf, len)} and writes each chunk from
 *     {@code buf} offset 0 — correctness depends on {@code ArchiveReader.read}
 *     filling from the buffer start each call; TODO confirm the native API contract.</li>
 * <li>{@code uploadFile}: {@code aw.write(img, len - offset)} always passes the full
 *     array with a remaining-count argument — presumably the writer tracks its own
 *     position; verify. On failure the method returns {@code archname} (null) and the
 *     writer is neither closed nor evicted from the LRU cache; {@code aw = null} in
 *     finally releases nothing.</li>
 * <li>Pool is configured with {@code maxActive = 1}, serializing all downloads.</li>
 * <li>{@code main} embeds credentials and hard-coded paths — test scaffolding only.</li>
 * </ul>
 */
public class DHDFSFileManager { private static Logger Log = Logger.getLogger(DHDFSFileManager.class); private static EFSConfig cfg; private static ArchiveTool tool; //对象池 private static ObjectPool<MyReader> readerPool; //线程绑定 private static ThreadLocal<LRUAri<ArchiveWriter>> thread = new ThreadLocal<LRUAri<ArchiveWriter>>(); public static void main(String[] args) throws IOException { if (DHDFSFileManager.init("172.16.248.160", 38100, "root", "123456")) { InputStream in = DHDFSFileManager.class.getResourceAsStream("car.jpg"); String bucket = genBucket(new Date()); FileOutputStream o = new FileOutputStream("/test/T1/load.jpg"); byte[] buf = IOUtils.toByteArray(in); o.write(buf); o.close(); String fileName = DHDFSFileManager.uploadFile(buf, bucket); System.out.println("fileName:" + fileName); byte[] context = DHDFSFileManager .downloadFile("2016-11-25/archivefile-2016-11-25-131859-00C948B800000001:64/448092.jpg"); FileOutputStream out = new FileOutputStream(new File("./load.jpg")); out.write(context); out.close(); System.out.println(context.length); } } /** * <h1>初始化ArchiveTool,在使用之前须先调用初始化方法 * * @param address * DFS host name or IP address * @param port * host port * @param userName * user name * @param password * user pass * * @return success ?true : false */ public static boolean init(String address, int port, String userName, String password) { if (tool == null) { synchronized (DHDFSFileManager.class) { if (tool == null) { cfg = new EFSConfig.ByReference(); cfg.address = address; cfg.port = port; cfg.userName = userName; cfg.password = password; tool = new ArchiveTool(); if (!tool.init(cfg)) { Log.error(cfg.address + "_" + cfg.port + "_" + cfg.userName + "_" + cfg.password); Log.error("init ArchiveTool fail!!"); System.out.println("init ArchiveTool fail!!"); return false; } Log.info("init ArchiveTool success"); if (!tool.efsSetLog("./logs/", EFSLogLevel.INFOF)) { Log.error("init SystemLog fail!!"); System.out.println("init SystemLog fail!!"); return false; } 
// (continuation of init(): bucket creation for today's date, reader-pool construction, writer ThreadLocal seeding)
Log.info("init SystemLog success"); // 初始化Bucket String bucket = genBucket(new Date()); if (!tool.isBucketValid(bucket)) { if (!tool.createBucket(bucket)) { System.out.println("create bucket:" + bucket + " fail!!"); return false; } } Log.info("init Bucket success"); // 初始化对象池 Config config = new Config(); config.maxActive = 1; config.minIdle = 1; readerPool = new GenericObjectPool<MyReader>(new ReaderObjectFactory(tool), config); //初始化绑定到线程的变量 thread.set(new LRUAri<ArchiveWriter>(5)); Log.info("init Writer Pool success"); System.out.println("init succeess and END"); return true; } } } else { System.out.println("arready init!!"); return false; } return false; } /** * <h1>上传文件 * * @param img * image byte array * @param bucket * bucket name * @return filename */ public static String uploadFile(byte[] img, String bucket) { long start = System.currentTimeMillis(); System.out.println("sdtart upload,time:" + start); ArchiveWriter aw = null; String archname = null; try { checkOrCreateBucket(bucket); // aw.setBucket(bucket); aw = getWriter(bucket); aw.open("jpg"); int len = img.length; System.out.println("length:" + len); int writeData = 0; int offset = 0; while (writeData < len) { int ret = aw.write(img, len - offset); if (ret < 0) { System.out.println("write faile"); throw new Exception("write faile"); } System.out.println(ret); writeData += ret; offset += ret; } System.out.println("writeSize:" + writeData); archname = aw.close(); Log.debug("upload{time:" + new Date().toString() + ",fileName:" + archname + "}"); System.out.println("end upload,time:" + System.currentTimeMillis()); System.out.println("interval time:" + (System.currentTimeMillis() - start)); Log.info("文件上传成功,FileName:" + archname); // writer.setIsValid(false); } catch (Exception e) { e.printStackTrace(); Log.error("文件上传失败!!"); return archname; } finally { aw = null; } return archname; } /** * <h1>下载文件 * * @param archname * fileName * @return byte array image content */ public static byte[] 
downloadFile(String archname) { long start = System.currentTimeMillis(); System.out.println("start download,time:" + start); MyReader reader = null; ArchiveReader ar = null; ByteArrayOutputStream out = null; try { reader = readerPool.borrowObject(); ar = reader.getReader(); ArchiveInfo ai = tool.getArchiveInfos(archname); String fileName = ai.getFilename(); int len = (int) ar.open(fileName); System.out.println("fileLen:" + len); out = new ByteArrayOutputStream(); byte[] buf = new byte[len]; int getData = 0; while (getData < len) { int ret = ar.read(buf, len); if (ret > 0) { getData += ret; out.write(buf, 0, ret); } if (ret == -1) { System.out.println("read faile"); break; } } Log.debug("upload{time:" + new Date().toString() + ",fileName:" + archname + "}"); ar.close(); Log.info("文件下载成功,FileName:" + archname); System.out.println("end download,time:" + System.currentTimeMillis()); System.out.println("interval time :" + (System.currentTimeMillis()-start)); Log.debug("interval time :" + (start - System.currentTimeMillis())); return out.toByteArray(); } catch (Exception e) { // 失败 reader.setIsValid(false); e.printStackTrace(); Log.debug("downloadfail{time:" + new Date().toString() + ",interval time :" + (start - System.currentTimeMillis()) + ",fileName:" + archname + "}"); System.out.println("文件下载失败!!"); return null; } finally { try { if (out != null) { out.close(); } } catch (Exception e) { e.printStackTrace(); } try { //释放资源 readerPool.returnObject(reader); } catch (Exception e) { e.printStackTrace(); } } } /** * <h1>get a bucket * * @param date * time * @return bucket name format by "yyyy-MM-dd" */ public static String genBucket(Date date) { String pattren = "yyyy-MM-dd"; return new SimpleDateFormat(pattren).format(date); } /** * <h1>get a bucket * * @param date * time * @return bucket name format by "yyyy-MM-dd" */ public static String genBucket(String date) { String pattren = "yyyy-MM-dd"; return date.substring(0, pattren.length()); } /** * <h1>get a bucket * * 
@param bucket * bucket name * @return delete success ?true:false */ public static boolean deleteBucket(String bucket) { try { if (!tool.isBucketValid(bucket)) { Log.error("bucket:" + bucket + " not exist"); return true; } else { if (tool.removeBucket(bucket)) { Log.info("delete bucket:" + bucket); return true; } else { Log.error("delete bucket :" + bucket + " error!!"); return false; } } } catch (Exception e) { e.printStackTrace(); } return false; } public static Object obj = ""; public static void checkOrCreateBucket(String bucket) { try { if (!tool.isBucketValid(bucket)) { synchronized (obj) { if (!tool.isBucketValid(bucket)) { tool.createBucket(bucket); } } } } catch (Exception e) { e.printStackTrace(); } } public static Object obj1 = ""; /** * <h1>get ArchiveWriter from Thread Local * * @param bucket * bucket name * @return ArchiveWriter write file */ public static ArchiveWriter getWriter(String bucket) { LRUAri<ArchiveWriter> writers = thread.get(); Log.debug(writers); if (writers == null) { writers = new LRUAri<ArchiveWriter>(5); thread.set(writers); } ArchiveWriter writer = writers.get(bucket); if (writer == null) { synchronized (obj1) { if (writers.get(bucket) == null) { writer = tool.createArchiveWriter(); writer.init((byte) 3, (byte) 1, bucket); writers.put(bucket, writer); thread.set(writers); } } } return writer; } }