DBUtils是Python的一个用于实现数据库连接池的模块,并允许对非线程安全的数据库接口进行线程安全包装。
连接的两种模式:
- 为每个线程创建一个连接,线程即使调用了close方法,也不会关闭,只是把连接重新放到连接池,供自己线程再次使用。当线程终止时,连接自动关闭。
# Mode 1: one persistent connection per thread (PersistentDB).
POOL = PersistentDB(
    creator=pymysql,  # module used to create the underlying connections
    maxusage=None,  # max reuses of a single connection; None = unlimited
    setsession=[],  # commands run once per session, e.g. ["set datestyle to ...", "set time zone ..."]
    ping=0,
    # When to ping the MySQL server to check it is still reachable:
    # 0 = None = never, 1 = default = whenever it is requested,
    # 2 = when a cursor is created, 4 = when a query is executed, 7 = always
    closeable=False,
    # False: conn.close() is effectively ignored and the connection is kept
    # for reuse by this same thread; it is really closed only when the thread
    # terminates.  True: conn.close() actually closes the connection, so a
    # later pool.connection() call in this thread would fail because the
    # connection is genuinely gone (pool.steady_connection() can fetch a
    # fresh one).
    threadlocal=None,  # thread-local object that stores each thread's own connection
    host='127.0.0.1',
    port=3306,
    user='root',
    password='123',
    database='pooldb',
    charset='utf8'
)
def func():
    """Fetch all rows of ``tb1`` over this thread's persistent connection.

    ``shareable=False`` is accepted by PersistentDB only for signature
    compatibility with PooledDB; a persistent connection is never shared
    between threads anyway.

    Returns:
        The result of ``fetchall()`` for ``select * from tb1``.
    """
    conn = POOL.connection(shareable=False)
    try:
        cursor = conn.cursor()
        try:
            cursor.execute('select * from tb1')
            result = cursor.fetchall()
        finally:
            # Fix: release the cursor even when execute/fetch raises.
            cursor.close()
    finally:
        # With closeable=False this does not really close the socket; it
        # just keeps the connection around for reuse by this same thread.
        conn.close()
    return result


func()
为什么每个连接都只会被一个线程使用呢?
def connection(self, shareable=False):
    """Return this thread's steady, persistent DB-API 2 connection.

    The ``shareable`` parameter exists only for signature compatibility
    with ``PooledDB.connection()``; a persistent connection is by design
    never shared with other threads.
    """
    local = self.thread
    try:
        # Fast path: this thread already owns a cached connection.
        active = local.connection
    except AttributeError:
        # First request from this thread: open a fresh steady connection.
        active = self.steady_connection()
        if not active.threadsafety():
            raise NotSupportedError("Database module is not thread-safe.")
        # Remember it in thread-local storage for subsequent calls.
        local.connection = active
    active._ping_check()
    return active
- 连接池中建好连接,所有的线程共享连接(由于pymysql、MySQLdb等threadsafety值为1,所以该模式连接池中的连接会被所有线程共享)。
import time
import pymysql
import threading
from DBUtils.PooledDB import PooledDB, SharedDBConnection
# Mode 2: a real pool of connections shared among threads (PooledDB).
POOL = PooledDB(
    creator=pymysql,  # module used to create the underlying connections
    maxconnections=6,  # max connections allowed in the pool; 0/None = unlimited
    mincached=2,  # idle connections opened at initialization; 0 = none
    maxcached=5,  # max idle connections kept in the pool; 0/None = unlimited
    maxshared=3,  # max shared connections; 0/None = share all.  NOTE(review):
    # the original text claims this is ineffective because pymysql/MySQLdb
    # report threadsafety == 1 -- confirm against the PooledDB constructor.
    blocking=True,  # if no connection is free: True = block and wait, False = raise an error
    maxusage=None,  # max reuses of a single connection; None = unlimited
    setsession=[],  # commands run once per session, e.g. ["set datestyle to ...", "set time zone ..."]
    ping=0,
    # When to ping the MySQL server to check it is still reachable:
    # 0 = None = never, 1 = default = whenever it is requested,
    # 2 = when a cursor is created, 4 = when a query is executed, 7 = always
    host='127.0.0.1',
    port=3306,
    user='root',
    password='123',
    database='pooldb',
    charset='utf8'
)
def func():
    """Fetch all rows of ``tb1`` with a connection borrowed from the pool.

    ``POOL.connection()`` first checks that the number of live connections
    is below the limit; if not, it waits (blocking=True) or raises
    TooManyConnections.  Otherwise it prefers an idle SteadyDBConnection
    from the cache, creating a fresh one only when the cache is empty, and
    returns it wrapped in a PooledDedicatedDBConnection.  Closing the
    wrapper puts the connection back into the pool for other threads.

    Returns:
        The result of ``fetchall()`` for ``select * from tb1``.
    """
    conn = POOL.connection()
    try:
        cursor = conn.cursor()
        try:
            cursor.execute('select * from tb1')
            result = cursor.fetchall()
        finally:
            # Fix: the original example leaked the cursor; close it here.
            cursor.close()
    finally:
        # Returns the connection to the pool (it is not really closed).
        conn.close()
    return result


func()
看下这个连接是怎么做的?
def connection(self, shareable=True):
    """Get a connection from the pool.

    If ``shareable`` is true and the pool permits shared connections
    (``self._maxshared`` non-zero), the connection handed out may be
    shared with other threads; otherwise a dedicated, single-thread
    connection is returned.  Blocks in ``_wait_lock()`` while the pool
    is exhausted.
    """
    if shareable and self._maxshared:
        # Shared branch: hand out a (possibly already shared) connection.
        self._lock.acquire()
        try:
            # While the shared cache is empty AND the pool is already at
            # its hard connection limit, wait until notified (or an
            # exception is raised in _wait_lock).
            while (not self._shared_cache and self._maxconnections
                    and self._connections >= self._maxconnections):
                self._wait_lock()
            if len(self._shared_cache) < self._maxshared:
                # shared cache is not full, get a dedicated connection
                try:  # first try to get it from the idle cache
                    con = self._idle_cache.pop(0)
                except IndexError:  # else get a fresh connection
                    con = self.steady_connection()
                else:
                    # An idle connection was found -- verify it is alive.
                    con._ping_check()  # check this connection
                con = SharedDBConnection(con)
                # One more connection is now checked out of the pool.
                self._connections += 1
            else:  # shared cache full or no more connections allowed
                self._shared_cache.sort()  # least shared connection first
                # Take the least-shared connection out of the cache.
                con = self._shared_cache.pop(0)  # get it
                while con.con._transaction:
                    # do not share connections which are in a transaction
                    self._shared_cache.insert(0, con)
                    self._wait_lock()
                    self._shared_cache.sort()
                    con = self._shared_cache.pop(0)
                con.con._ping_check()  # check the underlying connection
                con.share()  # increase share of this connection
                # Put the connection back into the shared cache (append;
                # ordering is restored by the sort() calls above).
                self._shared_cache.append(con)
                self._lock.notify()
        finally:
            self._lock.release()
        con = PooledSharedDBConnection(self, con)
    else:  # try to get a dedicated connection
        # Dedicated branch: the connection is used by one thread only.
        self._lock.acquire()
        try:
            # Wait while the pool is at its hard connection limit.
            while (self._maxconnections
                    and self._connections >= self._maxconnections):
                self._wait_lock()
            # connection limit not reached, get a dedicated connection
            try:  # first try to get it from the idle cache
                con = self._idle_cache.pop(0)
            except IndexError:  # else get a fresh connection
                con = self.steady_connection()
            else:
                con._ping_check()  # check connection
            con = PooledDedicatedDBConnection(self, con)
            self._connections += 1
        finally:
            self._lock.release()
    return con
def dedicated_connection(self):
    """Alias for connection(shareable=False).

    Returns a connection that is never shared with other threads.
    """
    return self.connection(shareable=False)