Multiprocessing: Process Synchronization (Message Queues)
1. Queues
>>> import queue
>>> q = queue.Queue()
>>> q.put(1)
>>> q.get()
1
>>> q.put(2)
>>> q.put(3)
>>> q.put(4)
>>> q.qsize()    # one item (the 1) was already taken out, so 3 remain
3
>>> q.full()     # no maxsize was set, so the queue never fills up
False
>>> q.maxsize()  # maxsize is an int attribute, not a method, so calling it fails
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
TypeError: 'int' object is not callable

>>> q = queue.Queue(3)   # limit the queue to 3 items
>>> q.put(4)
>>> q.put(5)
>>> q.put(6)
>>> q.full()             # the queue is now full
True
>>> q.put(7, timeout=2)  # without a timeout, put() would block forever; here it raises queue.Full after 2 seconds
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "C:\Python36\lib\queue.py", line 141, in put
    raise Full
queue.Full
>>> q.qsize()            # the queue currently holds 3 items
3
>>> q.maxsize            # the queue's capacity is 3
3
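As the queue.Full example shows, put() and get() block by default. A minimal sketch of the opposite direction, assuming only the standard queue module: a get() with a timeout, or get_nowait(), raises queue.Empty instead of waiting forever.

import queue

q = queue.Queue(maxsize=2)
q.put("a")
print(q.get())          # -> "a"

try:
    q.get(timeout=1)    # the queue is empty now; raises queue.Empty after 1 second
except queue.Empty:
    print("queue is empty")

try:
    q.get_nowait()      # same as get(block=False); raises queue.Empty immediately
except queue.Empty:
    print("still empty")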
2. Enqueueing & dequeueing from multiple processes
#encoding=utf-8
from multiprocessing import Process, Queue

def offer(queue):
    # enqueue if the queue is empty, otherwise dequeue and print
    if queue.empty():
        queue.put("Hello World")
    else:
        print(queue.get())

if __name__ == '__main__':
    # create a queue instance and pass it to the child processes
    q = Queue()
    p = Process(target=offer, args=(q,))
    p.start()
    print(q.get())        # dequeue what the first child put in
    q.put("huanghuang")
    m = Process(target=offer, args=(q,))
    m.start()
    p.join()
    m.join()
Output:
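For reference, multiprocessing.Queue.get() behaves like its queue.Queue counterpart: it blocks until another process puts something, and with a timeout it raises queue.Empty. A minimal sketch (the producer function and the 5-second timeout are illustrative assumptions, not part of the example above):

from multiprocessing import Process, Queue
import queue   # only needed for the queue.Empty exception raised on timeout

def producer(q):
    q.put("ping")

if __name__ == '__main__':
    q = Queue()
    p = Process(target=producer, args=(q,))
    p.start()
    print(q.get(timeout=5))   # waits up to 5 seconds for the child's put()
    try:
        q.get(timeout=1)      # nothing left; raises queue.Empty after 1 second
    except queue.Empty:
        print("no more messages")
    p.join()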
3. Process synchronization (using Queue)
#encoding=utf-8
from multiprocessing import Process, Queue
import time, random

# code run by the writer process
def write(q):
    for value in ['A', 'B', 'C']:
        print('Put %s to queue...' % value)
        q.put(value)
        time.sleep(random.random())

# code run by the reader process
def read(q):
    time.sleep(1)
    while not q.empty():
        print('Get %s from queue.' % q.get(True))
        time.sleep(1)   # give the writer time to put the next item

if __name__ == '__main__':
    # the parent process creates the Queue and passes it to each child
    q = Queue()
    pw = Process(target=write, args=(q,))
    pr = Process(target=read, args=(q,))
    # start the writer child process
    pw.start()
    # start the reader child process
    pr.start()
    # wait for both children to finish
    pw.join()
    pr.join()
    print("Done!")
Output:
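Note that read() above relies on sleeps to stay behind the writer; if the reader ever catches up while the writer is still sleeping, q.empty() turns True and the loop exits early. A more robust pattern is for the writer to put a sentinel value (None) when it is finished, the same "poison pill" idea used in the next section. A minimal sketch of that variant, assuming the same write/read roles as above (the None sentinel is an assumption of this sketch):

#encoding=utf-8
from multiprocessing import Process, Queue

def write(q):
    for value in ['A', 'B', 'C']:
        print('Put %s to queue...' % value)
        q.put(value)
    q.put(None)   # sentinel: tells the reader nothing more is coming

def read(q):
    while True:
        value = q.get()    # blocks until an item is available, no polling needed
        if value is None:  # sentinel received, writer is done
            break
        print('Get %s from queue.' % value)

if __name__ == '__main__':
    q = Queue()
    pw = Process(target=write, args=(q,))
    pr = Process(target=read, args=(q,))
    pw.start()
    pr.start()
    pw.join()
    pr.join()
    print("Done!")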
4. Process synchronization (using Queue & JoinableQueue)
#encoding=utf-8
import multiprocessing
import time

class Consumer(multiprocessing.Process):
    # consumer subclassed from Process
    def __init__(self, task_queue, result_queue):
        multiprocessing.Process.__init__(self)
        self.task_queue = task_queue
        self.result_queue = result_queue

    # override run(); when run() returns, the process exits
    def run(self):
        proc_name = self.name
        while True:
            next_task = self.task_queue.get()
            if next_task is None:
                # Poison pill means shutdown
                print('%s: Exiting' % proc_name)
                self.task_queue.task_done()
                break
            print('%s: %s' % (proc_name, next_task))
            answer = next_task()   # invokes Task.__call__()
            self.task_queue.task_done()
            self.result_queue.put(answer)
        return

class Task(object):
    def __init__(self, a, b):
        self.a = a
        self.b = b

    def __call__(self):
        time.sleep(0.1)   # pretend to take some time to do the work
        return '%s * %s = %s' % (self.a, self.b, self.a * self.b)

    def __str__(self):
        return '%s * %s' % (self.a, self.b)

if __name__ == '__main__':
    # Establish communication queues; every put() into the JoinableQueue must be
    # matched by a task_done() before tasks.join() will return
    tasks = multiprocessing.JoinableQueue()   # queue holding the tasks
    results = multiprocessing.Queue()         # queue holding the results

    # Start one consumer per CPU core
    num_consumers = multiprocessing.cpu_count()
    print('Creating %d consumers' % num_consumers)
    consumers = [Consumer(tasks, results) for i in range(num_consumers)]
    for w in consumers:
        w.start()

    # Enqueue jobs
    num_jobs = 10
    for i in range(num_jobs):
        tasks.put(Task(i, i))

    # Add a poison pill for each consumer to break its loop
    for i in range(num_consumers):
        tasks.put(None)

    # Wait for all of the tasks to finish
    tasks.join()

    # Start printing results
    while num_jobs:
        result = results.get()
        print('Result: %s' % result)
        num_jobs -= 1
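The key contract in this example is that JoinableQueue counts unfinished tasks: every put() increments the counter, every task_done() decrements it, and join() blocks until it reaches zero. A stripped-down sketch of just that contract (the worker function and the three dummy items are assumptions of this sketch, not part of the example above):

import multiprocessing

def worker(q):
    while True:
        item = q.get()
        if item is None:      # poison pill: stop the worker
            q.task_done()
            break
        print('processed', item)
        q.task_done()         # must be called once per get(), or join() never returns

if __name__ == '__main__':
    q = multiprocessing.JoinableQueue()
    p = multiprocessing.Process(target=worker, args=(q,))
    p.start()
    for i in range(3):
        q.put(i)
    q.put(None)
    q.join()                  # returns only after every item has been task_done()'d
    p.join()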