python 多进程共享变量
以下代码创建 10 个进程，并发访问一个由 multiprocessing.Manager 托管的共享字典。
#!/usr/bin/python
#-*- coding: UTF-8 -*-
#
# @file: test.py
#
# 测试
#
# @author: master@pepstack
#
# @create: $create$
#
# @update:
#
########################################################################
import os, sys, signal, shutil, inspect, commands
import importlib, hashlib, yaml
import time, datetime
import optparse, ConfigParser
import multiprocessing
from multiprocessing import Process, Queue, Manager
from Queue import Empty, Full
########################################################################
# application specific
# Absolute, symlink-resolved path of the running script.
APPFILE = os.path.realpath(sys.argv[0])
# Directory the script lives in.
APPHOME = os.path.dirname(APPFILE)
# Script base name with its extension stripped.
APPNAME,_ = os.path.splitext(os.path.basename(APPFILE))
# Application version string.
APPVER = "1.0.0"
# One-line usage description.
APPHELP = "Test Python Scripts."
#######################################################################
def process_worker(dictValues, lock):
    """Worker body: atomically increment the two shared counters.

    Args:
        dictValues: shared mapping (a ``Manager().dict()`` proxy in this
            script) holding the 'shanghai' and 'beijing' counters.
        lock: shared lock guarding the read-modify-write; ``d[k] += 1``
            on a manager proxy is two round-trips (get, then set), so
            without the lock concurrent workers could lose updates.
    """
    # Parenthesized single-expression print works identically on
    # Python 2 and 3 (the original bare print statement is 2-only).
    print(multiprocessing.current_process().name)
    # Hold the lock across both increments so the pair updates atomically.
    with lock:
        dictValues['shanghai'] += 1
        dictValues['beijing'] += 1
########################################################################
# main function
#
def main():
    """Spawn 10 worker processes that concurrently bump two shared counters.

    Creates a single Manager server hosting both the shared dict and the
    lock (the original code started two separate Manager instances — two
    extra server processes — one per shared object), seeds the counters,
    runs the workers, waits for them, and prints before/after values.
    """
    # One manager server process serves both shared objects.
    mgr = Manager()
    dictValues = mgr.dict()
    lock = mgr.Lock()
    dictValues['shanghai'] = 100000000
    dictValues['beijing'] = 200000000
    # %-formatted single-argument print matches the Python 2 comma form's
    # output exactly and is valid on both Python 2 and 3.
    print("[1] shanghai= %d" % dictValues['shanghai'])
    print("[1] beijing= %d" % dictValues['beijing'])
    # Launch the worker processes.
    p_workers = []
    for _ in range(10):
        p = Process(target=process_worker, args=(dictValues, lock))
        # Daemon flag is belt-and-braces: we join() every child below.
        p.daemon = True
        p.start()
        p_workers.append(p)
    # Block until every child process has exited.
    for p in p_workers:
        p.join()
    print("[2] shanghai= %d" % dictValues['shanghai'])
    print("[2] beijing= %d" % dictValues['beijing'])
########################################################################
#
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
    sys.exit(0)