大家好,我用python写了个多进程程序,由于听说multiprocessing里面自带的Queue会有丢失数据的情况,所以我使用了python里面的的队列模块即直接import Queue,但是发现此在多个子进程中无法共享数据,请帮我看看,谢谢,另外这两种队列应用场景是什么那,请多指教,谢谢!
import time
#from multiprocessing import Queue
import Queue
import orderopts
import epay_config
import public
from multiprocessing import Process,Pool
def put_orders(logfile,q):
'''
完成取工单放入到未处理工单队列中去
'''
while True:
orderDatas = orderopts.GetOrders(fileLog)
update_lists = []
for oneorder in orderDatas:
q.put(oneorder)
logfile.info('put order_id:%s in queue'%oneorder[0])
update_one = (epay_config.states['process'][0],epay_config.states['process'][1],oneorder[0])
update_lists.append(update_one)
orderopts.UpdateOrders(fileLog,update_lists)
time.sleep(epay_config.get_time)
logfile.info('-----New Get orders----')
def deal_orders(logfile,q,outq):
'''
处理工单,从队列中选择工单,然后发送HTTP请求,将处理结果放入到完工队列中去
'''
while True:
if not q.empty():
logfile.info('-----start deal order----')
oneorder = q.get()
logfile.info('deal order_id:%s '%oneorder[0])
#以后访问网络代码在此处添加
#回单
state = epay_config.states['sucess']
remark3 = u'sucess'
result_order = (state,remark3,oneorder[0])
outq.put(oneorder)
q.task_done()
time.sleep(epay_config.send_msg_time)
logfile.info('-----Deal new order----')
def finish_orders(logfile,q):
'''
完工工单
'''
while True:
if not q.empty():
logfile.info('-----start finish order----')
one_finish_order = q.get()
logfile.info('finish order_id:%s state:%s'%(one_finish_order[0],one_finish_order[1]))
finish_result = orderopts.UpdateOrders(logfile,one_finish_order)
logfile.info('finish result:%s '%finish_result)
q.task_done()
logfile.info('-----finish new order----')
if __name__ == "__main__":
# 保存未处理工单队列
get_queue= Queue.Queue()
#完工工单队列
finish_queue= Queue.Queue()
#定义日志文件
getLog = public.Init_log(epay_config.get_order_log)
delLog = public.Init_log(epay_config.deal_order_log)
finishLog = public.Init_log(epay_config.finish_order_log)
#定义子进程
get_process = Process(target=put_orders,args=(getLog,get_queue))
deal_process = Process(target=deal_orders,args=(delLog,get_queue,finish_queue))
finish_process = Process(target=finish_orders,args=(getLog,finish_queue))
#启动进程
deal_process.start()
finish_process.start()
get_process.start()