import httplib2
from multiprocessing import Lock, Process, Queue, current_process
def worker(work_queue, done_queue):
    """Consume URLs from work_queue until the 'STOP' sentinel is seen.

    For each URL pulled from work_queue, look up its HTTP status via
    print_site_status() and put a human-readable result line on done_queue.
    If any exception escapes, a failure line is put on done_queue instead
    and the worker stops.

    Args:
        work_queue: multiprocessing.Queue of URL strings, terminated by 'STOP'.
        done_queue: multiprocessing.Queue receiving result/failure strings.

    Returns:
        True in all cases (success or reported failure).
    """
    # Pre-bind url so the except handler can still format a message even if
    # the very first work_queue.get() raises (original code would hit a
    # NameError inside the handler in that case).
    url = None
    try:
        for url in iter(work_queue.get, 'STOP'):
            status_code = print_site_status(url)
            done_queue.put("%s - %s got %s." %
                           (current_process().name, url, status_code))
    except Exception as e:
        # `except Exception as e` replaces the Python-2-only `except Exception, e`
        # form; it is valid on Python 2.6+ and required on Python 3.
        done_queue.put("%s failed on %s with: %s" %
                       (current_process().name, url, e))
    return True
def print_site_status(url):
    """Fetch *url* with a 10s timeout and return its HTTP status.

    Returns the 'status' header value, or 'no response' if the response
    carries no status entry.
    """
    client = httplib2.Http(timeout=10)
    response_headers, _content = client.request(url)
    return response_headers.get('status', 'no response')
def main():
    """Check a fixed list of webcomic sites concurrently and print results.

    Fills a shared work queue with URLs, starts `workers` processes running
    worker(), then drains the done queue and prints one status line per site.
    """
    sites = (
        'http://penny-arcade.com/',
        'http://reallifecomics.com/',
        'http://sinfest.net/',
        'http://userfriendly.org/',
        'http://savagechickens.com/',
        'http://xkcd.com/',
        'http://duelinganalogs.com/',
        'http://cad-comic.com/',
        'http://samandfuzzy.com/',
    )
    workers = 4
    work_queue = Queue()
    done_queue = Queue()
    processes = []
    for url in sites:
        work_queue.put(url)
    for _ in range(workers):
        p = Process(target=worker, args=(work_queue, done_queue))
        p.start()
        processes.append(p)
        # BUG FIX: each worker consumes exactly one 'STOP' sentinel, so one
        # sentinel must be queued PER worker. The original queued a single
        # 'STOP' after this loop, leaving workers-1 processes blocked forever
        # in work_queue.get() and p.join() deadlocked.
        work_queue.put('STOP')
    for p in processes:
        p.join()
    done_queue.put('STOP')
    for status in iter(done_queue.get, 'STOP'):
        print(status)  # print() call form works on both Python 2 and 3
if __name__ == '__main__':
    main()

python 多进程实例 进程间的通信
最新推荐文章于 2025-10-13 22:15:17 发布
本文展示了一个使用Python多进程技术来并发检查多个网站状态的例子。通过定义工作队列和完成队列,该程序能够有效地分配任务给不同的进程,并收集每个网站的状态码。此方法提高了网站检查效率。
195

被折叠的评论
为什么被折叠?



