多进程系列:进程中包含子进程。
如果需要在多进程中创建子进程,可以使用 multiprocessing.Process 类来创建子进程,而不是使用 multiprocessing.Pool(Pool 的工作进程是守护进程,不能再派生子进程)。
import multiprocessing
import time
from pprint import pprint
# 假设以下是五个分类模型函数
def classify_model_1(image_path, queue):
    """Simulate classification model 1 and push its result onto *queue*.

    Args:
        image_path: Path of the image to classify.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("classify_model_1")
    queue.put(f"模型1分类结果: {image_path}")
def classify_model_2(image_path, queue):
    """Simulate classification model 2 and push its result onto *queue*.

    Args:
        image_path: Path of the image to classify.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("classify_model_2")
    queue.put(f"模型2分类结果: {image_path}")
def classify_model_3(image_path, queue):
    """Simulate classification model 3 and push its result onto *queue*.

    Args:
        image_path: Path of the image to classify.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("classify_model_3")
    queue.put(f"模型3分类结果: {image_path}")
def classify_model_4(image_path, queue):
    """Simulate classification model 4 and push its result onto *queue*.

    Args:
        image_path: Path of the image to classify.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("classify_model_4")
    queue.put(f"模型4分类结果: {image_path}")
def classify_model_5(image_path, queue):
    """Simulate classification model 5 and push its result onto *queue*.

    Args:
        image_path: Path of the image to classify.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("classify_model_5")
    queue.put(f"模型5分类结果: {image_path}")
# 假设以下是五个分割模型函数
def segment_model_1(image_path, queue):
    """Simulate segmentation model 1 and push its result onto *queue*.

    Args:
        image_path: Path of the image to segment.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("segment_model_1")
    queue.put(f"模型1分割结果: {image_path}")
def segment_model_2(image_path, queue):
    """Simulate segmentation model 2 and push its result onto *queue*.

    Args:
        image_path: Path of the image to segment.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("segment_model_2")
    queue.put(f"模型2分割结果: {image_path}")
def segment_model_3(image_path, queue):
    """Simulate segmentation model 3 and push its result onto *queue*.

    Args:
        image_path: Path of the image to segment.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("segment_model_3")
    queue.put(f"模型3分割结果: {image_path}")
def segment_model_4(image_path, queue):
    """Simulate segmentation model 4 and push its result onto *queue*.

    Args:
        image_path: Path of the image to segment.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("segment_model_4")
    queue.put(f"模型4分割结果: {image_path}")
def segment_model_5(image_path, queue):
    """Simulate segmentation model 5 and push its result onto *queue*.

    Args:
        image_path: Path of the image to segment.
        queue: Queue-like object (``multiprocessing.Queue``) used to return
            the result string to the parent process.
    """
    time.sleep(1)  # simulate inference latency
    print("segment_model_5")
    queue.put(f"模型5分割结果: {image_path}")
# Per-image worker: fan one image out to all models, each in its own subprocess.
def process_image(image_path, result_dict, result_key):
    """Run all classification and segmentation models on *image_path* in parallel.

    Each model runs in its own child process and reports back through a
    per-group ``multiprocessing.Queue``. After all children have finished,
    the drained results are stored in the shared *result_dict* under
    *result_key* as a ``(classify_results, segment_results)`` tuple.

    Args:
        image_path: Path of the image to process.
        result_dict: Shared mapping (e.g. ``Manager().dict()``) for results.
        result_key: Key under which this image's results are stored.
    """
    classify_queue = multiprocessing.Queue()
    segment_queue = multiprocessing.Queue()

    # Build both process groups from the model-function lists instead of
    # spelling out ten near-identical Process(...) constructions.
    classify_models = (
        classify_model_1,
        classify_model_2,
        classify_model_3,
        classify_model_4,
        classify_model_5,
    )
    segment_models = (
        segment_model_1,
        segment_model_2,
        segment_model_3,
        segment_model_4,
        segment_model_5,
    )
    classify_processes = [
        multiprocessing.Process(target=fn, args=(image_path, classify_queue))
        for fn in classify_models
    ]
    segment_processes = [
        multiprocessing.Process(target=fn, args=(image_path, segment_queue))
        for fn in segment_models
    ]

    # Start everything, then wait for every child to finish.
    for p in classify_processes + segment_processes:
        p.start()
    for p in classify_processes + segment_processes:
        p.join()

    # Drain the queues. Queue.empty() is generally racy, but here every
    # producer process has already been joined, so no more items can arrive.
    classify_results = []
    while not classify_queue.empty():
        classify_results.append(classify_queue.get())
    segment_results = []
    while not segment_queue.empty():
        segment_results.append(segment_queue.get())

    # Publish the combined results through the shared dict.
    result_dict[result_key] = (classify_results, segment_results)
# Entry point: process every image in its own worker process and time the run.
if __name__ == "__main__":
    tic = time.time()

    # Example workload: eight image paths.
    image_paths = [
        "image1.jpg",
        "image2.jpg",
        "image3.jpg",
        "image4.jpg",
        "image5.jpg",
        "image6.jpg",
        "image7.jpg",
        "image8.jpg",
    ]

    # A Manager-backed dict lets the worker processes share their results
    # with the parent process.
    manager = multiprocessing.Manager()
    result_dict = manager.dict()

    # One worker process per image; the index doubles as the result key.
    processes = []
    for i, image_path in enumerate(image_paths):
        p = multiprocessing.Process(
            target=process_image, args=(image_path, result_dict, i)
        )
        processes.append(p)
        p.start()
    for p in processes:
        p.join()

    # Print results in image order; iterating the manager dict directly does
    # not guarantee insertion/key order across processes.
    for key in sorted(result_dict):
        pprint(result_dict[key])

    toc = time.time()
    print("程序运行时间:", toc - tic)
    # 程序运行时间: 31.997563123703003