Python multiprocessing

import json
import time
from multiprocessing import Pool, cpu_count

import numpy as np
from billiard import get_context
from billiard.process import Process
from loky import get_reusable_executor


def fill_json_with_sin_values_serial(data_array):
    """Compute sine values element by element (serial baseline)."""
    start_time = time.time()
    result_dict = {
        "sin_values": []
    }
    for value in data_array:
        result_dict["sin_values"].append(np.sin(value))
    # json_result = json.dumps(result_dict, indent=4)
    print(f"Serial execution time: {time.time() - start_time:.2f} seconds")
    # return json_result
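
# A fully vectorized serial baseline (a sketch, not from the original post):
# calling np.sin on the whole array avoids the per-element Python loop above
# and is usually much faster for this workload.
def fill_json_with_sin_values_vectorized(data_array):
    start_time = time.time()
    result_dict = {"sin_values": np.sin(data_array).tolist()}
    print(f"Vectorized execution time: {time.time() - start_time:.2f} seconds")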


# Compute the sine values for one chunk of the array
def compute_sin_chunk(chunk):
    return np.sin(chunk).tolist()

def fill_json_with_sin_values_multiprocessing(array_size, num_processes=None):
    start_time = time.time()
    data_array = np.linspace(0, 2 * np.pi, array_size)

    # Default to the number of CPU cores if no process count is given
    if num_processes is None:
        num_processes = cpu_count()

    # Split the array into one chunk per process
    chunk_size = array_size // num_processes
    chunks = [data_array[i * chunk_size:(i + 1) * chunk_size] for i in range(num_processes)]
    # The slicing above drops the remainder when array_size is not an exact
    # multiple of num_processes, so append the leftover elements as an extra chunk
    if array_size % num_processes != 0:
        chunks.append(data_array[chunk_size * num_processes:])

    # Process the chunks in parallel with a pool of worker processes
    with Pool(num_processes) as pool:
        sin_chunks = pool.map(compute_sin_chunk, chunks)

    # Merge the per-chunk results into a single flat list
    sin_values = [value for chunk in sin_chunks for value in chunk]

    # Store the result in a dictionary
    result_dict = {
        "sin_values": sin_values
    }
    json_result = json.dumps(result_dict, indent=4)
    print(f"Multiprocessing execution time: {time.time() - start_time:.2f} seconds")
    return json_result
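
# A simpler chunking alternative (a sketch, not from the original post):
# np.array_split distributes any remainder across the chunks automatically,
# removing the need for the manual remainder handling above:
#     chunks = np.array_split(data_array, num_processes)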

from joblib import Parallel, delayed

def fill_json_with_sin_values_joblib(array_size, num_processes=None):
    start_time = time.time()
    data_array = np.linspace(0, 2 * np.pi, array_size)

    # Default to the number of CPU cores if no process count is given
    if num_processes is None:
        num_processes = cpu_count()

    # Compute the per-process chunk size, taking care of the remainder
    chunk_size = array_size // num_processes
    chunks = [data_array[i * chunk_size:(i + 1) * chunk_size] for i in range(num_processes)]
    # If array_size is not an exact multiple of num_processes,
    # append the leftover elements as an extra chunk
    if array_size % num_processes != 0:
        chunks.append(data_array[chunk_size * num_processes:])

    # Process the chunks in parallel with joblib
    with Parallel(n_jobs=num_processes) as parallel:
        sin_chunks = parallel(delayed(compute_sin_chunk)(chunk) for chunk in chunks)

    # Merge the per-chunk results into a single flat list
    sin_values = [value for chunk in sin_chunks for value in chunk]

    # Store the result in a dictionary
    result_dict = {
        "sin_values": sin_values
    }
    json_result = json.dumps(result_dict, indent=4)
    print(f"Joblib execution time: {time.time() - start_time:.2f} seconds")
    return json_result
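
# Note (a sketch, not from the original post): if compute_sin_chunk returned
# numpy arrays instead of Python lists, the per-chunk results could be merged
# with np.concatenate, which is typically faster than flattening lists:
#     sin_values = np.concatenate(sin_chunks).tolist()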

# Process one chunk: compute the sine of every element in the sub-array
# (vectorized np.sin, matching compute_sin_chunk above, so the timing
# comparison between backends stays fair)
def process_chunk(chunk):
    return np.sin(chunk)

# Compute sine values and fill the JSON data using loky (parallel version)
def fill_json_with_sin_values_loky(data_array, array_size, max_workers=None):
    start_time = time.time()
    result_dict = {
        "sin_values": []
    }
    # Default to the number of CPU cores if no worker count is given
    if max_workers is None:
        max_workers = cpu_count()
    chunk_size = array_size // max_workers
    chunks = [data_array[i * chunk_size:(i + 1) * chunk_size] for i in range(max_workers)]
    # Append any leftover elements so no data is dropped
    if array_size % max_workers != 0:
        chunks.append(data_array[chunk_size * max_workers:])
    with get_reusable_executor(max_workers=max_workers) as executor:
        future_to_result = [executor.submit(process_chunk, chunk) for chunk in chunks]
        sin_values = []
        for future in future_to_result:
            sin_values.extend(future.result())
    result_dict["sin_values"] = sin_values
    # json_result = json.dumps(result_dict, indent=4)
    print("loky execution completed: {}".format(time.time()-start_time))
    # return json_result
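
# Note (a sketch, not from the original post): get_reusable_executor returns
# a pool that loky keeps alive between calls; the `with` block above shuts it
# down on exit, so repeated calls lose the reuse benefit. Holding a plain
# reference keeps the workers warm across invocations:
#     executor = get_reusable_executor(max_workers=max_workers)
#     results = list(executor.map(process_chunk, chunks))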


def worker_task(data_chunk, result_queue, start_index):
    """Compute sine values for a data chunk and send the result to the result queue."""
    sin_values = np.sin(data_chunk)
    result_queue.put((start_index, sin_values))


def fill_json_with_sin_values_billiard(array_size):
    start_time = time.time()

    # Generate an array of array_size elements spanning 0 to 2π
    data_array = np.linspace(0, 2 * np.pi, array_size)

    # Use all available CPU cores
    num_processes = cpu_count()
    chunk_size = array_size // num_processes

    # Create a queue to collect the results from each process
    ctx = get_context()
    result_queue = ctx.Queue()

    # Create and start the worker processes
    processes = []
    for i in range(num_processes):
        start_index = i * chunk_size
        if i == num_processes - 1:
            # The last chunk takes whatever remains, so no elements are dropped
            data_chunk = data_array[start_index:]
        else:
            data_chunk = data_array[start_index:start_index + chunk_size]

        p = Process(target=worker_task, args=(data_chunk, result_queue, start_index))
        processes.append(p)
        p.start()

    # Collect the results from all processes. Queue entries arrive in
    # whatever order the workers happen to finish, so sort by start_index
    # before merging to restore the original array order.
    results = []
    for _ in range(num_processes):
        results.append(result_queue.get())
    results.sort(key=lambda item: item[0])
    sin_values_list = []
    for _, sin_chunk in results:
        sin_values_list.extend(sin_chunk)

    # Build the result dictionary and serialize it to JSON
    result_dict = {
        "sin_values": sin_values_list
    }
    json_result = json.dumps(result_dict, indent=4)

    # Join the workers only after draining the queue; joining first can
    # deadlock if a worker is still blocked writing to a full queue
    for p in processes:
        p.join()

    print(f"parallel execution time: {time.time() - start_time:.2f} seconds")
    return json_result
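
# Note (a sketch, not from the original post): billiard is the fork of
# Python's multiprocessing maintained by the Celery project; its API mirrors
# the standard library, so the Process/Queue usage above is interchangeable
# with multiprocessing on most platforms.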


# Usage example
if __name__ == "__main__":
    array_size = 10000000  # joblib errors out when the data size is much larger; 50 million was stable
    data_array = np.linspace(0, 2 * np.pi, array_size)

    # json_result = fill_json_with_sin_values_multiprocessing(array_size, num_processes=8)
    # with open("multiprocess.json", "w") as f:
    #     f.write(json_result)

    fill_json_with_sin_values_serial(data_array)
    # with open("normal.json", "w") as f:
    #     f.write(json2)

    import os
    # max_workers = os.cpu_count()  # use all available CPU cores
    fill_json_with_sin_values_loky(data_array, array_size, max_workers=8)
    # with open("loky.json", "w") as f:
    #     f.write(json3)

    # json4 = fill_json_with_sin_values_joblib(array_size, num_processes=8)

    # json5 = fill_json_with_sin_values_billiard(array_size)
