Liao Xuefeng Python Study Notes Day 9

Study Notes Day 8

# python study day9

# Processes and threads: ways to implement multitasking
# 1. multi-process
# 2. multi-thread
# 3. multi-process + multi-thread

# multiprocessing is the cross-platform multi-process module; on Unix/Linux/Mac a child process can also be created directly with fork (a fork sketch follows the Process example below)
# from multiprocessing import Process
# import os
# def run_proc(name): # code executed by the child process
#     print('Run child process %s (%s)...' % (name, os.getpid()))
# if __name__ == '__main__':
#     print('Parent process %s.' % os.getpid())
#     p = Process(target=run_proc, args=('test',)) # create a child process from a target function and its args
#     print('Child process will start.')
#     p.start() # start the child process
#     p.join() # wait for the child process to finish
#     print('Child process end.') #>>>
# # Parent process 928.
# # Child process will start.
# # Run child process test (929)...
# # Child process end.
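# As mentioned above, on Unix/Linux/Mac a child process can also be created directly with fork; a minimal sketch (my own addition, not from the original notes, and not runnable on Windows):
# import os
# print('Process (%s) start...' % os.getpid())
# pid = os.fork() # returns 0 in the child, and the child's pid in the parent
# if pid == 0:
#     print('I am child process (%s) and my parent is %s.' % (os.getpid(), os.getppid()))
# else:
#     print('I (%s) just created a child process (%s).' % (os.getpid(), pid))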
# from multiprocessing import Pool
# import os, time, random
# def long_time_task(name):
#     print('Run task %s (%s)...' % (name, os.getpid()))
#     start = time.time()
#     time.sleep(random.random() * 3)
#     end = time.time()
#     print('Task %s runs %0.2f seconds.' % (name, (end - start)))
# if __name__ == '__main__':
#     print('Parent process %s.' % os.getpid())
#     p = Pool(4) # use a process pool to create child processes in batches
#     for i in range(5):
#         p.apply_async(long_time_task, args=(i,))
#     print('Waiting for all subprocesses done...')
#     p.close() # must be called before join(); no new tasks can be submitted afterwards
#     p.join() # wait for all pool workers to finish
#     print('All subprocesses done.') #>>>
# # Parent process 22900.
# # Waiting for all subprocesses done...
# # Run task 0 (19656)...
# # Run task 1 (10880)...
# # Run task 2 (11372)...
# # Run task 3 (19780)...
# # Task 2 runs 0.35 seconds.
# # Run task 4 (11372)...
# # Task 0 runs 0.83 seconds.
# # Task 3 runs 1.03 seconds.
# # Task 4 runs 1.10 seconds.
# # Task 1 runs 2.25 seconds.
# # All subprocesses done.
# Inter-process communication is done through Queue and Pipe (a Pipe sketch follows the Queue example below)
# from multiprocessing import Process, Queue
# import os, time, random
# def write(q): # code run by the writer process
#     print('Process to write: %s' % os.getpid())
#     for value in [1, 2, 3]:
#         print('put %s to queue...' % value)
#         q.put(value)
#         time.sleep(random.random())
# def read(q):
#     print('process to read: %s' % os.getpid())
#     while True:
#         value = q.get(True)
#         print('get %s from queue.' % value)
# if __name__ == '__main__':
#     q = Queue() # the parent process creates the Queue and passes it to each child
#     pw = Process(target=write, args=(q,))
#     pr = Process(target=read, args=(q,))
#     pw.start() # start child process pw (writer)
#     pr.start() # start pr (reader)
#     pw.join()
#     pr.terminate() # pr loops forever, so terminate it forcibly >>>
# # Process to write: 9468
# # put 1 to queue...
# # process to read: 18696
# # get 1 from queue.
# # put 2 to queue...
# # get 2 from queue.
# # put 3 to queue...
# # get 3 from queue.
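# Besides Queue, a Pipe connects exactly two endpoints; a minimal sketch (my own addition, not from the original notes):
# from multiprocessing import Process, Pipe
# import os
# def child(conn):
#     conn.send('hello from child %s' % os.getpid()) # send through one end of the pipe
#     conn.close()
# if __name__ == '__main__':
#     parent_conn, child_conn = Pipe() # two connected endpoints
#     p = Process(target=child, args=(child_conn,))
#     p.start()
#     print(parent_conn.recv()) # receive from the other end
#     p.join()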

# Multi-threading: Python provides the low-level _thread module and the higher-level threading module; threading is the one normally used
# import time,threading
# def son_thread_task():
#     for i in range(5):
#         print(threading.current_thread().name, i)
#         time.sleep(1)
# print(threading.current_thread().name, 'running')
# son_thread = threading.Thread(target=son_thread_task, name='sonthread')
# son_thread.start()
# son_thread.join()
# print(threading.current_thread().name, 'end') #>>>
# # MainThread running
# # sonthread 0
# # sonthread 1
# # sonthread 2
# # sonthread 3
# # sonthread 4
# # MainThread end
# Lock: thread safety
# import time, threading
# balance = 0 # global bank balance
# lock = threading.Lock()
# def balance_saveAndGet(money):
#     global balance
#     balance = balance + money
#     balance = balance - money
# def run_thread(money):
#     for i in range(2000000):
#         lock.acquire() # acquire the lock before modifying
#         try:
#             balance_saveAndGet(money)
#         finally:
#             lock.release() # release the lock; the acquire/release pair can also be written as a with statement on the shared lock (see the sketch below)
# def main():
#     t1 = threading.Thread(target=run_thread,args=(5,))
#     t2 = threading.Thread(target=run_thread,args=(8,))
#     t1.start()
#     t2.start()
#     t1.join()
#     t2.join()
#     print('balance now is:', balance)
# if __name__ == '__main__':
#     main()
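# A minimal sketch (my own addition, not from the original notes): the acquire/try/finally/release pattern above can be written as a with statement on the shared lock.
# import threading
# lock = threading.Lock()
# counter = 0
# def safe_add(n):
#     global counter
#     for _ in range(100000):
#         with lock: # acquires on entry, releases on exit, even if an exception is raised
#             counter = counter + n
# t1 = threading.Thread(target=safe_add, args=(1,))
# t2 = threading.Thread(target=safe_add, args=(1,))
# t1.start()
# t2.start()
# t1.join()
# t2.join()
# print(counter) # 200000 every time, because the increment is protected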
# Because of the GIL (global interpreter lock) in the CPython interpreter, multiple threads cannot use multiple CPU cores; for CPU-bound work, multiple processes can be used instead (a rough timing sketch follows)
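# A rough timing sketch (my own addition, not from the original notes) running the same CPU-bound function with 4 threads versus a Pool of 4 processes; on CPython the thread version is limited by the GIL to roughly one core.
# import time, threading
# from multiprocessing import Pool
# def cpu_task(n):
#     return sum(i * i for i in range(n))
# def with_threads(n, workers=4):
#     threads = [threading.Thread(target=cpu_task, args=(n,)) for _ in range(workers)]
#     start = time.time()
#     for t in threads:
#         t.start()
#     for t in threads:
#         t.join()
#     return time.time() - start
# def with_processes(n, workers=4):
#     start = time.time()
#     with Pool(workers) as p:
#         p.map(cpu_task, [n] * workers)
#     return time.time() - start
# if __name__ == '__main__':
#     n = 5000000
#     print('threads:   %.2fs' % with_threads(n))
#     print('processes: %.2fs' % with_processes(n))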

# ThreadLocal: a global object whose attributes are per-thread; each thread reads and writes only its own copy, without interfering with others.
# It solves the problem of passing a thread's local data between the functions that thread calls.
# import threading
# local_school = threading.local() # create a global ThreadLocal object; think of it as a dict keyed by thread
# def process_student():
#     std = local_school.student # get the current thread's student
#     print(std, threading.current_thread().name)
# def process_thread(name):
#     local_school.student = name # bind student to the current thread via ThreadLocal
#     process_student()
# t1 = threading.Thread(target=process_thread, args=('Alice',),name='threada')
# t2 = threading.Thread(target=process_thread, args=('Bob',),name='threadb')
# t1.start()
# t2.start()
# t1.join()
# t2.join() #>>>
# # Alice threada
# # Bob threadb

# The single-threaded asynchronous programming model is called a coroutine (a minimal sketch follows)
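# A minimal coroutine sketch (my own addition, not from the original notes): two coroutines interleave within a single thread by yielding control at await points.
# import asyncio
# async def say(name, delay):
#     await asyncio.sleep(delay) # yields to the event loop instead of blocking a thread
#     print('hello from', name)
# async def main():
#     await asyncio.gather(say('a', 0.2), say('b', 0.1))
# asyncio.run(main()) #>>>
# # hello from b
# # hello from a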
# Between Thread and Process, Process should be preferred:
# processes can be distributed across multiple machines, while threads can at most be spread across the CPUs of a single machine
# The multiprocessing.managers module can expose a Queue over the network, which makes distributed processes possible
# task_worker.py
# import time, sys, queue
# from multiprocessing.managers import BaseManager
# # Create the same kind of QueueManager:
# class QueueManager(BaseManager):
#     pass
# # Since this QueueManager only obtains the Queues over the network, registration just supplies the names:
# QueueManager.register('get_task_queue')
# QueueManager.register('get_result_queue')
# # Connect to the server, i.e. the machine running task_master.py:
# server_addr = '127.0.0.1'
# print('Connect to server %s...' % server_addr)
# # The port and authkey must match exactly what task_master.py uses:
# m = QueueManager(address=(server_addr, 5000), authkey=b'abc') # authkey authenticates the connection
# # Connect over the network:
# m.connect()
# # Get the Queue objects:
# task = m.get_task_queue()
# result = m.get_result_queue()
# # Fetch tasks from the task queue and write results to the result queue:
# for i in range(10):
#     try:
#         n = task.get(timeout=1)
#         print('run task %d * %d...' % (n, n))
#         r = '%d * %d = %d' % (n, n, n*n)
#         time.sleep(1)
#         result.put(r)
#     except queue.Empty:
#         print('task queue is empty.')
# # Done:
# print('worker exit.')
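# The notes reference task_master.py but do not include it; a sketch consistent with the worker above (same port 5000, authkey b'abc' and registered queue names) could look like this. On Windows the lambdas would need to be replaced with module-level functions.
# task_master.py
# import random, queue
# from multiprocessing.managers import BaseManager
# task_queue = queue.Queue() # queue for sending tasks
# result_queue = queue.Queue() # queue for receiving results
# class QueueManager(BaseManager):
#     pass
# # Register both queues on the network; callable returns the local queue object:
# QueueManager.register('get_task_queue', callable=lambda: task_queue)
# QueueManager.register('get_result_queue', callable=lambda: result_queue)
# # Bind to port 5000 and set the authkey to 'abc':
# manager = QueueManager(address=('127.0.0.1', 5000), authkey=b'abc')
# manager.start()
# # Get the Queue objects through the manager, not the raw local ones:
# task = manager.get_task_queue()
# result = manager.get_result_queue()
# # Put a few tasks in:
# for i in range(10):
#     n = random.randint(0, 10000)
#     print('Put task %d...' % n)
#     task.put(n)
# # Read results from the result queue:
# print('Try get results...')
# for i in range(10):
#     r = result.get(timeout=10)
#     print('Result: %s' % r)
# manager.shutdown()
# print('master exit.')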

Study Notes Day 10
