# Two multithreading demos:
#  1. stdlib ``threading`` — unlike multiprocessing, threads share the parent
#     process's memory, which is also why one crashing thread can take the
#     whole process down with it.
#  2. the third-party ``threadpool`` package:
#       pip install threadpool
#     usage pattern:
#       pool = threadpool.ThreadPool(poolsize)
#       requests = threadpool.makeRequests(some_callable, list_of_args, callback)
#       [pool.putRequest(req) for req in requests]
#       pool.wait()
import threading


def worke(l):
    """Append three strings to the shared list *l*.

    (Name kept from the original; likely a typo for ``worker``.)
    """
    l.append("hello")
    l.append("world")
    l.append("python")


def hello(m, n, o):
    """Print the three arguments; used as the thread-pool worker callable."""
    # Fixed: the original used the Python 2 print statement, which is a
    # SyntaxError under Python 3 (the rest of the script already used print()).
    print("m = %s, n = %s, o = %s" % (m, n, o))


if __name__ == "__main__":
    # --- demo 1: plain threading, sharing a list with the child thread ------
    # The child thread appends to the very same list object the parent holds,
    # demonstrating shared memory between threads.
    l = list(range(1, 10))
    print(l)
    t = threading.Thread(target=worke, args=(l,))
    t.start()
    # Fixed: without join() the second print races the worker thread and the
    # output order is not actually guaranteed as the original comment claimed.
    t.join()
    print(l)

    # --- demo 2: thread pool via the third-party ``threadpool`` package -----
    # Local import so the module itself stays importable when the optional
    # third-party package is not installed.
    import threadpool

    # Method 1: (positional-args, None) tuples for makeRequests().
    # NOTE: intentionally overwritten by method 2 below — both are kept only
    # to document the two calling conventions.
    lst_vars_1 = ['1', '2', '3']
    lst_vars_2 = ['4', '5', '6']
    func_var = [(lst_vars_1, None), (lst_vars_2, None)]

    # Method 2: (None, keyword-args-dict) tuples for makeRequests().
    dict_vars_1 = {'m': '1', 'n': '2', 'o': '3'}
    dict_vars_2 = {'m': '4', 'n': '5', 'o': '6'}
    func_var = [(None, dict_vars_1), (None, dict_vars_2)]

    pool = threadpool.ThreadPool(2)
    requests = threadpool.makeRequests(hello, func_var)
    # A plain loop: the original's side-effect-only list comprehension built a
    # throwaway list.
    for req in requests:
        pool.putRequest(req)
    pool.wait()
Multithreading, Part 2
Latest recommended article published 2018-01-08 21:49:13