# Fetch several URLs concurrently using a pool of threads.
#
# multiprocessing.dummy exposes the multiprocessing Pool API backed by
# threads, which is the right tool here: the work is I/O-bound, and the
# GIL is released while each worker waits on the network.
#
# Fixes vs. original: the urls list was left unterminated (its closing
# bracket had been swallowed into a comment), and urllib2 is Python 2
# only — urllib.request is the Python 3 stdlib equivalent.
import urllib.request
from multiprocessing.dummy import Pool as ThreadPool

urls = [
    'http://www.python.org',
    'http://www.python.org/about/',
    # etc..
]

# Make the pool of workers.
pool = ThreadPool(4)
try:
    # Open the URLs in their own threads and collect the responses,
    # in the same order as `urls`.
    results = pool.map(urllib.request.urlopen, urls)
finally:
    # Close the pool and wait for the work to finish, even if a
    # worker raised (e.g. a URL was unreachable).
    pool.close()
    pool.join()
# Generate thumbnails for every JPEG in a folder, in parallel across CPU
# cores. Image resizing is CPU-bound, so real processes (multiprocessing
# Pool) are used rather than threads.
#
# Fixes vs. original: removed a stray bare `multiprocessing` expression
# (NameError), un-fused the statements that had been joined onto single
# lines, and corrected the misspelled callback `creat_thumbnail`.
import os
import PIL
from multiprocessing import Pool
from PIL import Image

# Thumbnail bounding box (width, height) in pixels.
SIZE = (75, 75)
# Subdirectory (created under the source folder) where thumbnails go.
SAVE_DIRECTORY = 'thumbs'


def get_image_paths(folder):
    """Return a generator of full paths for JPEG files directly in *folder*.

    Matching is a simple substring test: any entry whose name contains
    'jpeg' is included (so '.jpg' files are NOT picked up).
    """
    return (os.path.join(folder, f)
            for f in os.listdir(folder)
            if 'jpeg' in f)


def create_thumbnail(filename):
    """Shrink the image at *filename* to fit SIZE and save the result
    under the SAVE_DIRECTORY subfolder next to the original."""
    im = Image.open(filename)
    # Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS is the
    # same resampling filter and has been available since Pillow 2.7.
    im.thumbnail(SIZE, Image.LANCZOS)
    base, fname = os.path.split(filename)
    save_path = os.path.join(base, SAVE_DIRECTORY, fname)
    im.save(save_path)


if __name__ == '__main__':
    folder = os.path.abspath('dir/prefix')
    # exist_ok=True makes the script safe to re-run; the original
    # os.mkdir raised FileExistsError on a second invocation.
    os.makedirs(os.path.join(folder, SAVE_DIRECTORY), exist_ok=True)
    images = get_image_paths(folder)
    pool = Pool()
    # Original called the misspelled name `creat_thumbnail`.
    pool.map(create_thumbnail, images)
    pool.close()
    pool.join()