# -*- coding: utf-8 -*-
import urllib
import threading
import multiprocessing
import threadpool
import time
from multiprocessing.dummy import Pool as ThreadPool
# import sys
# reload(sys)
# sys.setdefaultencoding("utf-8")
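# Benchmark of four ways to fetch the HTTP status code of every URL listed
# in url.txt (one URL per line): a process pool, one thread per URL, a
# thread pool via multiprocessing.dummy, and the third-party threadpool
# package. Only one variant is uncommented at a time; measured run times
# are noted inline next to each variant.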
def get_content(url):
    # fetch the URL and print its HTTP status code
    try:
        code = urllib.urlopen(url).getcode()
        print url, code
        # op_httpcode_txt.write('%s %s\n' % (url, code))
    except Exception, exception:
        print exception
        # op_httpcode_txt.write('exception\n')
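# Variant 1: process pool. multiprocessing.Pool(4) dispatches get_content
# for each URL with apply_async; res.get(timeout=5) then waits for each
# result so the timing covers every request.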
# if __name__ == '__main__':
#     start = time.time()
#     result = []
#     p = multiprocessing.Pool(processes=4)
#     with open('url.txt', 'r') as f:
#         for i in f.readlines():
#             i = i.strip()
#             result.append(p.apply_async(get_content, (i,)))
#     p.close()
#     for res in result:
#         res.get(timeout=5)
#     end = time.time()
#     print end - start  # 5.72699999809s / 7.3s
#     c = raw_input('anything:')
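# Variant 2 (active): one threading.Thread per URL, joined at the end so
# the elapsed time includes every request, not just thread startup.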
if __name__ == '__main__':
    start = time.time()
    threads = []
    with open('url.txt', 'r') as f:
        for i in f.readlines():
            i = i.strip()
            t = threading.Thread(target=get_content, args=(i,))
            t.start()
            threads.append(t)
    # wait for all worker threads before reading the clock
    for t in threads:
        t.join()
    end = time.time()
    print end - start  # 5.1s
    print 'currently %d active threads' % threading.activeCount()
    # c = raw_input('anything:')
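# Variant 3: thread pool of 4 workers from multiprocessing.dummy, which
# exposes the same Pool API as multiprocessing but is backed by threads.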
# if __name__ == '__main__':
#     start = time.time()
#     pool = ThreadPool(4)
#     urls = [x.strip() for x in open('url.txt').readlines()]
#     results = pool.map(get_content, urls)
#     pool.close()
#     pool.join()
#     end = time.time()
#     print end - start  # 5.82299995422s / 6.2s
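# Variant 4: the third-party threadpool package. makeRequests builds one
# work request per URL, putRequest queues it on a 4-worker pool, and
# wait() blocks until all requests have finished.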
# if __name__ == '__main__':
#     start = time.time()
#     pool = threadpool.ThreadPool(4)
#     urls = [i.strip() for i in open('url.txt').readlines()]
#     reqs = threadpool.makeRequests(get_content, urls)
#     for req in reqs:
#         pool.putRequest(req)
#     pool.wait()
#     end = time.time()
#     print end - start  # 6.45700001717s / 6.7s