```python
import requests
import time

# Baseline: crawl the URLs one after another (synchronously) and time the
# whole run, for comparison with the gevent version further down.
start = time.time()
url_list = [
    'https://www.baidu.com/',
    'https://www.sina.com.cn/',
    'http://www.sohu.com/',
    'https://www.qq.com/',
    'https://www.163.com/',
    'http://www.iqiyi.com/',
    'https://www.tmall.com/',
    'http://www.ifeng.com/',
]
for url in url_list:
    r = requests.get(url)
    if r.status_code != 200:
        # Deliberately skip reporting non-200 responses and move on.
        continue
    print(url, r.status_code)
end = time.time()
# Total wall-clock time for the sequential crawl, in seconds.
print(end - start)
from gevent import monkey
# NOTE(review): monkey.patch_all() must run before any network-using module
# (socket, requests, ...) is imported. This file already imported `requests`
# at the top, so patching here is too late for those modules and gevent will
# emit a MonkeyPatchWarning — move the patch to the very first lines of the
# file (or run each benchmark as a separate script).
monkey.patch_all()
import gevent
import time
import requests

# Concurrent version: one greenlet per URL; the blocking HTTP calls yield to
# each other, so total time approaches the slowest single request.
start = time.time()
url_list = [
    'https://www.baidu.com/',
    'https://www.sina.com.cn/',
    'http://www.sohu.com/',
    'https://www.qq.com/',
    'https://www.163.com/',
    'http://www.iqiyi.com/',
    'https://www.tmall.com/',
    'http://www.ifeng.com/',
]


def crawler(url):
    """Fetch *url* and print it with the elapsed seconds and HTTP status."""
    r = requests.get(url)
    print(url, time.time() - start, r.status_code)


# Spawn one task per URL, then block until every greenlet has finished.
tasks_list = [gevent.spawn(crawler, url) for url in url_list]
gevent.joinall(tasks_list)
end = time.time()
# Total wall-clock time for the concurrent crawl, in seconds.
print(end - start)
# Summary — key points for a multi-coroutine crawler with gevent:
# 1. Define the crawler function
# 2. Create tasks with gevent.spawn()
# 3. Run all tasks with gevent.joinall()