Python爬取,快速抓取(消息队列+多进程池+协程)
思路
1、利用多进程实现多核CPU同时工作
2、利用协程解决请求IO阻塞
3、将成功请求返回值加入消息队列,请求失败的任务重新加入任务队列
4、再次执行任务,直到所有任务完成
5、利用消息队列实现结果输出
代码
import asyncio
import aiohttp
import time
# Timestamp helper. PEP 8 (E731): prefer `def` over assigning a lambda to a name.
def now():
    """Return the current Unix time in seconds as a float."""
    return time.time()


tim1 = now()  # module-load timestamp (kept for compatibility; not used below)
async def craw(i, resultq, taskq, client):
    """Fetch listing page *i*; record success in resultq, re-queue failures.

    Args:
        i: page number to crawl.
        resultq: cross-process queue collecting {page: '完成'} on success.
        taskq: cross-process queue of pending pages; failures go back here.
        client: shared aiohttp ClientSession.
    """
    # Log before issuing the request (the original printed only after the
    # response had already arrived, which made the "starting" message wrong).
    print("开始爬取成都二手房第%s页信息....." % (str(i)))
    try:
        async with client.get("https://chengdu.anjuke.com/sale/tainfuxinqu/p" + str(i) + "/#filtersort") as resp:
            if resp.status == 200:
                resultq.put({i: '完成'})
            else:
                taskq.put(i)
    except Exception:
        # Boundary handler: a timeout/connection error used to propagate and
        # silently drop the task, so resultq could never reach the expected
        # total and the main loop deadlocked.  Re-queue the page for retry.
        taskq.put(i)
async def main1(lis, resultq, taskq):
    """Crawl every page number in `lis` concurrently.

    One ClientSession is shared by all requests in the batch.
    asyncio.gather() replaces ensure_future + asyncio.wait and — unlike
    asyncio.wait — also accepts an empty batch without raising ValueError.
    """
    async with aiohttp.ClientSession() as client:
        await asyncio.gather(*(craw(i, resultq, taskq, client) for i in lis))
def proce(lis, resultq, taskq):
    """Pool-worker entry point: run the async crawl for one batch of pages.

    asyncio.run() creates a fresh event loop and closes it cleanly on exit,
    replacing the deprecated get_event_loop()/run_until_complete pattern.
    """
    asyncio.run(main1(lis, resultq, taskq))
# Pull the next batch of task ids off the shared task queue.
def getTaskQ(taskq, batch_size=600):
    """Drain up to `batch_size` items from `taskq` into a list.

    Args:
        taskq: a (multiprocessing) queue of pending task ids.
        batch_size: maximum items per batch; defaults to 600, matching the
            original hard-coded limit, so existing callers are unaffected.

    Returns:
        A list of at most `batch_size` items; shorter (possibly empty) if
        the queue runs dry first.  Note that Queue.empty() is advisory
        under multiprocessing, so a racing producer may leave items behind.
    """
    batch = []
    while len(batch) < batch_size and not taskq.empty():
        batch.append(taskq.get())
    return batch
if __name__ == '__main__':
    from multiprocessing import Manager, Pool

    tim = now()
    # Hand out batches of up to 600 queued tasks (getTaskQ's default) to a
    # pool of 3 worker processes.
    manager = Manager()
    taskq = manager.Queue()    # pending page numbers; failed pages are re-queued
    resultq = manager.Queue()  # cross-process channel for finished results
    pool = Pool(processes=3)

    TOTAL_TASKS = 2000
    # Simulate 2000 crawl tasks (page numbers).
    for i in range(TOTAL_TASKS):
        taskq.put(i)

    pending = []  # AsyncResult handles for batches currently in flight
    while True:
        if resultq.qsize() == TOTAL_TASKS:
            break  # every task has reported success
        batch = getTaskQ(taskq)
        if not batch:
            # Task queue momentarily empty: let in-flight batches finish
            # (they may re-queue failures) and back off briefly instead of
            # busy-spinning at 100% CPU.
            for res in pending:
                res.wait()
            pending = []
            time.sleep(0.05)
            continue
        pending.append(pool.apply_async(proce, args=(batch, resultq, taskq)))
        if len(pending) == 3:
            # Pool saturated: block on the public AsyncResult handles.
            # (Replaces the original busy-wait on the private Pool._cache.)
            for res in pending:
                res.wait()
            pending = []

    pool.close()
    pool.join()
    print(f'花费时间{now() - tim}')
    print(resultq.qsize())
    dic = {}
    while not resultq.empty():
        dic.update(resultq.get())
    print(dic)
# print('----------正常')
# time2=now()
# for i in range(200):
# url = "https://chengdu.anjuke.com/sale/tainfuxinqu/p" + str(i) + "/#filtersort"
# headers = {
# "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.87 Safari/537.36"
# }
# print("开始爬取成都二手房第%s页信息....." % (str(i)))
# response = requests.get(url=url, headers=headers)
# print(f'花费时间{now()-time2}')
如有任何问题,欢迎在下方留言,谢谢!