通过这个方法可以在每次请求时更换不同的 User-Agent,防止网站根据 User-Agent 识别并屏蔽 Scrapy 的蜘蛛。
首先将下面的代码添加到settings.py文件,替换默认的user-agent处理模块
# Replace Scrapy's built-in UserAgentMiddleware with the project's random
# one: mapping a middleware path to None disables it, and 400 is the order
# slot the built-in UA middleware normally occupies, so the replacement
# runs at the same point in the downloader chain.
# NOTE(review): 'scrapy.contrib.*' is the pre-1.0 import path; on
# Scrapy >= 1.0 the built-in lives at
# 'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware' — confirm
# against the Scrapy version in use.
DOWNLOADER_MIDDLEWARES = {
    'scraper.random_user_agent.RandomUserAgentMiddleware': 400,
    'scrapy.contrib.downloadermiddleware.useragent.UserAgentMiddleware': None,
}
然后新建自定义的 User-Agent 处理模块(即上面注册的 scraper/random_user_agent.py):
import random

from scraper.settings import USER_AGENT_LIST
class RandomUserAgentMiddleware(object):
    """Downloader middleware that sets a random ``User-Agent`` per request.

    Each request gets one entry picked from ``USER_AGENT_LIST`` (imported
    from the project settings), so successive requests do not all share a
    single UA string and UA-based bot blocking is harder to apply.
    """

    def process_request(self, request, spider):
        """Fill in a randomly chosen User-Agent header on *request*.

        Returns None so Scrapy continues processing the request through
        the remaining downloader middlewares.
        """
        ua = random.choice(USER_AGENT_LIST)
        # setdefault only fills the header when it is absent, so a
        # User-Agent set explicitly on the request is left untouched.
        request.headers.setdefault('User-Agent', ua)