scrapy配置User-Agent和ip代理
无论是配置User-Agent还是ip,均仅需修改settings.py 和 middlewares.py
ip代理:
PS: 用代理ip去访问网站一般都比较慢
settings.py
# Pool of proxies sampled at random by the RandomProxy middleware.
PROXY_LIST = [
# Tunnel through a local proxy client:
#{'ip_port': '127.0.0.1:1080'} # must be used together with a local tunnel (e.g. SSR), otherwise sites inside the firewall will refuse the request
{'ip_port': '117.66.143.76:28140'} # replace with a working proxy IP
]
DOWNLOADER_MIDDLEWARES = {
# Disable the built-in User-Agent middleware so our custom one takes over.
# FIX: the package name is "downloadermiddlewares" (plural); the original
# singular path does not exist, so the built-in middleware was never disabled.
'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
# Register the random-proxy middleware (replace "project_name" with your project name).
'project_name.middlewares.RandomProxy': 544
}
middlewares.py
from scrapy import signals
import random
import base64
from project_name.settings import PROXY_LIST #请更换项目名
class RandomProxy(object):
    """Downloader middleware that routes each request through a proxy
    chosen at random from ``PROXY_LIST`` in settings.py."""

    def process_request(self, request, spider):
        # Pick one proxy entry at random for this request.
        proxy = random.choice(PROXY_LIST)
        # Paid proxies carry a 'user_passwd' key and need Basic auth.
        if 'user_passwd' in proxy:
            # Credentials must be base64-encoded: str -> bytes -> b64 -> str.
            b64_data = base64.b64encode(proxy['user_passwd'].encode())
            request.headers['Proxy-Authorization'] = 'Basic ' + b64_data.decode()
        # FIX: the proxy address must be set for BOTH authenticated and
        # anonymous proxies. The original only set it in the else branch,
        # so authenticated proxies sent the auth header but were never used.
        request.meta['proxy'] = proxy['ip_port']
User-Agent
注意: 部分Agent的版本可能过低
settings.py
# Pool of desktop-browser User-Agent strings, sampled per request by the
# custom UA middleware. NOTE: some browser versions here are dated.
MY_USER_AGENT = [
    # FIX: the original was missing the comma after this entry, so Python
    # implicitly concatenated the first two strings into one invalid UA.
    "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36 QIHU 360EE",
    "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:45.66.18) Gecko/20177177 Firefox/45.66.18",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1200.0 Iron/21.0.1200.0 Safari/537.1",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1200.0 Iron/21.0.1200.0 Safari/537.1",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
]
DOWNLOADER_MIDDLEWARES = {
# Disable the built-in User-Agent middleware so our custom one takes over.
# FIX: the package name is "downloadermiddlewares" (plural); the original
# singular path does not exist, so the built-in middleware was never disabled.
'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
# Register the random User-Agent middleware (replace "project_name" with your project name).
'project_name.middlewares.EbaySpiderMiddleware': 543
}
middlewares.py
from scrapy import signals
import random
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
class EbaySpiderMiddleware(UserAgentMiddleware):
    """Downloader middleware that sets a random User-Agent from the
    MY_USER_AGENT list in settings.py on every outgoing request."""

    def __init__(self, user_agent=''):
        # FIX: initialise the Scrapy base class before installing our pool
        # (the original skipped super().__init__()).
        super().__init__()
        self.user_agent = user_agent

    @classmethod
    def from_crawler(cls, crawler):
        # FIX: default to an empty list so a missing MY_USER_AGENT setting
        # no longer crashes random.choice() with a TypeError on None.
        return cls(user_agent=crawler.settings.get('MY_USER_AGENT', []))

    def process_request(self, request, spider):
        # If no pool is configured, leave the request untouched.
        if not self.user_agent:
            return
        agent = random.choice(self.user_agent)
        request.headers['User-Agent'] = agent
        # Use the spider's logger instead of print() for debug visibility.
        spider.logger.debug('Current User-Agent: %s', agent)
其他防反爬
settings.py
DOWNLOAD_DELAY=1 # delay between consecutive downloads, in seconds (basic anti-ban measure)
COOKIES_ENABLED=False # disable cookies so requests carry no session state