# kongfuzi.py
# Evade the target site's anti-crawler measures by rotating proxy IPs,
# delaying requests, and setting request headers.
import requests
import random
import time
class DownLoad():
def __init__(self):
self.ip_list = ['191.33.179.242:8080', '122.72.108.53:80', '93.190.142.214:80', '189.8.88.125:65301',
'36.66.55.181:8080', '170.84.102.5:8080', '177.200.72.214:20183', '115.229.115.190:9000']
self.user_agent_list = [
'User-Agent:Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
'User-Agent:Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11'
]
def get(self, url, proxy=None, timeout=20, num=5):
print("正在请求%s" % url)
UA = random.choice(self.user_agent_list)
headers = {'User-Agent': UA}
if proxy == None:
try:
return requests.get(url, headers=headers, timeout=timeout)
except:
if num > 0:
time.sleep(10)
return self.get(url, num=num - 1)
else:
time.sleep(10)
IP = ''.join(random.choice(self.ip_list).strip())
proxy = {'http': IP}
return self.get(url, proxy=proxy, timeout=timeout)
else:
try:
IP = ''.join(random.choice(self.ip_list).s