def get_html(url, params):
    """Fetch *url* with a randomly chosen User-Agent and HTTP/HTTPS proxy.

    Parameters
    ----------
    url : str
        Target URL (query string appended from *params*).
    params : dict
        Query parameters forwarded to ``requests.get``.

    Returns
    -------
    str or None
        The decoded response body, or ``None`` if the request failed.
    """
    uapools = [
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14',
    ]
    # BUG FIX: the original list was missing a comma after the first entry,
    # so the first two addresses were implicitly concatenated into one
    # invalid proxy string ('...:33755113.120.63.10:40564').
    ippools = [
        '124.113.217.185:33755',
        '113.120.63.10:40564',
        '117.57.90.116:31028',
        '116.239.107.198:40681',
        '123.162.201.255:45684',
        '115.211.231.157:48786',
        '125.123.127.97:38592',
        '124.113.192.128:47494',
        '171.211.80.94:45346',
        '193.112.111.90:51974',
    ]
    try:
        headers = {"User-Agent": random.choice(uapools)}
        thisip = random.choice(ippools)
        # Same proxy host for both schemes, prefixed per requests' convention.
        proxy = {'http': 'http://' + thisip, 'https': 'https://' + thisip}
        r = requests.get(url, headers=headers, params=params, proxies=proxy)
        r.raise_for_status()
        # Use the content-sniffed encoding so non-UTF-8 pages decode correctly.
        r.encoding = r.apparent_encoding
        return r.text
    except Exception as e:
        # Best-effort fetch: report and return None rather than propagate.
        print('Exception: ', e)
        return None
def main():
    """Demo entry point: search Baidu for 'love' via a random proxy and print a slice."""
    try:
        url = 'https://www.baidu.com/s?'
        params = {'wd': "love"}
        data = get_html(url, params=params)
        # get_html returns None on failure; slicing None would raise
        # TypeError, so guard explicitly instead of relying on the except.
        if data is not None:
            print(data[100:120])
    except Exception as e:
        print('Exception: ', e)
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    main()
# Topic: using User-Agent and IP proxy pools with the requests library.
# (Source page noted a latest recommended article published 2024-01-25 20:27:51.)