import random
import urllib.request

# Pool of candidate HTTP proxies; one is picked at random for each run.
# NOTE(review): both ports (32561111 / 32562222) exceed 65535, so these
# entries cannot be real endpoints — likely typos in the source. Confirm
# the actual proxy host:port values before relying on this script.
proxies_pool = [
    {'http': '121.230.211.142:32561111'},
    {'http': '121.230.211.142:32562222'}
]

# Target URL: Baidu search for "ip" (shows the requester's apparent IP).
url = 'http://www.baidu.com/s?wd=ip'

# Browser-like User-Agent so the request is not rejected as a bot.
headers = {
    'User-Agent': ' Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 SLBrowser/9.0.3.1311 SLBChan/103'
}


def build_request(target_url, request_headers):
    """Return a urllib Request for *target_url* carrying *request_headers*."""
    return urllib.request.Request(url=target_url, headers=request_headers)


def fetch_via_proxy(request, proxy):
    """Open *request* through *proxy* and return the body decoded as UTF-8.

    Follows the handler -> build_opener -> open pattern:
      (1) build a ProxyHandler for the chosen proxy,
      (2) build an opener from that handler,
      (3) call the opener's open() method.
    The response is closed via the context manager (the original leaked it).
    """
    handler = urllib.request.ProxyHandler(proxies=proxy)
    opener = urllib.request.build_opener(handler)
    with opener.open(request) as response:
        return response.read().decode('utf-8')


def main():
    """Fetch the page through a random proxy and save it to daili.html."""
    proxies = random.choice(proxies_pool)
    request = build_request(url, headers)
    content = fetch_via_proxy(request, proxies)
    with open('daili.html', 'w', encoding='utf-8') as fp:
        fp.write(content)


# Guarded so that importing this module does not fire a network request.
if __name__ == '__main__':
    main()
# Source article: "Crawler -- 066: basic usage of the urllib handler/opener
# pattern" (latest recommended revision published 2024-06-14 10:31:03).
# (Converted to a comment: the scraped title/date lines were not valid Python
# and made the file a SyntaxError.)