python 爬虫之 urllib
import urllib.request
import urllib.parse  # for URL-encoding form data

# Send a GET request; use a context manager so the connection is
# closed even if reading/decoding fails.
with urllib.request.urlopen('http://www.baidu.com') as response:
    print(response.read().decode('utf-8'))  # decode the page source as UTF-8

# Send a POST request: urlopen switches to POST when `data` is given.
# urlencode() turns the dict into a query string; bytes() encodes it to UTF-8.
data = bytes(urllib.parse.urlencode({"hello":"world"}), encoding="utf-8")
with urllib.request.urlopen("http://httpbin.org/post", data=data) as response:
    print(response.read().decode("utf-8"))
1.data 参数:
如果要添加该参数,需使用bytes()方法转化,请求方式也变为 POST;
该方法的第一个参数需要是 str 类型,需使用 urllib.parse 库中的 urlencode() 方法将参数字典转化成字符串
2.headers参数:
可以通过修改User-Agent来伪装浏览器;
也可以用 Request 对象的 add_header() 方法来添加
# Disguise the request as a browser via the User-Agent header;
# more header key/value pairs can be added to this dict.
url = "https://httpbin.org/post"
headers = {
    "User-Agent":" Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Mobile Safari/537.36 Edg/89.0.774.68"
}
data = bytes(urllib.parse.urlencode({"name":"zyx"}), encoding="utf-8")
# Build a Request object so headers and method can be set explicitly.
req = urllib.request.Request(url=url, data=data, headers=headers, method='POST')
# Context manager ensures the response is closed after reading.
with urllib.request.urlopen(req) as response:
    print(response.read().decode("utf-8"))
# Same browser disguise against a real site (douban). No `data` is passed,
# so method='POST' is what forces the POST request here.
url = "https://www.douban.com"
headers = {
    "User-Agent":" Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Mobile Safari/537.36 Edg/89.0.774.68"
}
req = urllib.request.Request(url=url, headers=headers, method='POST')
# Context manager ensures the response is closed after reading.
with urllib.request.urlopen(req) as response:
    print(response.read().decode("utf-8"))
3.timeout参数用于设置超时时长
# Timeout handling: urlopen raises urllib.error.URLError when the server
# does not respond within `timeout` seconds (0.01s here, so it will trip).
import urllib.error  # explicit import; don't rely on urllib.request importing it

try:
    response = urllib.request.urlopen("http://httpbin.org/get", timeout=0.01)
    print(response.read().decode("utf-8"))
except urllib.error.URLError:
    print("time out!")

# Inspect the response headers of a plain GET request.
with urllib.request.urlopen("http://baidu.com/get") as response:
    print(response.getheaders())
4.验证:
首先实例化 HTTPPasswordMgrWithDefaultRealm 对象,
利用其 add_password() 方法添加用户名和密码;
再以它为参数实例化 HTTPBasicAuthHandler,这样就建立了一个处理验证的 Handler;
接下来,使用build_opener() 方法建立一个opener,并利用opener的open()方法打开链接
# Some sites pop up a dialog on open, asking for a username and password
# (HTTP Basic Auth). Handle it with a password manager + auth handler.
from urllib.request import HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, build_opener
from urllib.error import URLError

username = 'username'
password = 'password'
url = 'http.....'  # placeholder: replace with the protected URL

# Register the credentials for this URL (realm=None means the default realm),
# wrap them in a Basic-Auth handler, and build an opener that uses it.
p = HTTPPasswordMgrWithDefaultRealm()
p.add_password(None, url, username, password)
auth_handler = HTTPBasicAuthHandler(p)
opener = build_opener(auth_handler)

try:
    result = opener.open(url)
    html = result.read().decode('utf-8')
    print(html)
except URLError as e:
    print(e.reason)
5.代理