Basic usage
Use urlopen to open a URL. read() returns bytes, so decode them with decode(); status holds the HTTP status code, where 200 means success and 302 is a redirect (urlopen follows redirects automatically).
from urllib import request
response = request.urlopen('https://www.baidu.com/')
print(response.read().decode('utf-8'))
print(response.status)
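The response object also exposes the response headers and works as a context manager; a minimal sketch using the same example URL:
from urllib import request
with request.urlopen('https://www.baidu.com/') as response:
    print(response.status)                     # e.g. 200 after any redirects
    print(response.getheader('Content-Type'))  # a single response header
    print(response.getheaders())               # all headers as (name, value) pairs
    html = response.read().decode('utf-8')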
Adding request headers
- Method 1: build a Request object
headers takes the request headers, e.g. User-Agent, Cookie, Referer
method is the HTTP method, e.g. GET or POST
timeout is the timeout in seconds (it is an argument of urlopen, not of Request)
data is the payload submitted to the server as bytes, used for POST requests
from urllib import request
req = request.Request(url='', data=None, headers={},
                      origin_req_host=None, method=None)
request.urlopen(req, timeout=10).read()  # timeout is passed to urlopen, not Request
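A filled-in sketch of the same call, assuming a hypothetical POST endpoint (httpbin.org/post) and placeholder form fields; data must be URL-encoded bytes and the timeout goes to urlopen:
from urllib import request, parse

data = parse.urlencode({'key': 'value'}).encode('utf-8')   # placeholder form fields
headers = {'User-Agent': 'Mozilla/5.0', 'Referer': 'https://example.com/'}
req = request.Request(url='https://httpbin.org/post', data=data,
                      headers=headers, method='POST')
response = request.urlopen(req, timeout=10)
print(response.status)
print(response.read().decode('utf-8'))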
- Method 2: build an opener with handlers
from urllib import request
from urllib.request import ProxyHandler, build_opener
proxy_handler = ProxyHandler({
    # proxy addresses, e.g. {'http': 'http://ip:port'}
})
opener = build_opener(proxy_handler)
opener.addheaders = [('User-Agent', '')]  # addheaders is a list of (name, value) tuples
req = request.Request(url='', headers={})
response = opener.open(req).read()
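A filled-in sketch, assuming a hypothetical local proxy at 127.0.0.1:8888 and a placeholder User-Agent (neither comes from the original notes):
from urllib import request
from urllib.request import ProxyHandler, build_opener

proxy_handler = ProxyHandler({
    'http': 'http://127.0.0.1:8888',    # hypothetical proxy, replace before running
    'https': 'http://127.0.0.1:8888',
})
opener = build_opener(proxy_handler)
opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
req = request.Request('https://www.baidu.com/')
print(opener.open(req, timeout=10).read().decode('utf-8'))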
Cookie handling
from urllib import request
from urllib.request import build_opener
import http.cookiejar

cjar = http.cookiejar.CookieJar()
handler = request.HTTPCookieProcessor(cjar)
opener = build_opener(handler)
opener.open('https://www.baidu.com/')  # cookies set by the server are stored in cjar

# read the cookies
for item in cjar:
    print(item.name + '=' + item.value)

# add a Cookie header manually
opener.addheaders.append(('Cookie', 'key=value'))
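Once the header is appended, every request made through the same opener carries it; a compact end-to-end usage sketch (the URL is the example used earlier in these notes, the cookie value is a placeholder):
from urllib import request
from urllib.request import build_opener
import http.cookiejar

cjar = http.cookiejar.CookieJar()
opener = build_opener(request.HTTPCookieProcessor(cjar))
opener.addheaders.append(('Cookie', 'key=value'))    # placeholder cookie
response = opener.open('https://www.baidu.com/', timeout=10)
print(response.status)
for cookie in cjar:                                  # cookies returned by the server
    print(cookie.name, '=', cookie.value)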
URL joining and encoding
from urllib.parse import unquote  # decode percent-encoded text
from urllib.parse import quote  # percent-encode text
quote('http://www.sina.com.cn')
from urllib.parse import urljoin  # join a base URL and a relative URL
from urllib.parse import urlencode  # encode a dict into a query string
from urllib.parse import urlsplit  # split a URL into scheme, netloc, path, query, fragment
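A short sketch of what these helpers return; only the sina.com.cn URL comes from the snippet above, the other strings are illustrative:
from urllib.parse import quote, unquote, urljoin, urlencode, urlsplit

print(quote('http://www.sina.com.cn'))      # 'http%3A//www.sina.com.cn'
print(unquote('http%3A//www.sina.com.cn'))  # 'http://www.sina.com.cn'
print(urljoin('http://www.sina.com.cn/news/', 'china.html'))
# 'http://www.sina.com.cn/news/china.html'
print(urlencode({'q': 'python', 'page': 1}))  # 'q=python&page=1'
print(urlsplit('http://www.sina.com.cn/news?q=python'))
# SplitResult(scheme='http', netloc='www.sina.com.cn', path='/news', query='q=python', fragment='')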