爬虫网络请求模块

1. urllib.request模块
python2 :urllib2、urllib
python3 :把urllib和urllib2合并,urllib.request
1.2 常用方法
。urllib.request.urlopen("网址") 作用:向网站发起一个请求并获取响应
。字节流 = response.read()
。字符串 = response.read().decode('utf-8')
。urllib.request.Request("网址",headers="字典") 因为urlopen()不支持重构User-Agent,需要用Request()来携带请求头
。User-Agent
为了更好的用户体验 爬虫的一种反爬机制
记录了用户的浏览器 操作系统等

# @Time : 2020/10/14 19:41
# @Author : PanHui
# @File : urllib的使用.py
import urllib.parse
import urllib.request
# # headers = {
   
# #
# # }
# # 获取响应对象urlopen() 不支持重构user-agent
# response = urllib.request.urlopen('https://www.baidu.com/')
# # read()把对象中内容读取出来
# # decode() bytes数据类型 ---> str数据类型
# # encode() str数据类型 ---> bytes数据类型
# html = response.read().decode('utf-8')
# print(type(html),html)

# Fetch Baidu's homepage with a browser-like User-Agent header and print
# the URL that was actually served (exposes any redirect that occurred).
url = 'https://www.baidu.com/'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36'}
# Step 1: build a Request object — urlopen() alone cannot attach headers.
request = urllib.request.Request(url, headers=headers)
# Step 2: send the request and obtain the response object.
response = urllib.request.urlopen(request)
# Step 3: read the raw bytes of the body and decode them into text.
page = response.read().decode('utf-8')
# print(page)
# print(response.getcode())  # HTTP status code of the response
print(response.geturl())  # the URL that was actually requested

1.3 响应对象
。read()读取响应内容
。getcode() 返回HTTP的响应码
。geturl() 返回实际数据的URL(防止重定向问题)

2. urllib.parse模块
2.1 常用方法
。urlencode(字典)
。quote(字符串)

# @File : urllib.parse的使用.py
# url = 'https://www.baidu.com/s?wd=%E6%B5%B7%E8%B4%BC%E7%8E%8B'
# # 'https://www.baidu.com/s?wd=海贼王'
# import urllib.parse
# te = {'wd':'海贼王'}
# result = urllib.parse.urlencode(te)
# print(result) # wd=%E6%B5%B7%E8%B4%BC%E7%8E%8B

# 搜索一个内容 把这个内容保存到本地 html

#
# baseurl = 'https://www.baidu.com/s?'
# key = input('请输入你要搜索的内容:')
# # 进行urlencode()进行编码
# w = {'wd':key}
# k = urllib.parse.urlencode(w)
# # 拼接url
# url = baseurl + k
# # print(url)
# headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36','Cookie':'BIDUPSID=23F0C104655E78ACD11DB1E20FA56630; PSTM=1592045183; BD_UPN=12314753; sug=0; sugstore=0; ORIGIN=0; bdime=0; BAIDUID=23F0C104655E78AC9F0FB18960BCA3D3:SL=0:NR=10:FG=1; BDUSS=ldxR1FyQ2FEaVZ5UWFjTDlRbThVZHJUQTY1S09PSU81SXlHaUpubVpEY0FMakZmRVFBQUFBJCQAAAAAAAAAAAEAAADzvSajSjdnaGgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAChCV8AoQlfb; BDUSS_BFESS=ldxR1FyQ2FEaVZ5UWFjTDlRbThVZHJUQTY1S09PSU81SXlHaUpubVpEY0FMakZmRVFBQUFBJCQAAAAAAAAAAAEAAADzvSajSjdnaGgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAChCV8AoQlfb; MCITY=-158%3A; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; BD_HOME=1; delPer=0; BD_CK_SAM=1; PSINO=6; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; BDRCVFR[tox4WRQ4-Km]=mk3SLVN4HKm; BDRCVFR[-pGxjrCMryR]=mk3SLVN4HKm; BDRCVFR[CLK3Lyfkr9D]=mk3SLVN4HKm; COOKIE_SESSION=204_0_5_9_4_6_0_0_5_4_0_0_533_0_0_0_1602246393_0_1602250500%7C9%2369429_193_1601361993%7C9; H_PS_PSSID=32757_32617_1428_7566_7544_31660_32723_32230_7517_32116_32718; H_PS_645EC=ab4cD3QpA7yZJBKDrrzZqesHzhDrwV%2BYww0WVHtmGJ3Adcj0qvjZIVV%2F9q4'}
# # 创建请求对象
# req = urllib.request.Request(url,headers=headers)
# # 获取响应对象
# res = urllib.request.urlopen(req)
# # 读取响应对象
# html = res.read().decode('utf-8')
# # 写入文件
# with open('搜索1.html','w',encoding='utf-8') as f:
#     f.write(html)

# Search Baidu for a user-supplied keyword and save the result page locally.
baseurl = 'https://www.baidu.com/s?wd='
key = input('请输入你要搜索的内容:')
# quote() percent-encodes the keyword so it is safe to embed in a URL.
# NOTE: requires `import urllib.parse` — importing only urllib.request
# makes urllib.parse usable in CPython by accident, not by contract.
k = urllib.parse.quote(key)
url = baseurl + k
headers = {

    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36','Cookie':'BIDUPSID=23F0C104655E78ACD11DB1E20FA56630; PSTM=1592045183; BD_UPN=12314753; sug=0; sugstore=0; ORIGIN=0; bdime=0; BAIDUID=23F0C104655E78AC9F0FB18960BCA3D3:SL=0:NR=10:FG=1; BDUSS=ldxR1FyQ2FEaVZ5UWFjTDlRbThVZHJUQTY1S09PSU81SXlHaUpubVpEY0FMakZmRVFBQUFBJCQAAAAAAAAAAAEAAADzvSajSjdnaGgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAChCV8AoQlfb; BDUSS_BFESS=ldxR1FyQ2FEaVZ5UWFjTDlRbThVZHJUQTY1S09PSU81SXlHaUpubVpEY0FMakZmRVFBQUFBJCQAAAAAAAAAAAEAAADzvSajSjdnaGgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAChCV8AoQlfb; MCITY=-158%3A; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; BD_HOME=1; delPer=0; BD_CK_SAM=1; PSINO=6; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; BDRCVFR[tox4WRQ4-Km]=mk3SLVN4HKm; BDRCVFR[-pGxjrCMryR]=mk3SLVN4HKm; BDRCVFR[CLK3Lyfkr9D]=mk3SLVN4HKm; COOKIE_SESSION=204_0_5_9_4_6_0_0_5_4_0_0_533_0_0_0_1602246393_0_1602250500%7C9%2369429_193_1601361993%7C9; H_PS_PSSID=32757_32617_1428_7566_7544_31660_32723_32230_7517_32116_32718; H_PS_645EC=ab4cD3QpA7yZJBKDrrzZqesHzhDrwV%2BYww0WVHtmGJ3Adcj0qvjZIVV%2F9q4'
}
# 1. create the request object (urlopen() cannot set headers by itself)
req = urllib.request.Request(url,headers=headers)
# 2. get the response object
res = urllib.request.urlopen(req)
# 3. read the response body and decode bytes -> str
html = res.read().decode('utf-8')
# 4. write the page to a local HTML file.
#    BUG FIX: the original `with open(...)` statement was truncated and
#    never wrote anything; completed per the parallel code above that
#    writes '搜索1.html' with the same mode and encoding.
with open('搜索2.html','w',encoding='utf-8') as f:
    f.write(html)
  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值