#UA伪装:让爬虫对应的请求载体身份标识伪装成某一款浏览器
import requests
#UA伪装:将对应的User-Agent封装到一个字典中
# UA spoofing: present the crawler's request as if it came from a real browser
# by sending a desktop-Chrome User-Agent header.
headers = {
'User-Agent':"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
}
url = "https://www.sogou.com/web"
# Query parameters carried by the URL: pack them into a dict and let
# requests encode and append them via the params= argument.
kw = input("enter a word:")
param = {
"query":kw
}
# Fire the GET request. timeout= keeps the script from hanging forever on a
# dead connection (requests has no default timeout); raise_for_status() fails
# fast on 4xx/5xx instead of silently saving an error page to disk.
response = requests.get(url=url,params=param,headers=headers,timeout=10)
response.raise_for_status()
page_text = response.text
# Persist the page, named after the user-supplied keyword.
fileName = kw + '.html'
with open(fileName,'w',encoding='utf-8') as fp:
    fp.write(page_text)
print("保存成功!")
核心代码:
#指定url有参数的处理---字典封装动态的参数
kw = input("enter a word:")
param = {
"query" : kw
}
response = requests.get(url=url,params=param,headers=headers).text
#持久化存储的改进:根据输入的动态字符进行命名
fileName = kw + ".html"
with open(fileName,"w",encoding="utf-8") as fp:
fp.write(response)
ps:有参数的网址