import requests
from bs4 import BeautifulSoup
# Fetch a page with requests, fix its encoding, and save a prettified
# copy to 1.html. Exits early (with a message) on timeout or connection
# failure so the response is never used while unbound.
url = 'https://www.google.com'  # scheme is required; a bare host raises MissingSchema
print('url=', url)
try:
    # timeout=(connect, read): 3 s to connect, 7 s to read
    response = requests.get(url, timeout=(3, 7))  # returns a Response object
    response.raise_for_status()  # raise HTTPError if status code is not 2xx
    response.encoding = response.apparent_encoding  # ensure the page decodes correctly
except requests.exceptions.Timeout:
    # Timeout covers both ConnectTimeout and ReadTimeout; the narrower
    # ConnectTimeout would miss a read timeout from the 7 s limit above.
    print( '超时!')
    raise SystemExit(1)
except requests.exceptions.ConnectionError:
    print('无效地址!')
    raise SystemExit(1)
html = BeautifulSoup(response.text, 'lxml')
with open("1.html", 'w', encoding="utf-8") as f:  # save to an HTML file
    f.write(html.prettify())
在实际工程里的做法:先不使用try,直接运行让异常显示出来;
查看到异常类型后,再把该类型填到except后面。
如果只需要判断正常或异常(不区分异常类型),将try部分改为如下:
# Simplified variant: any failure in the request pipeline is reported
# with a single generic message instead of per-type handling.
try:
    response = requests.get(url, timeout=(3, 7))  # returns a Response object
    response.raise_for_status()  # raise HTTPError if status code is not 2xx
    response.encoding = response.apparent_encoding  # ensure the page decodes correctly
except requests.exceptions.RequestException:
    # RequestException is the root of all requests errors, so this still
    # catches every request failure — but, unlike a bare `except:`, it no
    # longer swallows KeyboardInterrupt or SystemExit.
    print('异常!')