# 增加异常捕获，更容易发现问题的解决方向 (add exception handling so failures point to their cause)
import ssl
import urllib.request
from bs4 import BeautifulSoup
from urllib.error import HTTPError, URLError
def get_data(url):
headers = {"user-agent":
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36"
}
ssl._create_default_https_context = ssl._create_unverified_context
"""
urlopen处增加两个异常捕获:
1、如果页面出现错误或者服务器不存在时,会抛HTTP错误代码
2、如果url写错了或者是链接打不开时,会抛URLError错误
"""
try:
url_obj = urllib.request.Request(url, headers=headers)
response = urllib.request.urlopen(url_obj)
html = response.read().decode('utf8')
except (HTTPError, URLError)as e:
raise e
"""
BeautifulSoup处增加异常捕获是因为BeautifulSoup对象中有时候标签实际不存在时,会返回None值;
因为不知道ÿ