import csv

from bs4 import BeautifulSoup

# Read the locally saved AQI page. requests cannot fetch file:// URLs
# (it raises InvalidSchema), so open the downloaded HTML file directly.
html_path = r'C:/Users/SWJD/Downloads/aqi_1.html'
with open(html_path, encoding='utf-8') as f:
    soup = BeautifulSoup(f.read(), 'html.parser')

# The first <table class="table"> on the page holds the monitoring data.
table = soup.find_all('table', {'class': 'table'})[0]

data_list = []
for tr in table.tbody.find_all('tr'):
    td_list = tr.find_all('td')
    if len(td_list) > 0:  # skip header rows, which contain no <td> cells
        location = td_list[1].text.strip()   # monitoring station (监测点)
        aqi = td_list[2].text.strip()        # AQI value
        # The category is encoded in the <span> class name after the underscore,
        # e.g. "xxx_good" -> "GOOD"
        air_quality = td_list[3].span['class'][0].split('_')[1].upper()
        data_list.append([location, aqi, air_quality])

# Write the results to CSV; utf-8-sig adds a BOM so Excel renders the Chinese headers correctly.
with open('bj20200721.csv', 'w', newline='', encoding='utf-8-sig') as f:
    writer = csv.writer(f)
    # Headers: monitoring station, AQI, air quality index category
    writer.writerow(['监测点', 'AQI', '空气质量指数类别'])
    writer.writerows(data_list)
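# A minimal, self-contained sketch of the table layout the parsing logic above
# assumes: an index column first, the station name in the second <td>, the AQI
# in the third, and a <span> whose class encodes the category after an underscore.
# The class name "level_good", the station name, and the values are hypothetical,
# used only to show how split('_')[1].upper() yields the category label.
from bs4 import BeautifulSoup

sample_html = """
<table class="table">
  <tbody>
    <tr><th>#</th><th>Station</th><th>AQI</th><th>Category</th></tr>
    <tr>
      <td>1</td>
      <td>StationA</td>
      <td>42</td>
      <td><span class="level_good">good</span></td>
    </tr>
  </tbody>
</table>
"""

demo_soup = BeautifulSoup(sample_html, 'html.parser')
demo_table = demo_soup.find_all('table', {'class': 'table'})[0]
for tr in demo_table.tbody.find_all('tr'):
    td_list = tr.find_all('td')
    if len(td_list) > 0:  # the header row has only <th> cells and is skipped
        print(td_list[1].text.strip(),                              # StationA
              td_list[2].text.strip(),                              # 42
              td_list[3].span['class'][0].split('_')[1].upper())    # GOOD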