select()
ahref = soup.select('td a')
for i in ahref:
print(i['href'])
案例演示
- 需求:爬取全国所有城市的温度(最低气温) 并保存到csv文件当中 [{'city': '北京', 'temp': '5C'}, {...}, {...}]
技术:requests csv bs4
http://www.weather.com.cn/textFC/hb.shtml
import csv
import requests
from bs4 import BeautifulSoup
def get_html(url):
    """Download *url* and return the response body decoded as UTF-8 text.

    A desktop-browser User-Agent is sent because weather.com.cn serves
    different (or blocked) content to clients without one.
    """
    user_agent = (
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
        'AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/91.0.4472.77 Safari/537.36'
    )
    response = requests.get(url, headers={'User-Agent': user_agent})
    # Decode explicitly as UTF-8 rather than trusting response.encoding.
    return response.content.decode("utf-8")
def weather_data(html):
    """Parse one weather.com.cn text-forecast page.

    Returns a list of dicts shaped like
    {"城市": ..., "天气": ..., "最低温度": ...}, one per city row.
    """
    soup = BeautifulSoup(html, features="html5lib")
    # The forecast tables all live inside the first .conMidtab container.
    table_area = soup.select(".conMidtab")[0]
    records = []
    for tbody in table_area.find_all("tbody"):
        # The first two <tr> of every tbody are header rows — skip them.
        for row_no, row in enumerate(tbody.find_all("tr")[2:]):
            cells = row.find_all("td")
            # In the first data row the leading cell is the province name
            # (it row-spans the whole tbody), so the city shifts to cells[1].
            city_cell = cells[1] if row_no == 0 else cells[0]
            city = list(city_cell.stripped_strings)[0]
            low_temp = list(cells[-2].stripped_strings)[0]
            weather = list(cells[-4].stripped_strings)[0]
            records.append({"城市": city, "天气": weather, "最低温度": low_temp})
    return records
def save_data(lst):
    """Write the scraped weather records to 全国天气预报.csv in the CWD.

    Parameters
    ----------
    lst : list[dict]
        Each dict must carry the keys "城市", "天气", "最低温度".
    """
    # Bug fix: the original used a set literal here, which made the
    # DictWriter column order nondeterministic between runs. A list keeps
    # the header and row columns in a stable, intended order.
    header = ["城市", "天气", "最低温度"]
    # newline='' is required so the csv module controls line endings itself.
    with open("全国天气预报.csv", 'w', encoding="utf-8", newline='') as f:
        writer = csv.DictWriter(f, fieldnames=header)
        writer.writeheader()
        writer.writerows(lst)
if __name__ == '__main__':
    # One text-forecast page per region of weather.com.cn
    # (North, Northeast, East, Central, South, Northwest, Southwest, HK/Macau/TW).
    region_pages = (
        'http://www.weather.com.cn/textFC/hb.shtml',
        'http://www.weather.com.cn/textFC/db.shtml',
        'http://www.weather.com.cn/textFC/hd.shtml',
        'http://www.weather.com.cn/textFC/hz.shtml',
        'http://www.weather.com.cn/textFC/hn.shtml',
        'http://www.weather.com.cn/textFC/xb.shtml',
        'http://www.weather.com.cn/textFC/xn.shtml',
        'http://www.weather.com.cn/textFC/gat.shtml',
    )
    all_records = []
    for page in region_pages:
        all_records.extend(weather_data(get_html(page)))
    save_data(all_records)