The result is as follows.
The code is as follows:
import requests
from bs4 import BeautifulSoup
import csv
# Send an HTTP request to fetch the Zhihu hot list page
url = 'https://www.zhihu.com/billboard'
response = requests.get(url)
html_content = response.text
# Parse the HTML content
soup = BeautifulSoup(html_content, 'html.parser')
# Locate the hot-list entries with a CSS selector
hot_topics = soup.select('.HotList-item')
# Extract the top ten entries
top_ten_data = []
for index, topic in enumerate(hot_topics[:10], start=1):
    topic_text = topic.select_one('.HotList-itemTitle').text.strip()
    top_ten_data.append([index, topic_text])
# Write the data to a CSV file
csv_filename = 'zhihu_hot_topics.csv'
with open(csv_filename, 'w', newline='', encoding='utf-8') as csv_file:
    csv_writer = csv.writer(csv_file)
    csv_writer.writerow(['Rank', 'Topic'])
    csv_writer.writerows(top_ten_data)
print(f"Data has been written to {csv_filename}")