"""
https://www.qiushibaike.com/8hr/page/1/
https://www.qiushibaike.com/8hr/page/2/
https://www.qiushibaike.com/8hr/page/3/
"""
import os
import json
import requests
from lxml import etree
def qiushi(page_start, page_end):
    # directory where the scraped results will be saved
    path = './static/templates'
    if not os.path.exists(path):
        os.makedirs(path)
    for page in range(int(page_start), int(page_end) + 1):
        # starting URL, one per page
        base_url = "https://www.qiushibaike.com/8hr/page/{}/".format(page)
        # fetch the page content; without a browser-like User-Agent the site
        # returns nothing (see the note at the end of this file)
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"
        }
        response = requests.get(base_url, headers=headers)
        html = response.text
        html_xml = etree.HTML(html)  # parse the string into an lxml element tree usable with XPath
        # Use XPath to extract the desired content, narrowing the scope step
        # by step: first grab every li tag, then query each field relative to
        # its li (see the standalone sketch after this script)
        li_list = html_xml.xpath('//div[@class="recommend-article"]//li')
        print("======================== downloading page {} ========================".format(page))
        big_dic = {
            'page_{}'.format(page): {}
        }
        # loop over every li tag
        for index, li in enumerate(li_list):
            # 1. joke title; some li elements (e.g. ad slots) may lack the
            # expected structure, so skip them instead of raising IndexError
            name_nodes = li.xpath('.//a[@class="recmd-content"]/text()')
            if not name_nodes:
                continue
            name = name_nodes[0]
            # 2. user nickname
            nickname = li.xpath('.//span[@class="recmd-name"]/text()')[0]
            # 3. number of likes
            like1 = li.xpath('.//div[@class="recmd-num"]//span[1]/text()')[0]
            # 4. number of comments
            comment = li.xpath('.//div[@class="recmd-num"]//span[last()-1]/text()')[0]
            # 5. image: the src attribute is protocol-relative, so prepend the scheme
            picture1 = li.xpath('./a/img/@src')[0]
            pic = 'https:' + picture1
            # collect the fields for this item into a dictionary
            qiushi_dict = {
                'title': name,
                'nickname': nickname,
                'likes': like1,
                'comments': comment,
                'image': pic,
            }
            big_dic['page_{}'.format(page)][index + 1] = qiushi_dict
        # serialize the whole page; ensure_ascii=False keeps non-ASCII text readable
        big_json = json.dumps(big_dic, ensure_ascii=False)
        # write into the directory created above, one file per page
        file_path = os.path.join(path, '{}.txt'.format(page))
        with open(file_path, 'w', encoding='utf-8') as f:
            f.write(big_json)
        print(big_json)
if __name__ == '__main__':
    page_start = input('Enter the start page to scrape: ')
    page_end = input('Enter the end page to scrape: ')
    qiushi(page_start, page_end)
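
# A minimal, self-contained sketch of the scope-narrowing XPath technique used
# above: grab the container nodes with an absolute query first, then run
# relative ('.//') queries on each one. The HTML snippet and the
# _demo_xpath_narrowing name are hypothetical, made up for illustration; only
# the class names mirror what the script assumes about qiushibaike.com.
def _demo_xpath_narrowing():
    snippet = """
    <div class="recommend-article">
      <ul>
        <li><a class="recmd-content">joke A</a><span class="recmd-name">user1</span></li>
        <li><a class="recmd-content">joke B</a><span class="recmd-name">user2</span></li>
      </ul>
    </div>
    """
    root = etree.HTML(snippet)
    # absolute query: every li anywhere under the container div
    for li in root.xpath('//div[@class="recommend-article"]//li'):
        # relative queries: the leading '.' restricts the search to this li
        title = li.xpath('.//a[@class="recmd-content"]/text()')[0]
        nick = li.xpath('.//span[@class="recmd-name"]/text()')[0]
        print(title, nick)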
"""
遇到的问题:获取不到内容
加请求头
"""