Web crawler final exercise (initial version)

# coding=utf-8
import requests
import re
from lxml import etree
def main():
    url = 'https://www.bilibili.com/video/BV1CL411F7r6?spm_id_from=333.934.0.0'
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36'
    }
    # Fetch the video page HTML, then pull the video's cid out of it
    res = requests.get(url, headers=headers).text
    cid = get_cid(res)
    # The danmaku (bullet comments) for a video are served as XML at comment.bilibili.com/<cid>.xml
    url2 = 'https://comment.bilibili.com/' + cid + '.xml'
    content1 = get_content(url2)
    comments = get_target(content1)
    _print(comments)


def get_cid(res):
    # The page HTML contains a fragment like "cid=<number>&aid=..."; capture the number
    obj1 = re.compile(r'"cid=(?P<cid1>.*?)&aid')
    cid = obj1.findall(res)[0]
    return cid


def get_content(url2):
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36'
    }
    # Download the danmaku XML; force UTF-8 so the Chinese text does not come back garbled
    res2 = requests.get(url2, headers=headers)
    res2.encoding = 'utf-8'
    return res2.text


def get_target(content1):
    # Each danmaku entry looks like <d p="...">text</d>; capture only the text
    obj2 = re.compile('<d p=".*?">(.*?)</d>')
    comments_list = obj2.findall(content1)
    return comments_list


def _print(comments):
    # Print each danmaku on its own line
    for i in comments:
        print(i)


if __name__ == "__main__":
    main()
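
The script imports `from lxml import etree` but never uses it. If you prefer a real XML parser over the regex in get_target, the same extraction can be sketched with lxml; the parse_with_lxml helper below is illustrative only and assumes the danmaku file keeps its <d> element layout.

def parse_with_lxml(xml_text):
    # Parse the danmaku XML and return the text of every <d> element.
    # lxml rejects unicode strings that carry an encoding declaration,
    # so re-encode to bytes before parsing.
    root = etree.fromstring(xml_text.encode('utf-8'))
    return [d.text for d in root.xpath('//d') if d.text]

For a well-formed danmaku file, parse_with_lxml(get_content(url2)) should yield the same list as get_target(get_content(url2)).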