Python Web Crawler: Scraping a List of Financial Analysts


Analyst list page: https://www.xiadun.net/rank/authorlist.php

Code

import random

import requests
from lxml import etree


def get_analyst() -> requests.Response:
    """Fetch the analyst list page, rotating the User-Agent at random."""
    url = "https://www.xiadun.net/rank/authorlist.php"
    timeout = 60
    headers = {
        'User-Agent': random.choice(
            [
                "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
                "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
                "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0",
                "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; .NET4.0C; .NET4.0E; .NET CLR 2.0.50727; .NET CLR 3.0.30729; .NET CLR 3.5.30729; InfoPath.3; rv:11.0) like Gecko",
            ]
        ),
    }
    response = requests.get(url=url, headers=headers, timeout=timeout)
    response.raise_for_status()  # fail fast on HTTP errors
    response.encoding = response.apparent_encoding  # sniff the charset so Chinese text decodes correctly
    return response


if __name__ == '__main__':
    write_loc = "../data/analyst-1.txt"

    response = get_analyst()
    html = etree.HTML(response.text)
    # Open the output file once (not once per name) and write UTF-8 so
    # Chinese analyst names round-trip correctly.
    with open(write_loc, 'a', encoding='utf-8') as file:
        # .cssselect() requires the optional `cssselect` package (pip install cssselect)
        for user in html.cssselect(".contentMiddle a"):
            name = user.get("title")
            if name:  # skip anchors that carry no title attribute
                file.write(name + "\n")
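Because the script opens the output file in append mode, re-running it appends the same names again. A minimal post-processing sketch (assuming the same output path as above, one name per line) that strips duplicates while keeping first-seen order:

write_loc = "../data/analyst-1.txt"

# Read every non-empty line; dict.fromkeys drops duplicates while
# preserving the order in which names first appeared.
with open(write_loc, encoding='utf-8') as file:
    unique_names = list(dict.fromkeys(line.strip() for line in file if line.strip()))

# Rewrite the file with only the unique names.
with open(write_loc, 'w', encoding='utf-8') as file:
    file.write("\n".join(unique_names) + "\n")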