[Data Dashboard] Crawling Nationwide and Beijing COVID-19 Data with Python and Building a Visualization Dashboard
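The script below pulls nationwide and Beijing epidemic figures from Tencent's news APIs and writes them into MySQL tables, which the dashboard described at the end reads from.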


import requests
import json
import pymysql
def Down_data_1():
    url = 'https://view.inews.qq.com/g2/getOnsInfo?name=disease_h5'
    headers = {
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Mobile Safari/537.36'
    }
    r = requests.get(url, headers=headers)  # headers must be a keyword argument; passed positionally it becomes query params
    res = json.loads(r.text)
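    # the payload's 'data' field is itself a JSON string, so it is decoded a second
    # time below; the resulting dict carries lastUpdateTime, chinaTotal, chinaAdd and areaTree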
    data_res = json.loads(res['data'])
    return data_res


def Down_data_2():
    # per-city risk-grade details (the statisGradeCityDetail module)
    url = 'https://api.inews.qq.com/newsqa/v1/query/inner/publish/modules/list?modules=statisGradeCityDetail'
    headers = {
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Mobile Safari/537.36'
    }
    r = requests.get(url, headers=headers)  # keyword argument, as above
    res = json.loads(r.text)
    return res['data']

def Down_data_3():
    # daily history for Beijing; the province parameter is the URL-encoded form of '北京'
    # ('pubished' appears verbatim in the endpoint path, so the URL is left as-is)
    url = 'https://api.inews.qq.com/newsqa/v1/query/pubished/daily/list?province=%E5%8C%97%E4%BA%AC&'
    headers = {
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Mobile Safari/537.36'
    }
    r = requests.get(url, headers=headers)  # keyword argument, as above
    res = json.loads(r.text)
    return res['data']

def Parse_data1():
    data = Down_data_1()
    # human-readable summary (renamed from `list`, which shadowed the built-in)
    summary = ['As of: ' + str(data['lastUpdateTime']) + '\n'
               'National confirmed: ' + str(data['chinaTotal']['confirm']) + '\n'
               'New confirmed today: ' + str(data['chinaAdd']['confirm']) + '\n'
               'National suspected: ' + str(data['chinaTotal']['suspect']) + '\n'
               'New suspected today: ' + str(data['chinaAdd']['suspect']) + '\n'
               'National cured: ' + str(data['chinaTotal']['heal']) + '\n'
               'New cured today: ' + str(data['chinaAdd']['heal']) + '\n'
               'National deaths: ' + str(data['chinaTotal']['dead']) + '\n'
               'New deaths today: ' + str(data['chinaAdd']['dead']) + '\n']
    result = ''.join(summary)
    print(result)  # logged only; the database writes happen below
    sql = "truncate table txy.t_chinatotal;"
    Run_sql(sql)
    sql1 = "insert into txy.t_chinatotal(confirm,heal,dead,nowConfirm,suspect,nowSevere,importedCase,noInfect)" \
           " values ("+str(data['chinaTotal']['confirm'])+","+str(data['chinaTotal']['heal'])+","+str(data['chinaTotal']['dead'])+\
           ","+str(data['chinaTotal']['nowConfirm'])+","+str(data['chinaTotal']['suspect'])+","+str(data['chinaTotal']['nowSevere'])+\
           ","+str(data['chinaTotal']['importedCase'])+","+str(data['chinaTotal']['importedCase'])+")"
    Run_sql(sql1)
    sql2 = "truncate table txy.t_chinaadd;"
    Run_sql(sql2)
    sql3 = "insert into txy.t_chinaadd(confirm,heal,dead,nowConfirm,suspect,nowSevere,importedCase,noInfect)" \
           " values (" + str(data['chinaAdd']['confirm']) + "," + str(data['chinaAdd']['heal']) + "," + str(
        data['chinaAdd']['dead']) + \
           "," + str(data['chinaAdd']['nowConfirm']) + "," + str(data['chinaAdd']['suspect']) + "," + str(
        data['chinaAdd']['nowSevere']) + \
           "," + str(data['chinaAdd']['importedCase']) + "," + str(data['chinaAdd']['importedCase']) + ")"
    Run_sql(sql3)
    sql4 = "truncate table txy.t_upd_time;"
    Run_sql(sql4)
    sql5 = "insert into t_upd_time(update_time) select concat('统计日期:',now())"
    Run_sql(sql5)
def Parse_data2():
    data = Down_data_1()['areaTree'][0]['children']  # per-province breakdown
    sql1 = "truncate table txy.t_area;"
    Run_sql(sql1)
    for item in data:
        list_city = [
                'Region: ' + str(item['name']) + '\n'
                ' Confirmed: ' + str(item['total']['confirm']),
                ' New confirmed: ' + str(item['today']['confirm']),
                ' Cured: ' + str(item['total']['heal']),
                #' New cured: ' + str(item['today']['heal']),
                ' Deaths: ' + str(item['total']['dead']),
                #' New deaths: ' + str(item['today']['dead']) + '\n'
        ]  # built for inspection only; the insert below does the real work
        sql = "insert into t_area(name,total_confirm,today_confirm,total_heal,total_dead) values (" \
              "'%s',%s,%s,%s,%s)" % (str(item['name']), str(item['total']['confirm']), str(item['today']['confirm']),
                                     str(item['total']['heal']), str(item['total']['dead']))
        Run_sql(sql)

def Parse_data3():
    data = Down_data_2()['statisGradeCityDetail']
    sql1 = "truncate table txy.t_graph;"
    Run_sql(sql1)
    for item in data:
        list_graph = [
                'City: ' + str(item['city']) + '\n'
                'Risk grade: ' + str(item['grade']),
                'Province: ' + str(item['province']),
                'Cured: ' + str(item['heal']),
                'Current confirmed: ' + str(item['nowConfirm']) + '\n'
        ]
        print(list_graph)
        sql = "insert into t_graph(city,grade,province,heal,nowConfirm) values (" \
              "'%s','%s','%s',%s,%s)" % (str(item['city']), str(item['grade']), str(item['province']),
                                         str(item['heal']), str(item['nowConfirm']))
        Run_sql(sql)



def Parse_data4():
    data = Down_data_3()  # Beijing daily history
    sql1 = "truncate table txy.t_beijing;"
    Run_sql(sql1)
    for item in data:
        list_graph = [
                'Year: ' + str(item['year']) + '\n'
                'Date: ' + str(item['date']),
                'Confirmed: ' + str(item['confirm']),
                'Newly added: ' + str(item['confirm_add']),
                'Current: ' + str(item['newConfirm']),
                'Cured: ' + str(item['heal']),
                'Deaths: ' + str(item['dead']) + '\n'
        ]
        sql = "insert into t_beijing (year,day,confirm,confirm_add,newConfirm,heal,dead)" \
              " values ('%s','%s','%s','%s','%s','%s','%s')" % (str(item['year']), str(item['date']),
                                                                str(item['confirm']), str(item['confirm_add']),
                                                                str(item['newConfirm']), str(item['heal']),
                                                                str(item['dead']))
        Run_sql(sql)

def Run_sql(sql):
    # opens a fresh connection per statement; simple, and fine for a small scheduled job
    conn = pymysql.connect(host="localhost", user="bfd001", password="Liyong123!@#", database="txy", charset="utf8")
    cursor = conn.cursor()
    cursor.execute(sql)
    conn.commit()
    cursor.close()
    conn.close()
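
# Not in the original post: a parameterized variant is a safer sketch, since
# pymysql escapes each value itself instead of relying on string concatenation.
# Run_sql_params is a hypothetical helper; the schema is assumed to match the
# INSERT statements above.
def Run_sql_params(sql, args):
    conn = pymysql.connect(host="localhost", user="bfd001", password="Liyong123!@#", database="txy", charset="utf8")
    try:
        with conn.cursor() as cursor:
            cursor.execute(sql, args)  # pymysql substitutes %s placeholders safely
        conn.commit()
    finally:
        conn.close()

# Example usage inside Parse_data2's loop:
# Run_sql_params("insert into t_area(name,total_confirm,today_confirm,total_heal,total_dead)"
#                " values (%s,%s,%s,%s,%s)",
#                (item['name'], item['total']['confirm'], item['today']['confirm'],
#                 item['total']['heal'], item['total']['dead']))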

print("begin")
Parse_data1()
Parse_data2()
Parse_data3()
Parse_data4()
print("end")

The above is only the data-crawling code. Once the data is stored in MySQL, register for 腾讯云图 (Tencent Cloud's visualization service), connect it to the MySQL database, and lay out the dashboard there.
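The post never shows the table definitions, so here is a minimal DDL sketch consistent with the INSERT statements above; the column types are assumptions, and it reuses the Run_sql helper from the script.

create_stmts = [
    # national totals and daily deltas share the same eight columns
    "create table if not exists txy.t_chinatotal (confirm int, heal int, dead int, nowConfirm int,"
    " suspect int, nowSevere int, importedCase int, noInfect int)",
    "create table if not exists txy.t_chinaadd (confirm int, heal int, dead int, nowConfirm int,"
    " suspect int, nowSevere int, importedCase int, noInfect int)",
    "create table if not exists txy.t_upd_time (update_time varchar(64))",
    "create table if not exists txy.t_area (name varchar(32), total_confirm int, today_confirm int,"
    " total_heal int, total_dead int)",
    "create table if not exists txy.t_graph (city varchar(32), grade varchar(32), province varchar(32),"
    " heal int, nowConfirm int)",
    "create table if not exists txy.t_beijing (year varchar(8), day varchar(16), confirm int,"
    " confirm_add int, newConfirm int, heal int, dead int)",
]
for stmt in create_stmts:
    Run_sql(stmt)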

