Scraping and visualizing China's domestic COVID-19 data

import requests
import json
import pandas as pd
import time
from pyecharts import options as opts
from pyecharts.charts import Bar

def sleeptime(hour, min, sec):   # convert hours/minutes/seconds into a total number of seconds
    return hour * 3600 + min * 60 + sec

def Get_HTML(url):
    headers = {'User-Agent': 'Mozilla/5.0'}
    try:
        r = requests.get(url, headers=headers, timeout=10)
        r.raise_for_status()
        r.encoding = r.apparent_encoding
        return r.text
    except requests.RequestException:
        print("error!")
        return None
def Parse_page():
    C_name, Ljqz, Cured, Died, Xcqz, Xzqz = [], [], [], [], [], []
    url = 'https://m.look.360.cn/events/feiyan?sv=&version=&market=&device=2&net=4&stype=&scene=&sub_scene=&refer_scene=&refer_subscene=&f=jsonp&location=true&sort=2&_=1649252949072&callback=jsonp2'
    text = Get_HTML(url)
    text = text[7:-2]                # strip the "jsonp2(...)" JSONP wrapper to leave plain JSON
    response = json.loads(text)
    for i in range(34):              # 34 provincial-level regions
        c_name = response['data'][i]['data']['provinceName']  # province name
        ljqz = response['data'][i]['diagnosed']                # cumulative confirmed
        cured = response['data'][i]['cured']                   # cured
        died = response['data'][i]['died']                     # deaths
        xcqz = response['data'][i]['currentConfirmed']         # currently confirmed
        xzqz = response['data'][i]['diffDiagnosed']            # newly confirmed
        C_name.append(c_name)
        Ljqz.append(ljqz)
        Cured.append(cured)
        Died.append(died)
        Xcqz.append(xcqz)
        Xzqz.append(xzqz)
    data = {
        "地区": C_name,
        "新增确诊": Xzqz,
        "现存确诊": Xcqz,
        "累计确诊": Ljqz,
        "治愈人数": Cured,
        "死亡人数": Died
    }
    return data
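For reference, judging purely from the keys accessed in Parse_page() above, each element of response['data'] is assumed to look roughly like the sketch below; the values are placeholders, not real figures.

# Assumed shape of one element of response['data'], inferred from the keys used above:
sample_entry = {
    "data": {"provinceName": "湖北"},   # province name (placeholder value)
    "diagnosed": 0,                      # cumulative confirmed
    "cured": 0,                          # cured
    "died": 0,                           # deaths
    "currentConfirmed": 0,               # currently confirmed
    "diffDiagnosed": 0                   # newly confirmed
}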
def View():
    data = Parse_page()
    c = (
        Bar()
        .add_xaxis(data['地区'])
        .add_yaxis("新增确诊", data['新增确诊'], stack="stack1")
        .add_yaxis("现存确诊", data['现存确诊'], stack="stack1")
        .add_yaxis("累计确诊", data['累计确诊'], stack="stack1")
        .add_yaxis("治愈人数", data['治愈人数'], stack="stack1")
        .add_yaxis("死亡人数", data['死亡人数'], stack="stack1")
        .set_global_opts(
            title_opts=opts.TitleOpts(title="国内疫情数据(全部)"),
            datazoom_opts=opts.DataZoomOpts(type_="inside"),
        )
        .render("国内疫情数据.html")
    )
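If you are working in a Jupyter notebook (as the standalone cell at the end suggests), pyecharts can also show the chart inline instead of writing an HTML file. A minimal sketch under that assumption; render_notebook() works out of the box in the classic Jupyter Notebook, while JupyterLab needs extra configuration:

# Minimal sketch: render a stacked bar chart inline in a Jupyter notebook cell
def view_inline():
    data = Parse_page()
    bar = (
        Bar()
        .add_xaxis(data['地区'])
        .add_yaxis("累计确诊", data['累计确诊'], stack="stack1")
        .add_yaxis("治愈人数", data['治愈人数'], stack="stack1")
        .set_global_opts(title_opts=opts.TitleOpts(title="国内疫情数据"))
    )
    return bar.render_notebook()   # displays the chart in the notebook output cell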
def main():
    second = sleeptime(24, 0, 0)
    while True:
        data = pd.DataFrame(Parse_page())   # re-fetch the latest data on every iteration
        data.to_csv('./国内疫情数据.csv')
        View()
        time.sleep(second)                  # repeat once every 24 hours
# main()

# Fetch once and preview the result as a DataFrame when working interactively
data = Parse_page()
data = pd.DataFrame(data)
data
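Once main() has written the CSV at least once, the export can be checked by loading it back with pandas; a small sketch, using the same file path as main() above:

# Load the exported CSV back into a DataFrame to verify the daily export
saved = pd.read_csv('./国内疫情数据.csv', index_col=0)
print(saved.head())   # first few provinces with their case counts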
