File:Suomen aktiivisia koronatapauksia arvio 1.svg

Original file (SVG file, nominally 1,128 × 518 pixels, file size: 51 KB)


Summary

Description
Suomi: Estimated number of people ill with coronavirus at the given moment; these are the active cases.
Date
Source: Own work
Author: Merikanto

Python code to produce the image (estimated active COVID-19 cases in Finland)


# COVID-19 statistics from aggregated data from a net site
# with Python
# active case plot
# Input from an internet site: cases, recovered, deaths.
# Calculates active cases.
# version 0003.0004
# 24.7.2022
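# A minimal sketch of the estimate this script produces (hypothetical numbers, not from any data source):
#   confirmed = np.array([1000, 1100, 1250])    # cumulative confirmed cases
#   recovered = np.array([ 800,  850,  900])    # cumulative recovered
#   deaths    = np.array([  20,   21,   22])    # cumulative deaths
#   active    = confirmed - recovered - deaths  # -> [180 229 328]
# The functions below smooth or re-estimate the recovered series before doing this subtraction.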


paiva1 = "2021-09-01"
paiva2 = "2022-07-07"
ymax1 = 1200
ymax2 = 25


import math
import json
import locale
from datetime import datetime, timedelta

import numpy as np
import pandas as pd
import requests
from bs4 import BeautifulSoup

import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import matplotlib.dates as mdates
from matplotlib.ticker import (MultipleLocator, FormatStrFormatter,
                               AutoMinorLocator, MaxNLocator, ScalarFormatter)

from scipy import interpolate
import scipy.signal
from scipy.signal import savgol_filter

from mizani.breaks import date_breaks, minor_breaks
from mizani.formatters import date_format

def format_func(value, tick_number):
    N = int(np.round(value / 10))
    if N == 0:
        return "0"
    else:
        return r"${0}\pv$".format(N)

# locale.setlocale(locale.LC_ALL, 'fi_FI')


# very basic exponential r0 calculation

def calculate_r0(time1, time2, val1, val2):
    k = 0
    td = time2 - time1
    ## #optim
    # td = 1
    gr0 = math.log(val2 / val1)
    gr = gr0 / td
    if gr != 0:
        td = math.log(2.0) / gr
    else:
        return 1

    tau = 5.0

    k = math.log(2.0) / td

    r0 = math.exp(k * tau)
    if r0 == 32:
        r0 = 1
    if r0 > 32:
        r0 = 4

    return r0
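# Hypothetical sanity check (not used by the script): cases growing from 100 to 200
# over 21 days give a doubling time of 21 days, so with tau = 5.0 above
# calculate_r0(0, 21, 100.0, 200.0) returns about 2 ** (5 / 21) ≈ 1.18.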


def cut_by_dates(dfx, start_date, end_date):
    mask = (dfx['Date'] >= start_date) & (dfx['Date'] <= end_date)
    dfx2 = dfx.loc[mask]
    # print(dfx2)
    return dfx2


def load_country_cases(maa):
    dfin = pd.read_csv('https://datahub.io/core/covid-19/r/countries-aggregated.csv', parse_dates=['Date'])
    countries = [maa]
    dfin = dfin[dfin['Country'].isin(countries)]
    selected_columns = dfin[["Date", "Confirmed", "Recovered", "Deaths"]]
    df2 = selected_columns.copy()

    df = df2
    len1 = len(df["Date"])
    aktiv2 = [None] * len1
    for n in range(0, len1 - 1):
        aktiv2[n] = 0

    dates = df['Date']
    rekov1 = df['Recovered']
    konf1 = df['Confirmed']
    death1 = df['Deaths']
    spanni = 6

    # doubly smoothed recovered counts
    rulla = rekov1.rolling(window=spanni).mean()
    rulla2 = rulla.rolling(window=spanni).mean()

    tulosrulla = rulla2
    tulosrulla = tulosrulla.replace(np.nan, 0)
    tulosrulla = np.array(tulosrulla).astype(int)
    rulla2 = tulosrulla

    x = np.linspace(0, len1, len1)

    konf1 = np.array(konf1).astype(int)
    death1 = np.array(death1).astype(int)

    for n in range(0, (len1 - 1)):
        rulla2[n] = tulosrulla[n]
        # aktiv2[n] = konf1[n] - death1[n] - rulla2[n]
        aktiv2[n] = konf1[n]

    # daily new cases and deaths from the cumulative series
    dailycases1 = [0] * len1
    dailydeaths1 = [0] * len1

    for n in range(1, (len1 - 1)):
        dailycases1[n] = konf1[n] - konf1[n - 1]
        if dailycases1[n] < 0:
            dailycases1[n] = 0

    for n in range(1, (len1 - 1)):
        dailydeaths1[n] = death1[n] - death1[n - 1]
        if dailydeaths1[n] < 0:
            dailydeaths1[n] = 0

    df.insert(2, "Daily_Cases", dailycases1)
    df.insert(3, "Daily_Deaths", dailydeaths1)
    df['ActiveEst'] = aktiv2
    dfout = df[['Date', 'Confirmed', 'Deaths', 'Recovered', 'ActiveEst', 'Daily_Cases', 'Daily_Deaths']]
    print(".")
    return dfout
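# Example use of the helper above (hypothetical call, needs network access):
#   df_fin = load_country_cases("Finland")
#   # -> columns Date, Confirmed, Deaths, Recovered, ActiveEst, Daily_Cases, Daily_Deaths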


def load_fin_wiki_data():
    url = "https://fi.wikipedia.org/wiki/Suomen_koronaviruspandemian_aikajana"
    response = requests.get(url)
    soup = BeautifulSoup(response.text, 'lxml')
    table = soup.find_all('table')[0]   # grab the first table
    df = pd.read_html(str(table))[0]
    # columns: Päivä, Tapauksia, Uusia tapauksia, Sairaalassa, Teholla, Kuolleita, Uusia kuolleita, Toipuneita
    df2 = df[['Tapauksia', 'Uusia tapauksia', 'Sairaalassa', 'Teholla', 'Kuolleita', 'Uusia kuolleita', 'Toipuneita']]

    kaikkiatapauksia = df['Tapauksia']
    toipuneita = df['Toipuneita']
    uusiatapauksia = df['Uusia tapauksia']
    sairaalassa = df['Sairaalassa']
    teholla = df['Teholla']
    kuolleita = df['Kuolleita']
    uusiakuolleita = df['Uusia kuolleita']

    len1 = len(kaikkiatapauksia)

    kaikkiatapauksia2 = []
    toipuneita2 = []
    uusiatapauksia2 = []
    sairaalassa2 = []
    teholla2 = []
    kuolleita2 = []
    uusiakuolleita2 = []

    # strip non-digit characters (reference marks, spaces) and convert to int
    for n in range(0, len1):
        elem0 = kaikkiatapauksia[n]
        elem1 = ''.join(c for c in elem0 if c.isdigit())
        kaikkiatapauksia2.append(int(elem1))

        elem0 = toipuneita[n]
        elem1 = ''.join(c for c in elem0 if c.isdigit())
        if elem1 != '':
            toipuneita2.append(int(elem1))
        else:
            toipuneita2.append(0)

        elem0 = uusiatapauksia[n]
        elem1 = ''.join(c for c in elem0 if c.isdigit())
        uusiatapauksia2.append(int(elem1))

        sairaalassa2.append(int(sairaalassa[n]))
        teholla2.append(int(teholla[n]))
        kuolleita2.append(int(kuolleita[n]))
        uusiakuolleita2.append(int(uusiakuolleita[n]))

    kaikkiatapauksia3 = np.array(kaikkiatapauksia2).astype(int)
    toipuneita3 = np.array(toipuneita2).astype(int)
    uusiatapauksia3 = np.array(uusiatapauksia2).astype(int)
    sairaalassa3 = np.array(sairaalassa2).astype(int)
    teholla3 = np.array(teholla2).astype(int)
    kuolleita3 = np.array(kuolleita2).astype(int)
    uusiakuolleita3 = np.array(uusiakuolleita2).astype(int)

    # find the first day without a reported recovered count
    for n in range(1, len1):
        toipu0 = toipuneita3[n]
        if toipu0 == 0:
            paikka = n
            toipui = toipu1
            break
        toipu1 = toipu0


    ## assumption: most people recover from the acute phase in about 3 weeks

    for n in range(paikka, len1):
        toipui = toipui + uusiatapauksia3[n - 21] - uusiakuolleita3[n]
        toipuneita3[n] = toipui
        print(toipuneita3[n])
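    # Worked example of the assumption above (hypothetical numbers): if 500 new cases
    # were reported 21 days earlier and 2 deaths are reported today, the running
    # recovered estimate grows by 500 - 2 = 498 on this day.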

    # quit(-1)

    napapaiva1 = np.datetime64("2020-04-01")
    timedelta1 = np.timedelta64(len(kaikkiatapauksia3), 'D')
    napapaiva2 = napapaiva1 + timedelta1

    dada1 = pd.date_range(napapaiva1, napapaiva2, periods=len(kaikkiatapauksia3)).to_pydatetime()

    data = {'Date': dada1,
            'Kaikkia tapauksia': kaikkiatapauksia3,
            'Uusia tapauksia': uusiatapauksia3,
            'Sairaalassa': sairaalassa3,
            'Teholla': teholla3,
            'Kuolleita': kuolleita3,
            'Uusiakuolleita': uusiakuolleita3,
            'Toipuneita': toipuneita3}

    df2 = pd.DataFrame(data)

    # print("Fin wiki data.")
    return df2


def get_solanpaa_fi_data():
    url = "https://covid19.solanpaa.fi/data/fin_cases.json"
    response = requests.get(url, allow_redirects=True)
    open('solanpaa_fi.json', 'w').write(response.text)
    with open('solanpaa_fi.json') as f:
        sola1 = pd.read_json(f)

    sola1_top = sola1.head()
    # print(sola1_top)
    # further columns in the JSON: Rt, Rt_lower, Rt_upper, Rt_lower50, Rt_upper50,
    # Rt_lower90, Rt_upper90, new_cases_uks, new_cases_uks_lower50, new_cases_uks_upper50,
    # new_cases_uks_lower90, new_cases_uks_upper90, new_cases_uks_lower, new_cases_uks_upper

    dada1 = sola1["date"]
    casa1 = sola1["cases"]
    death1 = sola1["deaths"]
    newcasa1 = sola1["new_cases"]
    newdeath1 = sola1["new_deaths"]
    hosp1 = sola1["hospitalized"]
    icu1 = sola1["in_icu"]
    rt = sola1["Rt"]
    newcasauks = sola1["new_cases_uks"]

    print(dada1)
    len1 = len(dada1)
    toipuneita = np.zeros(len1)
    aktiivisia = toipuneita

    # estimate recovered: new cases from 21 days earlier, minus today's new deaths
    toipui = 0
    for n in range(0, len1):
        if n > 21:
            toipui = toipui + newcasa1[n - 21] - newdeath1[n]
        toipuneita[n] = toipui

    aktiivisia = casa1 - toipuneita - death1

    data = {'Date': dada1,
            'Tapauksia': casa1,
            'Kuolemia': death1,
            'Sairaalassa': hosp1,
            'Teholla': icu1,
            'Uusia_tapauksia': newcasa1,
            'Uusia_kuolemia': newdeath1,
            'R': rt,
            'Uusia_tapauksia_ennuste': newcasauks,
            'Toipuneita': toipuneita,
            'Aktiivisia': aktiivisia}

    df = pd.DataFrame(data)

    return df


def get_ecdc_fi_hospital_data():
    url = "https://opendata.ecdc.europa.eu/covid19/hospitalicuadmissionrates/json/"
    response = requests.get(url, allow_redirects=True)
    open('ecdc_hoic.json', 'w').write(response.text)
    with open('ecdc_hoic.json') as f:
        sola1 = pd.read_json(f)

    # print(sola1.head())

    sola2 = sola1.loc[sola1["country"] == 'Finland']

    # sola2.to_csv(r'ecdc_hospital_finland_origo.csv', index=True, header=True, sep=';')

    dada0 = sola2["date"]
    hosp0 = sola2["value"]
    country0 = sola2["country"]

    # the code below treats the first half of the rows as hospital occupancy and the second half as ICU
    len1 = len(dada0)
    len2 = int(len1 / 2)

    dada1 = dada0[1:len2 - 1]
    hosp1 = np.array(hosp0[1:len2 - 1])
    icu1 = np.array(hosp0[len2:len1])

    # print(dada1)
    # print(icu1)
    # quit(-1)

    data = {'Date': dada1, 'Sairaalassa': hosp1, 'Teholla': icu1}

    df = pd.DataFrame(data)

    df.to_csv(r'ecdc_hospital_finland.csv', index=True, header=True, sep=';')

    return df

def get_thl_fi_open_data():
    ## THL open data, 1.2.2021

    url1 = "https://sampo.thl.fi/pivot/prod/fi/epirapo/covid19case/fact_epirapo_covid19case.json?row=measure-444833&column=dateweek20200101-508804L"
    url2 = "https://sampo.thl.fi/pivot/prod/fi/epirapo/covid19case/fact_epirapo_covid19case.json?row=measure-492118&column=dateweek20200101-508804L"

    headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}

    response1 = requests.get(url1, headers=headers, allow_redirects=True)
    open('thl_cases1.json', 'w').write(response1.text)
    with open('thl_cases1.json') as json_file1:
        data1 = json.load(json_file1)

    response2 = requests.get(url2, headers=headers, allow_redirects=True)
    open('thl_deaths1.json', 'w').write(response2.text)
    with open('thl_deaths1.json') as json_file2:
        data2 = json.load(json_file2)

    k2 = data1['dataset']
    k3 = k2['dimension']
    k4 = k3['dateweek20200101']
    k5 = k4['category']
    k6 = k5['label']
    k8a = k6.keys()
    k8b = k6.values()
    d1 = k2['value']

    m2 = data2['dataset']
    m3 = m2['dimension']
    m4 = m3['dateweek20200101']
    m5 = m4['category']
    m6 = m5['label']
    m8a = m6.keys()
    m8b = m6.values()
    d2 = m2['value']

    d1a = d1.keys()
    d1b = d1.values()
    d2a = d2.keys()
    d2b = d2.values()

    len1 = len(k8b)

    dates0 = list(k8b)

    casekeys = np.array(list(d1a)).astype(int)
    cases0 = np.array(list(d1b)).astype(int)
    deathkeys = np.array(list(d2a)).astype(int)
    deaths0 = np.array(list(d2b)).astype(int)

    # spread the sparse value dicts onto full-length week tables
    kasetab1 = np.empty(len1).astype(int)
    kasetab1[casekeys] = cases0
    deathtab1 = np.empty(len1).astype(int)
    deathtab1[deathkeys] = deaths0

    datax = {'Date': dates0, 'Uusia_tapauksia': kasetab1, 'Uusia_kuolemia': deathtab1}

    df = pd.DataFrame(datax)

    return df
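# Rough shape of the THL JSON consumed above, as implied by the keys the function accesses
# (labels and values here are illustrative placeholders, not real data):
#   {"dataset": {"dimension": {"dateweek20200101": {"category": {"label": {"0": "...", "1": "..."}}}},
#                "value": {"0": "155", "1": "210"}}}
# 'label' holds the week labels and 'value' maps the same index keys to weekly counts.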


## main program

df = get_solanpaa_fi_data()


# quit(-1)
# df = load_country_cases("Finland")
# df = load_fin_wiki_data()
# print(df)
# quit(-1)

df2 = cut_by_dates(df, paiva1, paiva2)

# print(df2)
# quit(-1)

df.to_csv(r'kovadata1.csv', index=True, header=True, sep=';')


dates0 = df2['Date']

# cases0 = df2['Daily_Cases']

dailycases1 = df2['Uusia_tapauksia']
dailydeaths1 = df2['Uusia_kuolemia']

konf1 = np.array(df2['Tapauksia'])
rekov1 = np.array(df2['Toipuneita'])
death1 = np.array(df2['Kuolemia'])
hospital1 = np.array(df2['Sairaalassa'])
icu1 = np.array(df2['Teholla'])


# rekr1 = rekov1.rolling(window=spanni).mean()
# rekr2 = rekr1.rolling(window=spanni).mean()

rekr2 = scipy.signal.savgol_filter(rekov1, 7, 1)

aktiv1 = konf1 - rekr2 - death1


dailycases_savgol_1 = scipy.signal.savgol_filter(dailycases1, 7, 1)

pos1 = len(dailycases_savgol_1) - 2

time2 = pos1 - 0
time1 = pos1 - 21

val1 = dailycases_savgol_1[time1]
val2 = dailycases_savgol_1[time2]

ro00 = calculate_r0(time1, time2, val1, val2)

ro = round(ro00, 2)

print("R0")
print(ro)


fig, ax = plt.subplots(constrained_layout=True)

ax.legend(fontsize=14)

ax2=ax.twinx()

# secax = ax.secondary_xaxis('top')
# secax.set_xlabel('paivia')

plt.xticks(fontsize=18)

plt.yticks(fontsize=18, rotation=0)

ax2.set_xlabel('Pvm 2020-2021', color='g', size=18)
ax2.set_ylabel('Akuutteja tapauksia (akuutein vaihe)', color='#800000', size=18)
ax2.set_title('Akuutteja koronavirustapauksia ja sairaalassa', color='b', size=22)
ax.set_ylabel('Sairaalassa', color='#3f7f00', size=18)

ax.tick_params(axis='both', which='major', labelsize=18)

ax2.plot(dates0, aktiv1, '#800000', linewidth=4.0, label='Akuuteimpia tapauksia')
ax.plot(dates0, hospital1, '#3f7f00', linestyle="--", linewidth=4.0, label='Sairaalassa')


locator1 = mdates.MonthLocator() # every month

# Specify the format - %b gives us Jan, Feb...
# dateformat1 = mdates.DateFormatter('%b')

dateformat1 = mdates.DateFormatter('%d.%m')


ax.xaxis.set_major_formatter(dateformat1)
ax.xaxis.set_major_locator(locator1)


ax2.yaxis.set_major_locator(MaxNLocator(integer=True))

ax.legend(loc=(.05, .90), fontsize=14, frameon=False)
ax2.legend(loc=(.05, .82), fontsize=14, frameon=False)


plt.show()

# fig.savefig('suomen_aktiivisia_koronatapauksia_arvio_2.svg')
# df_aktive.to_csv(r'active1.csv', index=True, header=True, sep=';')


# print(dfout)



Licensing

I, the copyright holder of this work, hereby publish it under the following license:
This file is licensed under the Creative Commons Attribution-Share Alike 4.0 International license.
You are free:
  • to share – to copy, distribute and transmit the work
  • to remix – to adapt the work
Under the following conditions:
  • attribution – You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
  • share alike – If you remix, transform, or build upon the material, you must distribute your contributions under the same or compatible license as the original.

File history


Date/Time                       Dimensions            User        Comment
11:40, 24 July 2022 (current)   1,128 × 518 (51 KB)   Merikanto   Update
06:02, 14 April 2022            992 × 491 (46 KB)     Merikanto   update
08:30, 8 December 2021          1,339 × 584 (62 KB)   Merikanto   update
07:11, 20 September 2021        1,246 × 459 (63 KB)   Merikanto   Update
12:57, 5 August 2021            1,145 × 389 (60 KB)   Merikanto   Update
08:26, 15 July 2021             991 × 406 (58 KB)     Merikanto   Update
11:54, 19 June 2021             944 × 447 (57 KB)     Merikanto   update
12:34, 21 May 2021              1,041 × 442 (56 KB)   Merikanto   Upload
13:33, 13 May 2021              893 × 433 (57 KB)     Merikanto   Update
08:38, 25 April 2021            946 × 446 (49 KB)     Merikanto   Update

There are no pages that use this file.
