Commit ab79ab1

Updated run.py
1 parent c53b886 commit ab79ab1

File tree

  • combo_cases_and_deaths/delphi_combo_cases_and_deaths

1 file changed: 20 additions, 33 deletions
combo_cases_and_deaths/delphi_combo_cases_and_deaths/run.py

Lines changed: 20 additions & 33 deletions
@@ -1,13 +1,10 @@
 # -*- coding: utf-8 -*-
 """Functions to call when running the function.
-
 This module should contain a function called `run_module`, that is executed when
 the module is run with `python -m delphi_combo_cases_and_deaths`.
-
 This module produces a combined signal for jhu-csse and usa-facts. This signal
 is only used for visualization. It sources Puerto Rico from jhu-csse and
 everything else from usa-facts.
-
 """
 from datetime import date, timedelta, datetime
 from itertools import product
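
The module docstring above states the sourcing rule: Puerto Rico comes from jhu-csse and every other location from usa-facts. A minimal sketch of that rule, assuming both inputs are data frames with a "geo_id" column and that the caller supplies the Puerto Rico geo ids (this is not the indicator's actual combine_usafacts_and_jhu, which takes a signal, a geo resolution, and a date range):

import pandas as pd

def combine_sketch(usafacts_df, jhu_df, pr_geo_ids):
    # Keep non-Puerto-Rico rows from usa-facts and Puerto Rico rows from jhu-csse.
    non_pr = usafacts_df[~usafacts_df["geo_id"].isin(pr_geo_ids)]
    pr = jhu_df[jhu_df["geo_id"].isin(pr_geo_ids)]
    return pd.concat([non_pr, pr], ignore_index=True)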
@@ -18,29 +15,10 @@
 import pandas as pd
 
 from delphi_utils import read_params, create_export_csv
+from .constants import *
+from .handle_wip_signal import *
 
 
-METRICS = [
-    "confirmed",
-    "deaths",
-]
-SMOOTH_TYPES = [
-    "",
-    "7dav",
-]
-SENSORS = [
-    "incidence_num",
-    "cumulative_num",
-    "incidence_prop",
-    "cumulative_prop",
-]
-GEO_RESOLUTIONS = [
-    "county",
-    "state",
-    "msa",
-    "hrr",
-]
-
 def check_not_none(data_frame, label, date_range):
     """Exit gracefully if a data frame we attempted to retrieve is empty"""
     if data_frame is None:
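
The two star imports added above replace the module-level lists deleted in this hunk; presumably those lists now live in the new constants module. A sketch of what constants.py might contain, assuming the values carried over unchanged from the deleted code:

# constants.py -- assumed contents, copied from the lists removed above
METRICS = ["confirmed", "deaths"]
SMOOTH_TYPES = ["", "7dav"]
SENSORS = ["incidence_num", "cumulative_num", "incidence_prop", "cumulative_prop"]
GEO_RESOLUTIONS = ["county", "state", "msa", "hrr"]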
@@ -78,7 +56,6 @@ def extend_raw_date_range(params, sensor_name):
7856
"""A complete issue includes smoothed signals as well as all raw data
7957
that contributed to the smoothed values, so that it's possible to use
8058
the raw values in the API to reconstruct the smoothed signal at will.
81-
8259
The smoother we're currently using incorporates the previous 7
8360
days of data, so we must extend the date range of the raw data
8461
backwards by 7 days.
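
The surviving docstring explains the reasoning: the 7-day smoother consumes the previous week of raw values, so the raw date range must start 7 days earlier than the smoothed one. A toy illustration of that extension, using a hypothetical helper rather than the repository's extend_raw_date_range (which also inspects the sensor name and params):

from datetime import date, timedelta

def extend_back_seven_days(start, end):
    # Pull the start of the raw range back one week so every raw value
    # feeding the 7-day smoother is also available in the API.
    return start - timedelta(days=7), end

print(extend_back_seven_days(date(2020, 9, 8), date(2020, 9, 15)))
# (datetime.date(2020, 9, 1), datetime.date(2020, 9, 15))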
@@ -155,11 +132,21 @@ def run_module():
     params['date_range'] = [date1, date2]
 
     for metric, geo_res, sensor_name, signal in variants:
-        create_export_csv(
-            combine_usafacts_and_jhu(signal, geo_res, extend_raw_date_range(params, sensor_name)),
-            export_dir=params['export_dir'],
-            start_date=pd.to_datetime(params['export_start_date']),
-            metric=metric,
-            geo_res=geo_res,
-            sensor=sensor_name,
-        )
+
+        df = combine_usafacts_and_jhu(signal, geo_res, extend_raw_date_range(params, sensor_name))
+
+        df = df.copy()
+        df["timestamp"] = pd.to_datetime(df["timestamp"])
+        start_date = pd.to_datetime(params['export_start_date'])
+        export_dir = params["export_dir"]
+        dates = pd.Series(
+            df[df["timestamp"] >= start_date]["timestamp"].unique()
+        ).sort_values()
+
+        signal_name = add_prefix([signal], wip_signal=params["wip_signal"], prefix="wip_")
+        for date_ in dates:
+            export_fn = f'{date_.strftime("%Y%m%d")}_{geo_res}_' f"{signal_name[0]}.csv"
+            df[df["timestamp"] == date_][["geo_id", "val", "se", "sample_size", ]].to_csv(
+                f"{export_dir}/{export_fn}", index=False, na_rep="NA"
+            )
+
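The replacement loop writes one CSV per day directly instead of calling create_export_csv, after add_prefix has (optionally) marked the signal as work in progress. Under the naming scheme in the loop, a file for a hypothetical "wip_confirmed_incidence_num" signal at county level on 2020-09-01 would be named as follows (illustrative values only; the actual signal names are built elsewhere in the module):

import pandas as pd

date_ = pd.Timestamp("2020-09-01")
geo_res = "county"
signal_name = ["wip_confirmed_incidence_num"]  # assumed example output of add_prefix
export_fn = f'{date_.strftime("%Y%m%d")}_{geo_res}_' f"{signal_name[0]}.csv"
print(export_fn)  # 20200901_county_wip_confirmed_incidence_num.csv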
