Commit 3b5a791

Merge pull request #1136 from cmu-delphi/remove-7dav-cumulative
Remove 7dav from cumulative signals
2 parents e9b90cd + 488a9d1 commit 3b5a791

8 files changed: +59 −5 lines


combo_cases_and_deaths/delphi_combo_cases_and_deaths/run.py

Lines changed: 1 addition & 0 deletions
@@ -322,6 +322,7 @@ def run_module(params):
     variants = [tuple((metric, geo_res)+sensor_signal(metric, sensor, smoother))
                 for (metric, geo_res, sensor, smoother) in
                 product(METRICS, GEO_RESOLUTIONS, SENSORS, SMOOTH_TYPES)]
+    variants = [i for i in variants if not ("7dav" in i[2] and "cumulative" in i[2])]
     params = configure(variants, params)
     logger = get_structured_logger(
         __name__, filename=params["common"].get("log_filename"),
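
Note: the added line drops every signal variant that pairs the 7-day average with a cumulative count before the export configuration is built. A minimal, self-contained sketch of the effect follows; the METRICS, GEO_RESOLUTIONS, SENSORS, SMOOTH_TYPES values and the sensor_signal helper are illustrative stand-ins for the indicator's real definitions, and only the final list comprehension mirrors the added code.

from itertools import product

# Illustrative stand-ins; the real values live in the indicator's constants.
METRICS = ["confirmed", "deaths"]
GEO_RESOLUTIONS = ["county", "state"]
SENSORS = ["incidence_num", "cumulative_num"]
SMOOTH_TYPES = ["", "7dav"]

def sensor_signal(metric, sensor, smoother):
    # Hypothetical helper: builds the exported sensor name, e.g. "7dav_cumulative_num".
    sensor_name = "_".join([smoother, sensor]) if smoother else sensor
    return sensor_name, "_".join([metric, sensor_name])

variants = [tuple((metric, geo_res) + sensor_signal(metric, sensor, smoother))
            for (metric, geo_res, sensor, smoother) in
            product(METRICS, GEO_RESOLUTIONS, SENSORS, SMOOTH_TYPES)]
# Same filter as the added line: drop variants whose sensor name combines a
# 7-day average with a cumulative count.
variants = [i for i in variants if not ("7dav" in i[2] and "cumulative" in i[2])]
assert not any("7dav_cumulative" in v[2] for v in variants)

Smoothed incidence and unsmoothed cumulative variants are still configured; only the 7dav-cumulative combinations disappear.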

combo_cases_and_deaths/tests/receiving/.gitkeep

Whitespace-only changes (empty .gitkeep placeholder keeping the tests/receiving export directory present for the new test).

combo_cases_and_deaths/tests/test_run.py

Lines changed: 48 additions & 1 deletion
@@ -1,13 +1,16 @@
 """Tests for running combo cases and deaths indicator."""
 from datetime import date
 from itertools import product
+import os
 import unittest
 from unittest.mock import patch, call
 import pandas as pd
 import numpy as np
 
 from delphi_combo_cases_and_deaths.run import (
-    extend_raw_date_range, get_updated_dates,
+    run_module,
+    extend_raw_date_range,
+    get_updated_dates,
     sensor_signal,
     combine_usafacts_and_jhu,
     compute_special_geo_dfs,

@@ -244,6 +247,50 @@ def test_no_nation_jhu(mock_covidcast_signal):
              "sample_size": [None]},)
     )
 
+@patch("delphi_combo_cases_and_deaths.run.combine_usafacts_and_jhu")
+def test_output_files(mock_combine):
+    params = {
+        "common": {
+            "export_dir": "./receiving"
+        },
+        "indicator": {
+            "export_start_date": [2020, 4, 1],
+            "source":"indicator-combination",
+            "wip_signal": ""
+        }
+    }
+    mock_combine.return_value = pd.DataFrame(
+        {
+            "geo_id": ["01000"],
+            "val": [10],
+            "timestamp": [pd.to_datetime("2021-01-04")],
+            "issue": [pd.to_datetime("2021-01-04")],
+            "se": 0,
+            "sample_size": 0
+        },
+        index=[1]
+    )
+    run_module(params)
+    csv_files = [f for f in os.listdir("receiving") if f.endswith(".csv")]
+    dates = ["20210104"]
+    geos = ["county", "hrr", "msa", "state", "hhs", "nation"]
+
+    # enumerate metric names.
+    metrics = []
+    for event, span, stat in product(["deaths", "confirmed"],
+                                     ["cumulative", "incidence"],
+                                     ["num", "prop"]):
+        metrics.append("_".join([event, span, stat]))
+        metrics.append("_".join([event, "7dav", span, stat]))
+
+    expected_files = []
+    for date in dates:
+        for geo in geos:
+            for metric in metrics:
+                if "7dav" in metric and "cumulative" in metric:
+                    continue
+                expected_files += [date + "_" + geo + "_" + metric + ".csv"]
+    assert set(csv_files) == set(expected_files)
 
 if __name__ == '__main__':
     unittest.main()
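
Sanity check on the file set the new test expects, reusing its own geo and metric enumeration: 6 geos times the 12 metrics that survive the 7dav-cumulative exclusion gives 72 CSVs for the single export date.

from itertools import product

geos = ["county", "hrr", "msa", "state", "hhs", "nation"]
metrics = []
for event, span, stat in product(["deaths", "confirmed"],
                                 ["cumulative", "incidence"],
                                 ["num", "prop"]):
    metrics.append("_".join([event, span, stat]))
    metrics.append("_".join([event, "7dav", span, stat]))

# 16 raw/smoothed metric names minus the 4 that pair 7dav with cumulative.
kept = [m for m in metrics if not ("7dav" in m and "cumulative" in m)]
assert (len(metrics), len(kept)) == (16, 12)
assert len(geos) * len(kept) == 72   # expected CSV count per date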

jhu/delphi_jhu/run.py

Lines changed: 2 additions & 1 deletion
@@ -106,7 +106,8 @@ def run_module(params: Dict[str, Any]):
     for metric, geo_res, sensor, smoother in product(
         METRICS, GEO_RESOLUTIONS, SENSORS, SMOOTHERS
     ):
-        print(metric, geo_res, sensor, smoother)
+        if "cumulative" in sensor and "seven_day_average" in smoother:
+            continue
         logger.info(
             event="generating signal and exporting to CSV",
             metric=metric,
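
The guard skips the smoothed-cumulative combination before any signal work is done (and also replaces a leftover debug print); the same two lines are added to the usafacts indicator further down. A small sketch of its effect, where the SENSORS and SMOOTHERS values are assumptions chosen only to illustrate the naming the guard matches on:

from itertools import product

# Assumed, illustrative values mirroring the "cumulative"/"incidence" and
# "seven_day_average" naming the guard checks; not the indicators' actual constants.
SENSORS = ["confirmed_cumulative_num", "confirmed_incidence_num"]
SMOOTHERS = ["unsmoothed", "seven_day_average"]

kept = []
for sensor, smoother in product(SENSORS, SMOOTHERS):
    if "cumulative" in sensor and "seven_day_average" in smoother:
        continue   # smoothed cumulative signals are no longer generated
    kept.append((sensor, smoother))

# Every other sensor/smoother pairing is still produced.
assert ("confirmed_cumulative_num", "seven_day_average") not in kept
assert ("confirmed_incidence_num", "seven_day_average") in kept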

jhu/tests/test_run.py

Lines changed: 2 additions & 0 deletions
@@ -31,6 +31,8 @@ def test_output_files_exist(self, run_as_module):
         for date in dates:
             for geo in geos:
                 for metric in metrics:
+                    if "7dav" in metric and "cumulative" in metric:
+                        continue
                     # Can't compute 7dav for first few days of data because of NAs
                     if date > "20200305" or "7dav" not in metric:
                         expected_files += [date + "_" + geo + "_" + metric + ".csv"]

jhu/tests/test_smooth.py

Lines changed: 2 additions & 2 deletions
@@ -9,14 +9,14 @@ def test_output_files_smoothed(self, run_as_module):
         dates = [str(x) for x in range(20200303, 20200310)]
 
         smoothed = pd.read_csv(
-            join("./receiving", f"{dates[-1]}_state_confirmed_7dav_cumulative_num.csv")
+            join("./receiving", f"{dates[-1]}_state_confirmed_7dav_incidence_num.csv")
         )
 
         # Build a dataframe out of the individual day files
         raw = pd.concat(
             [
                 pd.read_csv(
-                    join("./receiving", f"{date}_state_confirmed_cumulative_num.csv")
+                    join("./receiving", f"{date}_state_confirmed_incidence_num.csv")
                 )
                 for date in dates
             ]
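
For reference, a 7dav signal is a trailing 7-day mean of the daily values, so the smoothed incidence file the test now reads relates to the raw incidence files in the usual way. A tiny illustrative sketch with hypothetical values (not the test's data or its actual assertion):

import numpy as np
import pandas as pd

# Hypothetical daily incidence values for seven consecutive days.
raw = pd.Series([1, 2, 3, 4, 5, 6, 7], dtype=float)

# The 7dav value on the last day is the mean of the seven raw daily values.
smoothed_last = raw.rolling(window=7).mean().iloc[-1]
assert np.isclose(smoothed_last, raw.mean())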

usafacts/delphi_usafacts/run.py

Lines changed: 2 additions & 0 deletions
@@ -103,6 +103,8 @@ def run_module(params: Dict[str, Dict[str, Any]]):
     dfs = {metric: pull_usafacts_data(base_url, metric, logger) for metric in METRICS}
     for metric, geo_res, sensor, smoother in product(
             METRICS, GEO_RESOLUTIONS, SENSORS, SMOOTHERS):
+        if "cumulative" in sensor and "seven_day_average" in smoother:
+            continue
         logger.info("generating signal and exporting to CSV",
                     geo_res = geo_res,
                     metric = metric,

usafacts/tests/test_run.py

Lines changed: 2 additions & 1 deletion
@@ -54,8 +54,9 @@ def test_output_files_exist(self):
                 for metric in metrics:
                     if "7dav" in metric and date in dates[:6]:
                         continue # there are no 7dav signals for first 6 days
+                    if "7dav" in metric and "cumulative" in metric:
+                        continue
                     expected_files += [date + "_" + geo + "_" + metric + ".csv"]
-
         assert set(csv_files) == set(expected_files)
 
     def test_output_file_format(self):
