diff --git a/jhu/delphi_jhu/run.py b/jhu/delphi_jhu/run.py
index 9c9533896..9c4c1c64e 100644
--- a/jhu/delphi_jhu/run.py
+++ b/jhu/delphi_jhu/run.py
@@ -44,12 +44,12 @@
     "cumulative_prop": ("cumulative_prop", False),
 }
 # Temporarily added for wip_ signals
-WIP_SENSOR_NAME_MAP = {
-    "new_counts": ("incid_num", False),
-    "cumulative_counts": ("cumul_num", False),
-    "incidence": ("incid_prop", False),
-    "cumulative_prop": ("cumul_prop", False),
-}
+# WIP_SENSOR_NAME_MAP = {
+#     "new_counts": ("incid_num", False),
+#     "cumulative_counts": ("cumul_num", False),
+#     "incidence": ("incid_prop", False),
+#     "cumulative_prop": ("cumul_prop", False),
+# }
 SMOOTHERS_MAP = {
     "unsmoothed": (identity, '', False),
     "seven_day_average": (seven_day_moving_average, '7dav_', True),
@@ -91,9 +91,9 @@ def run_module():
             # Drop early entries where data insufficient for smoothing
             df = df.loc[~df["val"].isnull(), :]
             sensor_name = SENSOR_NAME_MAP[sensor][0]
-            if (SENSOR_NAME_MAP[sensor][1] or SMOOTHERS_MAP[smoother][2]):
-                metric = f"wip_{metric}"
-                sensor_name = WIP_SENSOR_NAME_MAP[sensor][0]
+            # if (SENSOR_NAME_MAP[sensor][1] or SMOOTHERS_MAP[smoother][2]):
+            #     metric = f"wip_{metric}"
+            #     sensor_name = WIP_SENSOR_NAME_MAP[sensor][0]
             sensor_name = SMOOTHERS_MAP[smoother][1] + sensor_name
             create_export_csv(
                 df,
diff --git a/jhu/tests/test_run.py b/jhu/tests/test_run.py
index 246cf7e66..60d3e13b1 100644
--- a/jhu/tests/test_run.py
+++ b/jhu/tests/test_run.py
@@ -30,8 +30,8 @@ def test_output_files_exist(self, run_as_module):
             "confirmed_cumulative_num",
             "confirmed_incidence_num",
             "confirmed_incidence_prop",
-            "wip_deaths_cumulative_prop",
-            "wip_confirmed_cumulative_prop",
+            "deaths_7dav_cumulative_prop",
+            "confirmed_7dav_cumulative_prop",
         ]

         expected_files = []
diff --git a/jhu/tests/test_smooth.py b/jhu/tests/test_smooth.py
index 43097e9ed..1f77fc76a 100644
--- a/jhu/tests/test_smooth.py
+++ b/jhu/tests/test_smooth.py
@@ -14,7 +14,7 @@ def test_output_files_smoothed(self, run_as_module):

         smoothed = pd.read_csv(
             join("receiving",
-                 f"{dates[-1]}_state_wip_confirmed_7dav_cumul_num.csv")
+                 f"{dates[-1]}_state_confirmed_7dav_cumulative_num.csv")
         )

         raw = pd.concat([
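
With the wip_ branch commented out, exported signal names are built only from the SENSOR_NAME_MAP base name plus the smoother prefix. Below is a minimal standalone sketch of that naming logic, not code from the repo: the SENSOR_NAME_MAP entries other than "cumulative_prop", the METRICS list, and the printed format are assumptions chosen to be consistent with the file names expected in jhu/tests/test_run.py.

```python
# Hypothetical sketch of the post-change naming scheme. Entries marked
# "assumed" are not shown in the diff and are illustrative only.
SENSOR_NAME_MAP = {
    "new_counts": "incidence_num",          # assumed
    "cumulative_counts": "cumulative_num",  # assumed
    "incidence": "incidence_prop",          # assumed
    "cumulative_prop": "cumulative_prop",   # shown in the diff
}
SMOOTHER_PREFIX = {
    "unsmoothed": "",
    "seven_day_average": "7dav_",
}
METRICS = ["confirmed", "deaths"]  # assumed metric names

for metric in METRICS:
    for sensor, base_name in SENSOR_NAME_MAP.items():
        for smoother, prefix in SMOOTHER_PREFIX.items():
            # With the wip_ branch removed, the name is always the smoother
            # prefix plus the base sensor name, e.g. "confirmed_7dav_cumulative_prop".
            print(f"{metric}_{prefix}{base_name}")
```

Running the sketch lists names matching the updated expectations in test_run.py (e.g. deaths_7dav_cumulative_prop), which is why the wip_-prefixed entries were replaced there and the smoothed-output filename in test_smooth.py now uses "cumulative" rather than the old "cumul" abbreviation.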