Commit 1215753

lint and cleanup
1 parent 8049cb0 commit 1215753

3 files changed: +17 additions, -9 deletions

google_symptoms/delphi_google_symptoms/date_utils.py

Lines changed: 5 additions & 4 deletions
```diff
@@ -2,7 +2,7 @@
 
 from datetime import date, datetime, timedelta
 from itertools import product
-from typing import Dict, List, Tuple
+from typing import Dict, List, Union
 
 import covidcast
 from delphi_utils.validator.utils import lag_converter
@@ -11,7 +11,7 @@
 from .constants import COMBINED_METRIC, FULL_BKFILL_START_DATE, PAD_DAYS, SMOOTHERS
 
 
-def generate_patch_dates(params: Dict) -> Dict[date, Tuple[date]]:
+def generate_patch_dates(params: Dict) -> Dict[date, Dict[str,Union[date, int]]]:
     """
     Generate date range for chunking backfilled dates.
 
@@ -43,11 +43,13 @@ def generate_patch_dates(params: Dict) -> Dict[date, Tuple[date]]:
 
     return patch_dates
 
+
 def get_max_lag(params: Dict) -> int:
-    """Determine reporting lag for data source"""
+    """Determine reporting lag for data source."""
     max_expected_lag = lag_converter(params["validation"]["common"].get("max_expected_lag", {"all": 4}))
     return max(list(max_expected_lag.values()))
 
+
 def generate_num_export_days(params: Dict, logger) -> [int]:
     """
     Generate dates for exporting based on current available data.
@@ -128,4 +130,3 @@ def generate_query_dates(
     retrieve_dates = [start_date - timedelta(days=PAD_DAYS - 1), export_end_date]
 
     return retrieve_dates
-
```

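The widened annotation on `generate_patch_dates` says each patch issue date now maps to a small dict whose values mix dates and ints, rather than a tuple of dates. A minimal sketch of a value with that shape, using hypothetical key names (the actual keys are not visible in this diff):

```python
from datetime import date
from typing import Dict, Union

# Illustrative value of type Dict[date, Dict[str, Union[date, int]]].
# The key names ("start_issue", "end_issue", "num_export_days") are
# assumptions for the example, not taken from the module itself.
patch_dates: Dict[date, Dict[str, Union[date, int]]] = {
    date(2024, 1, 15): {
        "start_issue": date(2024, 1, 1),
        "end_issue": date(2024, 1, 14),
        "num_export_days": 14,
    },
}
```
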
google_symptoms/delphi_google_symptoms/run.py

Lines changed: 7 additions & 1 deletion
```diff
@@ -60,7 +60,13 @@ def run_module(params, logger=None):
     )
 
     # Pull GS data
-    dfs = pull_gs_data(params["indicator"]["bigquery_credentials"], export_start_date, export_end_date, num_export_days, custom_run_flag)
+    dfs = pull_gs_data(
+        params["indicator"]["bigquery_credentials"],
+        export_start_date,
+        export_end_date,
+        num_export_days,
+        custom_run_flag,
+    )
     for geo_res in GEO_RESOLUTIONS:
         if geo_res == "state":
             df_pull = dfs["state"]
```

google_symptoms/tests/test_pull.py

Lines changed: 5 additions & 4 deletions
```diff
@@ -8,15 +8,16 @@
 from delphi_google_symptoms.pull import (
     pull_gs_data, preprocess, format_dates_for_query, pull_gs_data_one_geolevel)
 from delphi_google_symptoms.constants import METRICS, COMBINED_METRIC
+from conftest import TEST_DIR
 
 good_input = {
-    "state": "test_data/small_states_daily.csv",
-    "county": "test_data/small_counties_daily.csv"
+    "state": f"{TEST_DIR}/test_data/small_states_daily.csv",
+    "county": f"{TEST_DIR}/test_data/small_counties_daily.csv"
 }
 
 bad_input = {
-    "missing_cols": "test_data/bad_state_missing_cols.csv",
-    "invalid_fips": "test_data/bad_county_invalid_fips.csv"
+    "missing_cols": f"{TEST_DIR}/test_data/bad_state_missing_cols.csv",
+    "invalid_fips": f"{TEST_DIR}/test_data/bad_county_invalid_fips.csv"
 }
 
 symptom_names = ["symptom_" +
```

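Prefixing the fixture paths with `TEST_DIR` lets the tests locate their CSV inputs no matter which directory pytest is invoked from. A minimal sketch of how such a constant could be defined in the tests' `conftest.py` (an assumption for illustration, not necessarily the repository's actual definition):

```python
# conftest.py -- illustrative sketch only
import os

# Absolute path of the directory containing this conftest.py, so tests
# can build paths to test_data/ independently of the working directory.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
```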