Skip to content

Commit 28bc63e

Browse files
committed
handle lag, issues, as_of
1 parent f713e39 commit 28bc63e

File tree

1 file changed

+20
-20
lines changed

1 file changed

+20
-20
lines changed

src/server/endpoints/covidcast.py

+20-20
Original file line numberDiff line numberDiff line change
@@ -91,15 +91,20 @@ def parse_time_pairs() -> TimePair:
9191
return parse_time_arg()
9292

9393

94-
def _handle_lag_issues_as_of(q: QueryBuilder, issues: Optional[TimeValues] = None, lag: Optional[int] = None, as_of: Optional[int] = None):
95-
if issues:
96-
q.retable(history_table)
97-
q.where_integers("issue", issues)
98-
elif lag is not None:
94+
def _filter_by_lag(q: QueryBuilder, lag: Optional[int]):
95+
if lag is not None:
9996
q.retable(history_table)
10097
# history_table has full spectrum of lag values to search from whereas the latest_table does not
10198
q.where(lag=lag)
102-
elif as_of is not None:
99+
100+
101+
def _filter_by_issues(q: QueryBuilder, issues: Optional[TimeValues]):
102+
if issues is not None:
103+
q.retable(history_table)
104+
q.where_integers("issue", issues)
105+
106+
def _filter_by_as_of(q: QueryBuilder, as_of: Optional[int]):
107+
if as_of is not None:
103108
# fetch the most recent issue as of a certain date (not to be confused w/ plain-old "most recent issue")
104109
q.retable(history_table)
105110
sub_condition_asof = "(issue <= :as_of)"
@@ -108,9 +113,6 @@ def _handle_lag_issues_as_of(q: QueryBuilder, issues: Optional[TimeValues] = Non
108113
sub_group = "time_type, time_value, `source`, `signal`, geo_type, geo_value"
109114
sub_condition = f"x.max_issue = {q.alias}.issue AND x.time_type = {q.alias}.time_type AND x.time_value = {q.alias}.time_value AND x.source = {q.alias}.source AND x.signal = {q.alias}.signal AND x.geo_type = {q.alias}.geo_type AND x.geo_value = {q.alias}.geo_value"
110115
q.subquery = f"JOIN (SELECT {sub_fields} FROM {q.table} WHERE {q.conditions_clause} AND {sub_condition_asof} GROUP BY {sub_group}) x ON {sub_condition}"
111-
else:
112-
# else we are using the (standard/default) `latest_table`, to fetch the most recent issue quickly
113-
pass
114116

115117

116118
@bp.route("/", methods=("GET", "POST"))
@@ -147,7 +149,14 @@ def handle():
147149
q.where_geo_pairs("geo_type", "geo_value", geo_pairs)
148150
q.where_time_pair("time_type", "time_value", time_pair)
149151

150-
_handle_lag_issues_as_of(q, issues, lag, as_of)
152+
# Handle history table in this priority order: issues > lag > as_of
153+
# if all are None, we are using the (standard/default) `latest_table`, to fetch the most recent issue quickly
154+
if issues:
155+
_filter_by_issues(q, issues)
156+
elif lag is not None:
157+
_filter_by_lag(q, lag)
158+
elif as_of is not None:
159+
_filter_by_as_of(q, as_of)
151160

152161
def transform_row(row, proxy):
153162
if is_compatibility or not alias_mapper or "source" not in row:
@@ -201,8 +210,6 @@ def handle_trend():
201210
q.where_geo_pairs("geo_type", "geo_value", geo_pairs)
202211
q.where_time_pair("time_type", "time_value", time_window)
203212

204-
# fetch most recent issue fast
205-
_handle_lag_issues_as_of(q, None, None, None)
206213

207214
p = create_printer()
208215

@@ -252,8 +259,6 @@ def handle_trendseries():
252259
q.where_geo_pairs("geo_type", "geo_value", geo_pairs)
253260
q.where_time_pair("time_type", "time_value", time_window)
254261

255-
# fetch most recent issue fast
256-
_handle_lag_issues_as_of(q, None, None, None)
257262

258263
p = create_printer()
259264

@@ -386,7 +391,7 @@ def handle_export():
386391
q.where_time_pair("time_type", "time_value", TimePair("day" if is_day else "week", [(start_day, end_day)]))
387392
q.where_geo_pairs("geo_type", "geo_value", [GeoPair(geo_type, True if geo_values == "*" else geo_values)])
388393

389-
_handle_lag_issues_as_of(q, None, None, as_of)
394+
_filter_by_as_of(q, as_of)
390395

391396
format_date = time_value_to_iso if is_day else lambda x: time_value_to_week(x).cdcformat()
392397
# tag as_of in filename, if it was specified
@@ -466,9 +471,6 @@ def handle_backfill():
466471
q.where_geo_pairs("geo_type", "geo_value", [geo_pair])
467472
q.where_time_pair("time_type", "time_value", time_pair)
468473

469-
# no restriction of issues or dates since we want all issues
470-
# _handle_lag_issues_as_of(q, issues, lag, as_of)
471-
472474
p = create_printer()
473475

474476
def find_anchor_row(rows: List[Dict[str, Any]], issue: int) -> Optional[Dict[str, Any]]:
@@ -644,8 +646,6 @@ def handle_coverage():
644646
q.group_by = "c.source, c.signal, c.time_value"
645647
q.set_order("source", "signal", "time_value")
646648

647-
_handle_lag_issues_as_of(q, None, None, None)
648-
649649
def transform_row(row, proxy):
650650
if not alias_mapper or "source" not in row:
651651
return row

0 commit comments

Comments
 (0)