@@ -91,15 +91,20 @@ def parse_time_pairs() -> TimePair:
91
91
return parse_time_arg ()
92
92
93
93
94
- def _handle_lag_issues_as_of (q : QueryBuilder , issues : Optional [TimeValues ] = None , lag : Optional [int ] = None , as_of : Optional [int ] = None ):
95
- if issues :
96
- q .retable (history_table )
97
- q .where_integers ("issue" , issues )
98
- elif lag is not None :
94
+ def _filter_by_lag (q : QueryBuilder , lag : Optional [int ]):
95
+ if lag is not None :
99
96
q .retable (history_table )
100
97
# history_table has full spectrum of lag values to search from whereas the latest_table does not
101
98
q .where (lag = lag )
102
- elif as_of is not None :
99
+
100
+
101
+ def _filter_by_issues (q : QueryBuilder , issues : Optional [TimeValues ]):
102
+ if issues is not None :
103
+ q .retable (history_table )
104
+ q .where_integers ("issue" , issues )
105
+
106
+ def _filter_by_as_of (q : QueryBuilder , as_of : Optional [int ]):
107
+ if as_of is not None :
103
108
# fetch the most recent issue as of a certain date (not to be confused w/ plain-old "most recent issue"
104
109
q .retable (history_table )
105
110
sub_condition_asof = "(issue <= :as_of)"
@@ -108,9 +113,6 @@ def _handle_lag_issues_as_of(q: QueryBuilder, issues: Optional[TimeValues] = Non
108
113
sub_group = "time_type, time_value, `source`, `signal`, geo_type, geo_value"
109
114
sub_condition = f"x.max_issue = { q .alias } .issue AND x.time_type = { q .alias } .time_type AND x.time_value = { q .alias } .time_value AND x.source = { q .alias } .source AND x.signal = { q .alias } .signal AND x.geo_type = { q .alias } .geo_type AND x.geo_value = { q .alias } .geo_value"
110
115
q .subquery = f"JOIN (SELECT { sub_fields } FROM { q .table } WHERE { q .conditions_clause } AND { sub_condition_asof } GROUP BY { sub_group } ) x ON { sub_condition } "
111
- else :
112
- # else we are using the (standard/default) `latest_table`, to fetch the most recent issue quickly
113
- pass
114
116
115
117
116
118
@bp .route ("/" , methods = ("GET" , "POST" ))
@@ -147,7 +149,14 @@ def handle():
147
149
q .where_geo_pairs ("geo_type" , "geo_value" , geo_pairs )
148
150
q .where_time_pair ("time_type" , "time_value" , time_pair )
149
151
150
- _handle_lag_issues_as_of (q , issues , lag , as_of )
152
+ # Handle history table in this priority order: issues > lag > as_of
153
+ # if all are None, we are using the (standard/default) `latest_table`, to fetch the most recent issue quickly
154
+ if issues :
155
+ _filter_by_issues (q , issues )
156
+ elif lag is not None :
157
+ _filter_by_lag (q , lag )
158
+ elif as_of is not None :
159
+ _filter_by_as_of (q , as_of )
151
160
152
161
def transform_row (row , proxy ):
153
162
if is_compatibility or not alias_mapper or "source" not in row :
@@ -201,8 +210,6 @@ def handle_trend():
201
210
q .where_geo_pairs ("geo_type" , "geo_value" , geo_pairs )
202
211
q .where_time_pair ("time_type" , "time_value" , time_window )
203
212
204
- # fetch most recent issue fast
205
- _handle_lag_issues_as_of (q , None , None , None )
206
213
207
214
p = create_printer ()
208
215
@@ -252,8 +259,6 @@ def handle_trendseries():
252
259
q .where_geo_pairs ("geo_type" , "geo_value" , geo_pairs )
253
260
q .where_time_pair ("time_type" , "time_value" , time_window )
254
261
255
- # fetch most recent issue fast
256
- _handle_lag_issues_as_of (q , None , None , None )
257
262
258
263
p = create_printer ()
259
264
@@ -386,7 +391,7 @@ def handle_export():
386
391
q .where_time_pair ("time_type" , "time_value" , TimePair ("day" if is_day else "week" , [(start_day , end_day )]))
387
392
q .where_geo_pairs ("geo_type" , "geo_value" , [GeoPair (geo_type , True if geo_values == "*" else geo_values )])
388
393
389
- _handle_lag_issues_as_of ( q , None , None , as_of )
394
+ _filter_by_as_of (q , as_of )
390
395
391
396
format_date = time_value_to_iso if is_day else lambda x : time_value_to_week (x ).cdcformat ()
392
397
# tag as_of in filename, if it was specified
@@ -466,9 +471,6 @@ def handle_backfill():
466
471
q .where_geo_pairs ("geo_type" , "geo_value" , [geo_pair ])
467
472
q .where_time_pair ("time_type" , "time_value" , time_pair )
468
473
469
- # no restriction of issues or dates since we want all issues
470
- # _handle_lag_issues_as_of(q, issues, lag, as_of)
471
-
472
474
p = create_printer ()
473
475
474
476
def find_anchor_row (rows : List [Dict [str , Any ]], issue : int ) -> Optional [Dict [str , Any ]]:
@@ -644,8 +646,6 @@ def handle_coverage():
644
646
q .group_by = "c.source, c.signal, c.time_value"
645
647
q .set_order ("source" , "signal" , "time_value" )
646
648
647
- _handle_lag_issues_as_of (q , None , None , None )
648
-
649
649
def transform_row (row , proxy ):
650
650
if not alias_mapper or "source" not in row :
651
651
return row
0 commit comments