diff --git a/src/server/_common.py b/src/server/_common.py
index 45813e451..d8e2bc068 100644
--- a/src/server/_common.py
+++ b/src/server/_common.py
@@ -2,15 +2,16 @@
 import time
 
 from flask import Flask, g, request
-from sqlalchemy import event
-from sqlalchemy.engine import Connection
+from sqlalchemy import create_engine, event
+from sqlalchemy.engine import Connection, Engine
 from werkzeug.local import LocalProxy
 
 from .utils.logger import get_structured_logger
-from ._config import SECRET
-from ._db import engine
+from ._config import SECRET, SQLALCHEMY_DATABASE_URI, SQLALCHEMY_ENGINE_OPTIONS
 from ._exceptions import DatabaseErrorException, EpiDataException
 
+engine: Engine = create_engine(SQLALCHEMY_DATABASE_URI, **SQLALCHEMY_ENGINE_OPTIONS)
+
 app = Flask("EpiData", static_url_path="")
 app.config["SECRET"] = SECRET
 
diff --git a/src/server/_db.py b/src/server/_db.py
deleted file mode 100644
index 9d15ef5b4..000000000
--- a/src/server/_db.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from typing import Dict, List
-from sqlalchemy import MetaData, create_engine, inspect
-from sqlalchemy.engine import Engine
-from sqlalchemy.engine.reflection import Inspector
-
-from ._config import SQLALCHEMY_DATABASE_URI, SQLALCHEMY_ENGINE_OPTIONS
-
-engine: Engine = create_engine(SQLALCHEMY_DATABASE_URI, **SQLALCHEMY_ENGINE_OPTIONS)
-metadata = MetaData(bind=engine)
-
-TABLE_OPTIONS = dict(
-    mysql_engine="InnoDB",
-    # mariadb_engine="InnoDB",
-    mysql_charset="utf8mb4",
-    # mariadb_charset="utf8",
-)
-
-
-def sql_table_has_columns(table: str, columns: List[str]) -> bool:
-    """
-    checks whether the given table has all the given columns defined
-    """
-    inspector: Inspector = inspect(engine)
-    table_columns: List[Dict] = inspector.get_columns(table)
-    table_column_names = set(str(d.get("name", "")).lower() for d in table_columns)
-    return all(c.lower() in table_column_names for c in columns)
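
With `_db.py` deleted, the shared SQLAlchemy engine now lives in `_common.py`. A minimal sketch of what a downstream import would look like after this patch; the fully-qualified module path is an assumption based on the repo layout, not something the diff shows:

    # Hypothetical caller (e.g. a one-off maintenance script). The package path
    # below is assumed from the repo layout; adjust to wherever src/server maps.
    from delphi.epidata.server._common import engine

    # SQLAlchemy 1.x-style connection check against the pooled engine
    with engine.connect() as conn:
        print(conn.execute("SELECT 1").scalar())

Note that `metadata` and the `sql_table_has_columns` helper are removed along with the file rather than relocated.
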
diff --git a/src/server/_params.py b/src/server/_params.py
index 2cef9725b..0879601e6 100644
--- a/src/server/_params.py
+++ b/src/server/_params.py
@@ -7,7 +7,7 @@
 
 from ._exceptions import ValidationFailedException
-from .utils import days_in_range, weeks_in_range, guess_time_value_is_day, guess_time_value_is_week, TimeValues, days_to_ranges, weeks_to_ranges
+from .utils import days_in_range, weeks_in_range, guess_time_value_is_day, guess_time_value_is_week, IntRange, TimeValues, days_to_ranges, weeks_to_ranges
 
 
 def _parse_common_multi_arg(key: str) -> List[Tuple[str, Union[bool, Sequence[str]]]]:
@@ -140,7 +140,7 @@ def to_ranges(self):
         return TimePair(self.time_type, days_to_ranges(self.time_values))
 
 
-def _verify_range(start: int, end: int) -> Union[int, Tuple[int, int]]:
+def _verify_range(start: int, end: int) -> IntRange:
     if start == end:
         # the first and last numbers are the same, just treat it as a single value
         return start
@@ -151,7 +151,7 @@
     raise ValidationFailedException(f"the given range {start}-{end} is inverted")
 
 
-def parse_week_value(time_value: str) -> Union[int, Tuple[int, int]]:
+def parse_week_value(time_value: str) -> IntRange:
     count_dashes = time_value.count("-")
     msg = f"{time_value} does not match a known format YYYYWW or YYYYWW-YYYYWW"
@@ -171,7 +171,7 @@
     raise ValidationFailedException(msg)
 
 
-def parse_day_value(time_value: str) -> Union[int, Tuple[int, int]]:
+def parse_day_value(time_value: str) -> IntRange:
     count_dashes = time_value.count("-")
     msg = f"{time_value} does not match a known format YYYYMMDD, YYYY-MM-DD, YYYYMMDD-YYYYMMDD, or YYYY-MM-DD--YYYY-MM-DD"
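
The new `IntRange` alias makes the parse helpers' contract explicit: a single date/week integer, or an inclusive `(first, last)` tuple. A rough sketch of the expected behavior; the range case is not shown in the hunk above, so the tuple result is an assumption based on `_verify_range`'s signature:

    # illustrative expectations, not part of the patch
    parse_day_value("20200101")           # -> 20200101 (a single day)
    parse_day_value("20200101-20200331")  # -> (20200101, 20200331), presumably, via _verify_range
    parse_day_value("20200331-20200101")  # raises ValidationFailedException (inverted range)
    parse_week_value("202001-202001")     # -> 202001: _verify_range collapses equal endpoints
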
[f"{self.alias}.{k} ASC" for k in args] return self def with_max_issue(self, *args: str) -> "QueryBuilder": diff --git a/src/server/_validate.py b/src/server/_validate.py index 59e5aa7d0..ad82b8425 100644 --- a/src/server/_validate.py +++ b/src/server/_validate.py @@ -3,7 +3,7 @@ from flask import request from ._exceptions import UnAuthenticatedException, ValidationFailedException -from .utils import TimeValues +from .utils import IntRange, TimeValues def resolve_auth_token() -> Optional[str]: @@ -84,9 +84,6 @@ def extract_strings(key: Union[str, Sequence[str]]) -> Optional[List[str]]: return [v for vs in s for v in vs.split(",")] -IntRange = Union[Tuple[int, int], int] - - def extract_integer(key: Union[str, Sequence[str]]) -> Optional[int]: s = _extract_value(key) if not s: diff --git a/src/server/endpoints/covid_hosp_facility.py b/src/server/endpoints/covid_hosp_facility.py index 8d5cb0b77..4bbe863c2 100644 --- a/src/server/endpoints/covid_hosp_facility.py +++ b/src/server/endpoints/covid_hosp_facility.py @@ -139,7 +139,7 @@ def handle(): q.set_fields(fields_string, fields_int, fields_float) # basic query info - q.set_order("collection_week", "hospital_pk", "publication_date") + q.set_sort_order("collection_week", "hospital_pk", "publication_date") # build the filter q.where_integers("collection_week", collection_weeks) diff --git a/src/server/endpoints/covid_hosp_facility_lookup.py b/src/server/endpoints/covid_hosp_facility_lookup.py index 880767135..0fa94e99e 100644 --- a/src/server/endpoints/covid_hosp_facility_lookup.py +++ b/src/server/endpoints/covid_hosp_facility_lookup.py @@ -33,7 +33,7 @@ def handle(): ] ) # basic query info - q.set_order("hospital_pk") + q.set_sort_order("hospital_pk") # build the filter # these are all fast because the table has indexes on each of these fields if state: diff --git a/src/server/endpoints/covid_hosp_state_timeseries.py b/src/server/endpoints/covid_hosp_state_timeseries.py index 5da4d4e16..63540e5c8 100644 --- a/src/server/endpoints/covid_hosp_state_timeseries.py +++ b/src/server/endpoints/covid_hosp_state_timeseries.py @@ -145,7 +145,7 @@ def handle(): ] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("date", "state", "issue") + q.set_sort_order("date", "state", "issue") # build the filter q.where_integers("date", dates) diff --git a/src/server/endpoints/covidcast.py b/src/server/endpoints/covidcast.py index 0c22e4573..f7e4564a2 100644 --- a/src/server/endpoints/covidcast.py +++ b/src/server/endpoints/covidcast.py @@ -91,28 +91,6 @@ def parse_time_pairs() -> TimePair: return parse_time_arg() -def _handle_lag_issues_as_of(q: QueryBuilder, issues: Optional[TimeValues] = None, lag: Optional[int] = None, as_of: Optional[int] = None): - if issues: - q.retable(history_table) - q.where_integers("issue", issues) - elif lag is not None: - q.retable(history_table) - # history_table has full spectrum of lag values to search from whereas the latest_table does not - q.where(lag=lag) - elif as_of is not None: - # fetch the most recent issue as of a certain date (not to be confused w/ plain-old "most recent issue" - q.retable(history_table) - sub_condition_asof = "(issue <= :as_of)" - q.params["as_of"] = as_of - sub_fields = "max(issue) max_issue, time_type, time_value, `source`, `signal`, geo_type, geo_value" - sub_group = "time_type, time_value, `source`, `signal`, geo_type, geo_value" - sub_condition = f"x.max_issue = {q.alias}.issue AND x.time_type = {q.alias}.time_type AND x.time_value = {q.alias}.time_value AND x.source = 
diff --git a/src/server/_validate.py b/src/server/_validate.py
index 59e5aa7d0..ad82b8425 100644
--- a/src/server/_validate.py
+++ b/src/server/_validate.py
@@ -3,7 +3,7 @@
 from flask import request
 
 from ._exceptions import UnAuthenticatedException, ValidationFailedException
-from .utils import TimeValues
+from .utils import IntRange, TimeValues
 
 
 def resolve_auth_token() -> Optional[str]:
@@ -84,9 +84,6 @@ def extract_strings(key: Union[str, Sequence[str]]) -> Optional[List[str]]:
     return [v for vs in s for v in vs.split(",")]
 
 
-IntRange = Union[Tuple[int, int], int]
-
-
 def extract_integer(key: Union[str, Sequence[str]]) -> Optional[int]:
     s = _extract_value(key)
     if not s:
diff --git a/src/server/endpoints/covid_hosp_facility.py b/src/server/endpoints/covid_hosp_facility.py
index 8d5cb0b77..4bbe863c2 100644
--- a/src/server/endpoints/covid_hosp_facility.py
+++ b/src/server/endpoints/covid_hosp_facility.py
@@ -139,7 +139,7 @@ def handle():
     q.set_fields(fields_string, fields_int, fields_float)
 
     # basic query info
-    q.set_order("collection_week", "hospital_pk", "publication_date")
+    q.set_sort_order("collection_week", "hospital_pk", "publication_date")
 
     # build the filter
     q.where_integers("collection_week", collection_weeks)
diff --git a/src/server/endpoints/covid_hosp_facility_lookup.py b/src/server/endpoints/covid_hosp_facility_lookup.py
index 880767135..0fa94e99e 100644
--- a/src/server/endpoints/covid_hosp_facility_lookup.py
+++ b/src/server/endpoints/covid_hosp_facility_lookup.py
@@ -33,7 +33,7 @@ def handle():
         ]
     )
     # basic query info
-    q.set_order("hospital_pk")
+    q.set_sort_order("hospital_pk")
     # build the filter
     # these are all fast because the table has indexes on each of these fields
     if state:
diff --git a/src/server/endpoints/covid_hosp_state_timeseries.py b/src/server/endpoints/covid_hosp_state_timeseries.py
index 5da4d4e16..63540e5c8 100644
--- a/src/server/endpoints/covid_hosp_state_timeseries.py
+++ b/src/server/endpoints/covid_hosp_state_timeseries.py
@@ -145,7 +145,7 @@ def handle():
     ]
     q.set_fields(fields_string, fields_int, fields_float)
 
-    q.set_order("date", "state", "issue")
+    q.set_sort_order("date", "state", "issue")
 
     # build the filter
     q.where_integers("date", dates)
"time_value", TimePair("day" if is_day else "week", [(start_day, end_day)])) q.where_geo_pairs("geo_type", "geo_value", [GeoPair(geo_type, True if geo_values == "*" else geo_values)]) - _handle_lag_issues_as_of(q, None, None, as_of) + q.apply_as_of_filter(history_table, as_of) format_date = time_value_to_iso if is_day else lambda x: time_value_to_week(x).cdcformat() # tag as_of in filename, if it was specified @@ -459,16 +433,13 @@ def handle_backfill(): fields_int = ["time_value", "issue"] fields_float = ["value", "sample_size"] # sort by time value and issue asc - q.set_order(time_value=True, issue=True) + q.set_sort_order("time_value", "issue") q.set_fields(fields_string, fields_int, fields_float, ["is_latest_issue"]) q.where_source_signal_pairs("source", "signal", source_signal_pairs) q.where_geo_pairs("geo_type", "geo_value", [geo_pair]) q.where_time_pair("time_type", "time_value", time_pair) - # no restriction of issues or dates since we want all issues - # _handle_lag_issues_as_of(q, issues, lag, as_of) - p = create_printer() def find_anchor_row(rows: List[Dict[str, Any]], issue: int) -> Optional[Dict[str, Any]]: @@ -642,9 +613,7 @@ def handle_coverage(): q.where_source_signal_pairs("source", "signal", source_signal_pairs) q.where_time_pair("time_type", "time_value", time_window) q.group_by = "c.source, c.signal, c.time_value" - q.set_order("source", "signal", "time_value") - - _handle_lag_issues_as_of(q, None, None, None) + q.set_sort_order("source", "signal", "time_value") def transform_row(row, proxy): if not alias_mapper or "source" not in row: diff --git a/src/server/endpoints/covidcast_utils/model.py b/src/server/endpoints/covidcast_utils/model.py index 154bb3668..bbefe3cd4 100644 --- a/src/server/endpoints/covidcast_utils/model.py +++ b/src/server/endpoints/covidcast_utils/model.py @@ -236,11 +236,6 @@ def _load_data_signals(sources: List[DataSource]): data_signals_by_key[(source.db_source, d.signal)] = d - -def get_related_signals(signal: DataSignal) -> List[DataSignal]: - return [s for s in data_signals if s != signal and s.signal_basename == signal.signal_basename] - - def count_signal_time_types(source_signals: List[SourceSignalPair]) -> Tuple[int, int]: """ count the number of signals in this query for each time type diff --git a/src/server/endpoints/dengue_nowcast.py b/src/server/endpoints/dengue_nowcast.py index 206d4dff0..cb5747a4d 100644 --- a/src/server/endpoints/dengue_nowcast.py +++ b/src/server/endpoints/dengue_nowcast.py @@ -22,7 +22,7 @@ def handle(): fields_float = ["value", "std"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "location") + q.set_sort_order("epiweek", "location") # build the filter q.where_strings("location", locations) diff --git a/src/server/endpoints/dengue_sensors.py b/src/server/endpoints/dengue_sensors.py index df3672209..e1a8fbcf9 100644 --- a/src/server/endpoints/dengue_sensors.py +++ b/src/server/endpoints/dengue_sensors.py @@ -26,7 +26,7 @@ def handle(): fields_float = ["value"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order('epiweek', 'name', 'location') + q.set_sort_order('epiweek', 'name', 'location') q.where_strings('name', names) q.where_strings('location', locations) diff --git a/src/server/endpoints/ecdc_ili.py b/src/server/endpoints/ecdc_ili.py index 75b253b1e..af932d296 100644 --- a/src/server/endpoints/ecdc_ili.py +++ b/src/server/endpoints/ecdc_ili.py @@ -24,7 +24,7 @@ def handle(): fields_float = ["incidence_rate"] q.set_fields(fields_string, fields_int, fields_float) 
- q.set_order("epiweek", "region", "issue") + q.set_sort_order("epiweek", "region", "issue") q.where_integers("epiweek", epiweeks) q.where_strings("region", regions) diff --git a/src/server/endpoints/flusurv.py b/src/server/endpoints/flusurv.py index 5205056db..a80159d09 100644 --- a/src/server/endpoints/flusurv.py +++ b/src/server/endpoints/flusurv.py @@ -29,7 +29,7 @@ def handle(): "rate_overall", ] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "location", "issue") + q.set_sort_order("epiweek", "location", "issue") q.where_integers("epiweek", epiweeks) q.where_strings("location", locations) diff --git a/src/server/endpoints/fluview_clinicial.py b/src/server/endpoints/fluview_clinicial.py index 650ec9add..dd095a7d8 100644 --- a/src/server/endpoints/fluview_clinicial.py +++ b/src/server/endpoints/fluview_clinicial.py @@ -22,7 +22,7 @@ def handle(): fields_int = ["issue", "epiweek", "lag", "total_specimens", "total_a", "total_b"] fields_float = ["percent_positive", "percent_a", "percent_b"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "region", "issue") + q.set_sort_order("epiweek", "region", "issue") q.where_integers("epiweek", epiweeks) q.where_strings("region", regions) diff --git a/src/server/endpoints/gft.py b/src/server/endpoints/gft.py index 8179b3522..ab776c6e5 100644 --- a/src/server/endpoints/gft.py +++ b/src/server/endpoints/gft.py @@ -22,7 +22,7 @@ def handle(): fields_int = ["epiweek", "num"] fields_float = [] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "location") + q.set_sort_order("epiweek", "location") # build the filter q.where_integers("epiweek", epiweeks) diff --git a/src/server/endpoints/ght.py b/src/server/endpoints/ght.py index 3d5c0dec1..b30a42abc 100644 --- a/src/server/endpoints/ght.py +++ b/src/server/endpoints/ght.py @@ -26,7 +26,7 @@ def handle(): fields_float = ["value"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "location") + q.set_sort_order("epiweek", "location") # build the filter q.where_strings("location", locations) diff --git a/src/server/endpoints/kcdc_ili.py b/src/server/endpoints/kcdc_ili.py index 08158cdaf..32933eb3e 100644 --- a/src/server/endpoints/kcdc_ili.py +++ b/src/server/endpoints/kcdc_ili.py @@ -24,7 +24,7 @@ def handle(): fields_float = ["ili"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "region", "issue") + q.set_sort_order("epiweek", "region", "issue") # build the filter q.where_integers("epiweek", epiweeks) q.where_strings("region", regions) diff --git a/src/server/endpoints/nidss_flu.py b/src/server/endpoints/nidss_flu.py index 3caf099dc..989a41a3d 100644 --- a/src/server/endpoints/nidss_flu.py +++ b/src/server/endpoints/nidss_flu.py @@ -23,7 +23,7 @@ def handle(): fields_int = ["issue", "epiweek", "lag", "visits"] fields_float = ["ili"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order("epiweek", "region", "issue") + q.set_sort_order("epiweek", "region", "issue") # build the filter q.where_integers("epiweek", epiweeks) diff --git a/src/server/endpoints/nowcast.py b/src/server/endpoints/nowcast.py index 88ee83400..77ed84401 100644 --- a/src/server/endpoints/nowcast.py +++ b/src/server/endpoints/nowcast.py @@ -22,7 +22,7 @@ def handle(): fields_float = ["value", "std"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order(epiweek=True, location=True) + q.set_sort_order("epiweek", "location") # build the filter 
q.where_strings("location", locations) diff --git a/src/server/endpoints/paho_dengue.py b/src/server/endpoints/paho_dengue.py index 3afd11a6f..0a50885c2 100644 --- a/src/server/endpoints/paho_dengue.py +++ b/src/server/endpoints/paho_dengue.py @@ -32,7 +32,7 @@ def handle(): fields_float = ["incidence_rate"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order(epiweek=True, region=True, issue=True) + q.set_sort_order("epiweek", "region", "issue") # build the filter q.where_integers("epiweek", epiweeks) diff --git a/src/server/endpoints/quidel.py b/src/server/endpoints/quidel.py index c32a8a040..f96b4dd20 100644 --- a/src/server/endpoints/quidel.py +++ b/src/server/endpoints/quidel.py @@ -25,7 +25,7 @@ def handle(): fields_float = ["value"] q.set_fields(fields_string, fields_int, fields_float) - q.set_order(epiweek=True, location=True) + q.set_sort_order("epiweek", "location") # build the filter q.where_strings("location", locations) diff --git a/src/server/utils/__init__.py b/src/server/utils/__init__.py index efab6c030..2e99dfeba 100644 --- a/src/server/utils/__init__.py +++ b/src/server/utils/__init__.py @@ -1 +1 @@ -from .dates import shift_day_value, day_to_time_value, time_value_to_iso, time_value_to_day, days_in_range, weeks_in_range, shift_week_value, week_to_time_value, time_value_to_week, guess_time_value_is_day, guess_time_value_is_week, time_values_to_ranges, days_to_ranges, weeks_to_ranges, TimeValues +from .dates import shift_day_value, day_to_time_value, time_value_to_iso, time_value_to_day, days_in_range, weeks_in_range, shift_week_value, week_to_time_value, time_value_to_week, guess_time_value_is_day, guess_time_value_is_week, time_values_to_ranges, days_to_ranges, weeks_to_ranges, IntRange, TimeValues diff --git a/src/server/utils/dates.py b/src/server/utils/dates.py index b85465bb8..126f79383 100644 --- a/src/server/utils/dates.py +++ b/src/server/utils/dates.py @@ -13,7 +13,8 @@ from .logger import get_structured_logger # Alias for a sequence of date ranges (int, int) or date integers -TimeValues: TypeAlias = Sequence[Union[Tuple[int, int], int]] +IntRange: TypeAlias = Union[Tuple[int, int], int] +TimeValues: TypeAlias = Sequence[IntRange] def time_value_to_day(value: int) -> date: year, month, day = value // 10000, (value % 10000) // 100, value % 100