
Commit a1efe48

JIT: major feature commit
* add smooth_diff
* add model updates
* add /trend endpoint
* add /trendseries endpoint
* add /csv endpoint
* params with utility functions
* update date utility functions
Parent: 0fa53d7

20 files changed, +1585 -117 lines
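The headline additions are three new HTTP endpoints (/trend, /trendseries, /csv) plus the smooth_diff transform and supporting parameter and date utilities. As rough context, a client call against the new /trend route might look like the sketch below; the base URL and every query parameter name are assumptions modeled on the public covidcast API, not taken from this diff:

# Hypothetical client call for the new /trend endpoint. The base URL and the
# covidcast-style parameters are assumptions for illustration; consult the
# endpoint code for the actual parameter names.
import requests

BASE = "https://api.delphi.cmu.edu/epidata/covidcast"  # assumed deployment URL

params = {
    "data_source": "jhu-csse",                    # assumed example source
    "signal": "confirmed_7dav_incidence_num",     # assumed example signal
    "time_type": "day",
    "geo_type": "state",
    "geo_value": "pa",
    "window": "20200401-20200430",                # hypothetical trend window
    "date": "20200430",                           # hypothetical basis date
}

resp = requests.get(f"{BASE}/trend", params=params, timeout=30)
resp.raise_for_status()
print(resp.json())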

deploy.json

+7-1
@@ -32,14 +32,20 @@
       "match": "^.*\\.(py)$",
       "add-header-comment": true
     },
+    {
+      "type": "move",
+      "src": "src/server/utils",
+      "dst": "[[package]]/server/utils/",
+      "match": "^.*\\.(py)$",
+      "add-header-comment": true
+    },
     {
       "type": "move",
       "src": "src/server/endpoints/covidcast_utils",
       "dst": "[[package]]/server/endpoints/covidcast_utils/",
       "match": "^.*\\.(py)$",
       "add-header-comment": true
     },
-
     "// acquisition - fluview",
     {
       "type": "move",

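The new deploy.json entry stages src/server/utils alongside the other server packages. The deploy tool that interprets these rules is not part of this diff; the sketch below only illustrates what a "move" rule with these keys plausibly does (copy files matching the regex into the package tree and prepend a header comment). The function name, header text, and install prefix are invented for illustration:

# Illustrative interpreter for one "move" rule as configured above; the real
# deploy script is not shown in this commit, so apply_move_rule, HEADER, and
# the package root are hypothetical.
import re
import shutil
from pathlib import Path

HEADER = "# THIS FILE WAS DEPLOYED AUTOMATICALLY - DO NOT EDIT\n"  # assumed wording

def apply_move_rule(rule: dict, package_root: str) -> None:
    src = Path(rule["src"])
    dst = Path(rule["dst"].replace("[[package]]", package_root))
    pattern = re.compile(rule["match"])
    dst.mkdir(parents=True, exist_ok=True)
    for path in src.iterdir():
        if path.is_file() and pattern.match(path.name):
            target = dst / path.name
            shutil.copyfile(path, target)
            if rule.get("add-header-comment"):
                target.write_text(HEADER + target.read_text())

# apply_move_rule(
#     {"type": "move", "src": "src/server/utils", "dst": "[[package]]/server/utils/",
#      "match": "^.*\\.(py)$", "add-header-comment": True},
#     package_root="/var/www/html/epidata",  # hypothetical install prefix
# )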
integrations/server/test_covidcast_endpoints.py

+281-44
Large diffs are not rendered by default.

requirements.api.txt

+1
@@ -1,3 +1,4 @@
+delphi_utils
 epiweeks==2.1.2
 Flask==2.2.2
 itsdangerous<2.1

src/acquisition/covidcast/covidcast_row.py

+2-1
@@ -219,7 +219,8 @@ def as_dataframe(self, ignore_fields: Optional[List[str]] = None) -> pd.DataFrame:
             df = pd.concat([row.as_dataframe(ignore_fields=ignore_fields) for row in self.rows], ignore_index=True)
             return df[columns]
         else:
-            return pd.DataFrame(columns=columns)
+            df = pd.DataFrame(columns=columns)
+            return set_df_dtypes(df, self._pandas_dtypes)
 
     @property
     def api_row_df(self) -> pd.DataFrame:
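With this change the empty-result branch returns a frame whose columns carry the same dtypes as the populated case, instead of defaulting every column to object. set_df_dtypes is defined elsewhere in the repo and is not shown in this diff; a minimal sketch of what such a helper could look like, assuming it receives a column-to-dtype mapping, is:

# Minimal sketch of a dtype-setting helper, assuming a mapping of column name
# to pandas/numpy dtype; the real set_df_dtypes may differ.
from typing import Any, Dict
import pandas as pd

def set_df_dtypes(df: pd.DataFrame, dtypes: Dict[str, Any]) -> pd.DataFrame:
    # Cast only columns that are present, so the helper also works on empty
    # frames created with pd.DataFrame(columns=columns).
    return df.astype({col: dt for col, dt in dtypes.items() if col in df.columns})

Keeping dtypes stable on empty results matters mainly for tests and for concatenation with non-empty frames.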

src/server/_config.py

+1
@@ -8,6 +8,7 @@
 
 MAX_RESULTS = int(10e6)
 MAX_COMPATIBILITY_RESULTS = int(3650)
+MAX_SMOOTHER_WINDOW = 30
 
 SQLALCHEMY_DATABASE_URI = os.environ.get("SQLALCHEMY_DATABASE_URI", "sqlite:///test.db")
 
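MAX_SMOOTHER_WINDOW presumably caps how far back the new smoothing transform may look. Where the cap is enforced is not visible in this hunk; a plausible validation step, with the parameter name smoother_window assumed for illustration, would be:

# Hypothetical validation of a client-supplied smoother window against the new
# cap; only the constant comes from this commit.
MAX_SMOOTHER_WINDOW = 30

def validate_smoother_window(requested: int) -> int:
    if not 1 <= requested <= MAX_SMOOTHER_WINDOW:
        raise ValueError(f"smoother window must be between 1 and {MAX_SMOOTHER_WINDOW}, got {requested}")
    return requested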
src/server/_params.py

+30-3
@@ -1,10 +1,11 @@
-from math import inf
 import re
 from dataclasses import dataclass
+from itertools import groupby
+from math import inf
 from typing import List, Optional, Sequence, Tuple, Union
 
 from flask import request
-
+from more_itertools import flatten
 
 from ._exceptions import ValidationFailedException
 from .utils import days_in_range, weeks_in_range, guess_time_value_is_day, guess_time_value_is_week, TimeValues, days_to_ranges, weeks_to_ranges
@@ -92,9 +93,35 @@ def count(self) -> float:
             return inf if self.signal else 0
         return len(self.signal)
 
+    def add_signal(self, signal: str) -> None:
+        if not isinstance(self.signal, bool):
+            self.signal.append(signal)
+
+    def __hash__(self) -> int:
+        return hash((self.source, self.signal if self.signal is isinstance(self.signal, bool) else tuple(self.signal)))
+
+
+def _combine_source_signal_pairs(source_signal_pairs: List[SourceSignalPair]) -> List[SourceSignalPair]:
+    """Combine SourceSignalPairs with the same source into a single SourceSignalPair object.
+
+    Example:
+    [SourceSignalPair("src", ["sig1", "sig2"]), SourceSignalPair("src", ["sig2", "sig3"])] will be merged
+    into [SourceSignalPair("src", ["sig1", "sig2", "sig3"])].
+    """
+    source_signal_pairs_grouped = groupby(sorted(source_signal_pairs, key=lambda x: x.source), lambda x: x.source)
+    source_signal_pairs_combined = []
+    for source, group in source_signal_pairs_grouped:
+        group = list(group)
+        if any(x.signal == True for x in group):
+            combined_signals = True
+        else:
+            combined_signals = sorted(set(flatten(x.signal for x in group)))
+        source_signal_pairs_combined.append(SourceSignalPair(source, combined_signals))
+    return source_signal_pairs_combined
+
 
 def parse_source_signal_arg(key: str = "signal") -> List[SourceSignalPair]:
-    return [SourceSignalPair(source, signals) for [source, signals] in _parse_common_multi_arg(key)]
+    return _combine_source_signal_pairs(SourceSignalPair(source, signals) for [source, signals] in _parse_common_multi_arg(key))
 
 
 def parse_single_source_signal_arg(key: str) -> SourceSignalPair:
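For reference, here is a standalone illustration of the merge behavior described in the _combine_source_signal_pairs docstring. SourceSignalPair is reduced to the two fields used by the function, and more_itertools.flatten is replaced by itertools.chain so the snippet runs without extra dependencies:

# Self-contained demo of combining SourceSignalPairs that share a source.
from dataclasses import dataclass
from itertools import chain, groupby
from typing import List, Union

@dataclass
class SourceSignalPair:
    source: str
    signal: Union[bool, List[str]]  # True means "all signals of this source"

def combine(pairs: List[SourceSignalPair]) -> List[SourceSignalPair]:
    out = []
    for source, group in groupby(sorted(pairs, key=lambda p: p.source), lambda p: p.source):
        group = list(group)
        if any(p.signal is True for p in group):
            out.append(SourceSignalPair(source, True))
        else:
            out.append(SourceSignalPair(source, sorted(set(chain.from_iterable(p.signal for p in group)))))
    return out

print(combine([SourceSignalPair("src", ["sig1", "sig2"]), SourceSignalPair("src", ["sig2", "sig3"])]))
# -> [SourceSignalPair(source='src', signal=['sig1', 'sig2', 'sig3'])]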

src/server/_printer.py

+1-1
@@ -58,7 +58,7 @@ def gen():
                 r = self._print_row(row)
                 if r is not None:
                     yield r
-            except:
+            except Exception as e:
                 get_structured_logger('server_error').error("Exception while executing printer", exception=e)
                 self.result = -1
                 yield self._error(e)
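This one-line change is a genuine fix, not just style: with the bare except:, the handler referenced e without ever binding it, so any error raised while printing rows produced a NameError inside the handler instead of being logged. A tiny reproduction of the old failure mode:

# Why the bare except was broken: `e` was never bound, so the handler itself
# raised NameError instead of logging the original error.
try:
    try:
        raise RuntimeError("boom")
    except:                      # old form: exception not bound to a name
        print("caught:", e)      # NameError: name 'e' is not defined
except NameError as err:
    print("handler itself failed:", err)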

src/server/_validate.py

+21
@@ -98,6 +98,17 @@ def extract_integer(key: Union[str, Sequence[str]]) -> Optional[int]:
         raise ValidationFailedException(f"{key}: not a number: {s}")
 
 
+def extract_float(key: Union[str, Sequence[str]]) -> Optional[float]:
+    s = _extract_value(key)
+    if not s:
+        # nothing to do
+        return None
+    try:
+        return float(s)
+    except ValueError as e:
+        raise ValidationFailedException(f"{key}: not a number: {s}")
+
+
 def extract_integers(key: Union[str, Sequence[str]]) -> Optional[List[IntRange]]:
     parts = extract_strings(key)
     if not parts:
@@ -187,3 +198,13 @@ def push_range(first: str, last: str):
         values.append(parse_date(part))
     # success, return the list
     return values
+
+def extract_bool(key: Union[str, Sequence[str]]) -> Optional[bool]:
+    s = _extract_value(key)
+    if not s:
+        return None
+    if s.lower() == "true":
+        return True
+    if s.lower() == "false":
+        return False
+    raise ValidationFailedException(f"{key}: not a boolean: {s}")
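Both helpers follow the existing extract_* conventions: return None when the parameter is absent and raise ValidationFailedException on malformed input. A sketch of how an endpoint might use them follows; the parameter names are illustrative, not taken from this diff, and both helpers read the current Flask request, so they only work inside a request context:

# Hypothetical endpoint-side usage of the new helpers (must run inside a Flask
# request context); the parameter names are illustrative.
from ._validate import extract_bool, extract_float

def read_smoothing_params():
    smooth_values = extract_bool("smooth_values")   # None if absent, else True/False
    alpha = extract_float("smoother_alpha")         # None if absent, else float
    return (
        smooth_values if smooth_values is not None else False,
        alpha if alpha is not None else 0.5,
    )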
