@@ -38,15 +38,15 @@ def run_module(params):
         __name__, filename=params["common"].get("log_filename"),
         log_exceptions=params["common"].get("log_exceptions", True))
     mapper = GeoMapper()
-    stats = []
+    run_stats = []
     ## build the base version of the signal at the most detailed geo level you can get.
     ## compute stuff here or farm out to another function or file
     all_data = pd.DataFrame(columns=["timestamp", "val", "zip", "sample_size", "se"])
     ## aggregate & smooth
     ## TODO: add num/prop variations if needed
     for sensor, smoother, geo in product(SIGNALS, SMOOTHERS, GEOS):
         df = mapper.replace_geocode(
-            all_data, "zip",
+            all_data, "zip", geo,
             new_col="geo_id",
             date_col="timestamp")
         ## TODO: recompute sample_size, se here if not NA
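The added `geo` argument is the substantive fix in this hunk: `replace_geocode` takes both a source and a target geocode type, and the original call never passed the loop's `geo`, leaving the required target argument missing. Below is a minimal standalone sketch of what the corrected call does, assuming the delphi_utils GeoMapper API; the toy frame and the "fips" target are stand-ins chosen for illustration, not part of this PR.

import pandas as pd
from delphi_utils import GeoMapper

# Toy zip-level frame with the same columns the template builds.
all_data = pd.DataFrame({
    "timestamp": pd.to_datetime(["2021-01-01", "2021-01-01"]),
    "zip": ["15213", "15217"],
    "val": [1.0, 2.0],
    "sample_size": [10.0, 20.0],
    "se": [0.1, 0.2],
})

mapper = GeoMapper()
# Passing the target geo ("fips" here, standing in for one element of GEOS)
# maps and aggregates the zip-level rows into the new geography, writing the
# new codes into "geo_id". Omitting the target, as the old code did, fails
# with a missing-positional-argument error instead of mapping anything.
df = mapper.replace_geocode(all_data, "zip", "fips",
                            new_col="geo_id",
                            date_col="timestamp")

This also motivates the TODO just above: aggregating rows across geographies does not by itself produce valid sample_size and se values at the new level, so they need recomputing afterwards.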
@@ -63,11 +63,11 @@ def run_module(params):
             sensor_name,
             start_date=start_date)
         if len(dates) > 0:
-            stats.append((max(dates), len(dates)))
+            run_stats.append((max(dates), len(dates)))
     ## log this indicator run
     elapsed_time_in_seconds = round(time.time() - start_time, 2)
-    min_max_date = stats and min(s[0] for s in stats)
-    csv_export_count = sum(s[-1] for s in stats)
+    min_max_date = run_stats and min(s[0] for s in run_stats)
+    csv_export_count = sum(s[-1] for s in run_stats)
     max_lag_in_days = min_max_date and (datetime.now() - min_max_date).days
     formatted_min_max_date = min_max_date and min_max_date.strftime("%Y-%m-%d")
     logger.info("Completed indicator run",
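For context on the renamed list: `run_stats` appears to collect one (latest export date, CSV count) tuple per signal/smoother/geo combination, and the summary lines lean on short-circuiting `and` so that an empty run logs falsy placeholders rather than raising on `min()` of an empty sequence. A standalone sketch of that logic, with invented values:

from datetime import datetime

# One (latest exported date, number of CSVs) tuple per combination that
# actually exported something; these values are made up for illustration.
run_stats = [
    (datetime(2021, 1, 10), 14),
    (datetime(2021, 1, 8), 12),
]

# `run_stats and min(...)` evaluates to [] (falsy) when nothing was exported,
# so the dependent expressions below short-circuit the same way.
min_max_date = run_stats and min(s[0] for s in run_stats)
csv_export_count = sum(s[-1] for s in run_stats)
max_lag_in_days = min_max_date and (datetime.now() - min_max_date).days
formatted_min_max_date = min_max_date and min_max_date.strftime("%Y-%m-%d")
print(csv_export_count, max_lag_in_days, formatted_min_max_date)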