@@ -869,6 +869,7 @@ def validate(self, export_dir):
         """

         # Get relevant data file names and info.
+
         export_files = read_filenames(export_dir)
         date_filter = make_date_filter(self.start_date, self.end_date)

@@ -963,14 +964,21 @@ def validate(self, export_dir):
             if geo_sig_api_df is None:
                 continue

-            earliest_available_date = geo_sig_df["time_value"].min()
+

             # Outlier dataframe
-            outlier_start_date = earliest_available_date - outlier_lookbehind
-            outlier_end_date = earliest_available_date - timedelta(days=1)
-            outlier_api_df = geo_sig_api_df.query \
-                ('time_value <= @outlier_end_date & time_value >= @outlier_start_date')
-            self.check_positive_negative_spikes(source_df, outlier_api_df, geo_type, signal_type)
+            if (signal_type in ["confirmed_7dav_cumulative_num", "confirmed_7dav_incidence_num", \
+                "confirmed_cumulative_num", "confirmed_incidence_num", "deaths_7dav_cumulative_num", \
+                "deaths_cumulative_num"]):
+                earliest_available_date = geo_sig_df["time_value"].min()
+                source_df = geo_sig_df.query(
+                    'time_value <= @date_list[-1] & time_value >= @date_list[0]')
+                print(source_df)
+                outlier_start_date = earliest_available_date - outlier_lookbehind
+                outlier_end_date = earliest_available_date - timedelta(days=1)
+                outlier_api_df = geo_sig_api_df.query \
+                    ('time_value <= @outlier_end_date & time_value >= @outlier_start_date')
+                self.check_positive_negative_spikes(source_df, outlier_api_df, geo_type, signal_type)

             # Check data from a group of dates against recent (previous 7 days,
             # by default) data from the API.
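Side note on the added block: it leans on pandas' DataFrame.query, where an @-prefixed name resolves to a local Python variable, to slice the API frame down to the look-behind window before the spike check. Below is a minimal sketch of that filtering idiom on a toy frame; the frame, the dates, and the outlier_lookbehind value are invented for illustration only, and the window bounds are precomputed into plain locals rather than indexing @date_list inside the query string.

from datetime import timedelta

import pandas as pd

# Toy frame standing in for geo_sig_api_df: one row per day of a signal.
api_df = pd.DataFrame({
    "time_value": pd.date_range("2020-10-01", periods=30, freq="D"),
    "val": range(30),
})

# Window boundaries, analogous to outlier_start_date / outlier_end_date above.
earliest_available_date = pd.Timestamp("2020-10-20")
outlier_lookbehind = timedelta(days=14)  # assumed value for this sketch only
outlier_start_date = earliest_available_date - outlier_lookbehind
outlier_end_date = earliest_available_date - timedelta(days=1)

# @name inside the query string refers to the local variable of that name, so this
# keeps only the rows whose time_value falls inside the look-behind window.
outlier_api_df = api_df.query(
    "time_value >= @outlier_start_date & time_value <= @outlier_end_date")

print(len(outlier_api_df))  # 14 rows: 2020-10-06 through 2020-10-19

As for the new guard itself, the if signal_type in [...] condition appears to limit the positive/negative spike check to the listed confirmed/deaths count signals, so other signal types skip this check entirely.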