@@ -77,12 +77,12 @@ def upload_archive(
     csv_importer_impl=CsvImporter):
   """Upload CSVs to the database and archive them using the specified handlers.
 
-  :path_details: output from CsvImporter.find*_csv_files
-
+  :path_details: output from CsvImporter.find*_csv_files
+
   :database: an open connection to the epidata database
 
   :handlers: functions for archiving (successful, failed) files
-
+
   :return: the number of modified rows
   """
   archive_as_successful, archive_as_failed = handlers
@@ -130,7 +130,7 @@ def upload_archive(
       archive_as_successful(path_src, filename, source, logger)
     else:
       archive_as_failed(path_src, filename, source, logger)
-
+
   return total_modified_row_count
 
 
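For context, the `handlers` pair described in the docstring and unpacked at the top of `upload_archive` must be two callables matching the call sites above, i.e. `(path_src, filename, source, logger)`. A minimal sketch of compatible handlers, assuming a simple archive directory layout (the module's real `make_handlers` may differ):

```python
import os
import shutil

def make_example_handlers(data_dir):
  """Sketch only: builds an (archive_as_successful, archive_as_failed)
  pair matching the call sites in upload_archive. The directory layout
  here is an assumption, not this project's actual scheme."""

  def _archive(path_src, filename, outcome):
    # move the CSV out of the receiving area into an archive subdirectory
    dst_dir = os.path.join(data_dir, 'archive', outcome)
    os.makedirs(dst_dir, exist_ok=True)
    shutil.move(os.path.join(path_src, filename), os.path.join(dst_dir, filename))

  def archive_as_successful(path_src, filename, source, logger):
    _archive(path_src, filename, 'successful')
    logger.info('archived as successful', filename=filename, source=source)

  def archive_as_failed(path_src, filename, source, logger):
    _archive(path_src, filename, 'failed')
    logger.info('archived as failed', filename=filename, source=source)

  return (archive_as_successful, archive_as_failed)
```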
@@ -149,7 +149,7 @@ def main(
   if not path_details:
     logger.info('nothing to do; exiting...')
     return
-
+
   logger.info("Ingesting CSVs", csv_count=len(path_details))
 
   database = database_impl()
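The `logger.info("Ingesting CSVs", csv_count=...)` style binds keyword fields to each event, which suggests a structlog-like structured logger; the project's actual logger is not shown in this diff. A throwaway stand-in for local testing might look like:

```python
import json
import sys

class ExampleStructuredLogger:
  """Assumed stand-in for the structured logger used in this module;
  it accepts an event name plus arbitrary keyword fields."""
  def info(self, event, **fields):
    print(json.dumps({'event': event, 'level': 'info', **fields}), file=sys.stdout)
```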
@@ -161,13 +161,12 @@ def main(
       database,
       make_handlers(args.data_dir, args.specific_issue_date),
       logger)
-    logger.info("Finished inserting database rows", row_count=modified_row_count)
-    # the following print statement serves the same function as the logger.info call above
-    # print('inserted/updated %d rows' % modified_row_count)
+    logger.info("Finished inserting/updating database rows", row_count=modified_row_count)
   finally:
+    database.do_analyze()
     # unconditionally commit database changes since CSVs have been archived
     database.disconnect(True)
-
+
   logger.info(
     "Ingested CSVs into database",
     total_runtime_in_seconds=round(time.time() - start_time, 2))
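The new `database.do_analyze()` call runs inside the `finally` block, before `disconnect(True)` commits and closes the connection, so table statistics are refreshed right after a bulk ingest. The diff does not show its implementation; against a MySQL backend it would plausibly resemble the sketch below (the table name and `_connection` attribute are assumptions):

```python
class Database:
  """Fragment of a plausible Database class; only do_analyze is sketched."""

  def __init__(self, connection):
    self._connection = connection  # assumed: a DB-API connection to MySQL

  def do_analyze(self):
    # Refresh the optimizer's statistics after heavy insert/update traffic.
    cursor = self._connection.cursor()
    try:
      cursor.execute('ANALYZE TABLE covidcast')  # table name is a guess
      cursor.fetchall()  # MySQL returns one status row per analyzed table
    finally:
      cursor.close()
```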