Skip to content

Commit 2e49339

Browse files
authored
Merge pull request #917 from cmu-delphi/ds/remove-wip-tests
Remove remaining wip pieces in tests
2 parents a6d53f4 + 09013fa commit 2e49339

File tree

3 files changed

+14
-53
lines changed

3 files changed

+14
-53
lines changed

integrations/acquisition/covidcast/test_csv_uploading.py

+2-39
Original file line numberDiff line numberDiff line change
@@ -101,8 +101,6 @@ def test_uploading(self):
101101
log_file=log_file_directory +
102102
"output.log",
103103
data_dir=data_dir,
104-
is_wip_override=False,
105-
not_wip_override=False,
106104
specific_issue_date=False)
107105
uploader_column_rename = {"geo_id": "geo_value", "val": "value", "se": "stderr", "missing_val": "missing_value", "missing_se": "missing_stderr"}
108106

@@ -227,41 +225,6 @@ def test_uploading(self):
227225
self.setUp()
228226

229227

230-
with self.subTest("Valid wip"):
231-
values = pd.DataFrame({
232-
"geo_id": ["me", "nd", "wa"],
233-
"val": [10.0, 20.0, 30.0],
234-
"se": [0.01, 0.02, 0.03],
235-
"sample_size": [100.0, 200.0, 300.0],
236-
"missing_val": [Nans.NOT_MISSING] * 3,
237-
"missing_se": [Nans.NOT_MISSING] * 3,
238-
"missing_sample_size": [Nans.NOT_MISSING] * 3
239-
})
240-
signal_name = "wip_prototype"
241-
values.to_csv(source_receiving_dir + f'/20200419_state_{signal_name}.csv', index=False)
242-
243-
# upload CSVs
244-
main(args)
245-
response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*')
246-
247-
expected_values = pd.concat([values, pd.DataFrame({
248-
"time_value": [20200419] * 3,
249-
"signal": [signal_name] * 3,
250-
"direction": [None] * 3
251-
})], axis=1).rename(columns=uploader_column_rename).to_dict(orient="records")
252-
expected_response = {'result': 1, 'epidata': self.apply_lag(expected_values), 'message': 'success'}
253-
254-
self.assertEqual(response, expected_response)
255-
self.verify_timestamps_and_defaults()
256-
257-
# Verify that files were archived
258-
path = data_dir + f'/archive/successful/src-name/20200419_state_wip_prototype.csv.gz'
259-
self.assertIsNotNone(os.stat(path))
260-
261-
self.tearDown()
262-
self.setUp()
263-
264-
265228
with self.subTest("Valid signal with name length 32<x<64"):
266229
values = pd.DataFrame({
267230
"geo_id": ["pa"],
@@ -272,7 +235,7 @@ def test_uploading(self):
272235
"missing_se": [Nans.NOT_MISSING],
273236
"missing_sample_size": [Nans.NOT_MISSING]
274237
})
275-
signal_name = "wip_really_long_name_that_will_be_accepted"
238+
signal_name = "really_long_name_that_will_be_accepted"
276239
values.to_csv(source_receiving_dir + f'/20200419_state_{signal_name}.csv', index=False)
277240

278241
# upload CSVs
@@ -303,7 +266,7 @@ def test_uploading(self):
303266
"missing_se": [Nans.NOT_MISSING],
304267
"missing_sample_size": [Nans.NOT_MISSING]
305268
})
306-
signal_name = "wip_really_long_name_that_will_get_truncated_lorem_ipsum_dolor_sit_amet"
269+
signal_name = "really_long_name_that_will_get_truncated_lorem_ipsum_dolor_sit_amet"
307270
values.to_csv(source_receiving_dir + f'/20200419_state_{signal_name}.csv', index=False)
308271

309272
# upload CSVs

integrations/server/test_covidcast.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -88,11 +88,11 @@ def test_round_trip(self):
8888
# `covidcast` (`id`, `source`, `signal`, `time_type`, `geo_type`,
8989
# `time_value`, `geo_value`, `value_updated_timestamp`,
9090
# `value`, `stderr`, `sample_size`, `direction_updated_timestamp`,
91-
# `direction`, `issue`, `lag`, `is_latest_issue`, `is_wip`,`missing_value`,
91+
# `direction`, `issue`, `lag`, `is_latest_issue`, `missing_value`,
9292
# `missing_stderr`,`missing_sample_size`)
9393
# VALUES
9494
# (0, 'src', 'sig', 'day', 'county', 20200414, '01234',
95-
# 123, 1.5, 2.5, 3.5, 456, 4, 20200414, 0, 1, False,
95+
# 123, 1.5, 2.5, 3.5, 456, 4, 20200414, 0, 1,
9696
# {Nans.NOT_MISSING}, {Nans.NOT_MISSING}, {Nans.NOT_MISSING})
9797
# ''')
9898
# self.cnx.commit()

tests/acquisition/covidcast/test_csv_to_database.py

+10-12
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
# standard library
44
import argparse
5+
from typing import Iterable
56
import unittest
67
from unittest.mock import MagicMock
78

@@ -27,9 +28,7 @@ def _path_details(self):
2728
# a file with a data error
2829
('path/b.csv', ('src_b', 'sig_b', 'week', 'msa', 202016, 202017, 1)),
2930
# emulate a file that's named incorrectly
30-
('path/c.csv', None),
31-
# another good file w/ wip
32-
('path/d.csv', ('src_d', 'wip_sig_d', 'week', 'msa', 202016, 202017, 1)),
31+
('path/c.csv', None)
3332
]
3433

3534
def test_collect_files(self):
@@ -65,15 +64,16 @@ def load_csv_impl(path, *args):
6564
yield make_row('b1')
6665
yield None
6766
yield make_row('b3')
68-
elif path == 'path/d.csv':
69-
yield make_row('d1')
7067
else:
7168
# fail the test for any other path
7269
raise Exception('unexpected path')
7370

71+
def iter_len(l: Iterable) -> int:
72+
return len(list(l))
73+
7474
data_dir = 'data_dir'
7575
mock_database = MagicMock()
76-
mock_database.insert_or_update_bulk.return_value = 2
76+
mock_database.insert_or_update_bulk = MagicMock(wraps=iter_len)
7777
mock_csv_importer = MagicMock()
7878
mock_csv_importer.load_csv = load_csv_impl
7979
mock_file_archiver = MagicMock()
@@ -87,30 +87,28 @@ def load_csv_impl(path, *args):
8787
mock_logger,
8888
csv_importer_impl=mock_csv_importer)
8989

90-
self.assertEqual(modified_row_count, 4)
90+
self.assertEqual(modified_row_count, 3)
9191
# verify that appropriate rows were added to the database
92-
self.assertEqual(mock_database.insert_or_update_bulk.call_count, 2)
92+
self.assertEqual(mock_database.insert_or_update_bulk.call_count, 1)
9393
call_args_list = mock_database.insert_or_update_bulk.call_args_list
9494
actual_args = [[(a.source, a.signal, a.time_type, a.geo_type, a.time_value,
9595
a.geo_value, a.value, a.stderr, a.sample_size, a.issue, a.lag)
9696
for a in call.args[0]] for call in call_args_list]
9797
expected_args = [
9898
[('src_a', 'sig_a', 'day', 'hrr', 20200419, 'a1', 'a1', 'a1', 'a1', 20200420, 1),
9999
('src_a', 'sig_a', 'day', 'hrr', 20200419, 'a2', 'a2', 'a2', 'a2', 20200420, 1),
100-
('src_a', 'sig_a', 'day', 'hrr', 20200419, 'a3', 'a3', 'a3', 'a3', 20200420, 1)],
101-
[('src_d', 'wip_sig_d', 'week', 'msa', 202016, 'd1', 'd1', 'd1', 'd1', 202017, 1)]
100+
('src_a', 'sig_a', 'day', 'hrr', 20200419, 'a3', 'a3', 'a3', 'a3', 20200420, 1)]
102101
]
103102
self.assertEqual(actual_args, expected_args)
104103

105104
 # verify that one file was successful (a) and two failed (b, c)
106-
self.assertEqual(mock_file_archiver.archive_file.call_count, 4)
105+
self.assertEqual(mock_file_archiver.archive_file.call_count, 3)
107106
call_args_list = mock_file_archiver.archive_file.call_args_list
108107
actual_args = [args for (args, kwargs) in call_args_list]
109108
expected_args = [
110109
('path', 'data_dir/archive/successful/src_a', 'a.csv', True),
111110
('path', 'data_dir/archive/failed/src_b', 'b.csv', False),
112111
('path', 'data_dir/archive/failed/unknown', 'c.csv', False),
113-
('path', 'data_dir/archive/successful/src_d', 'd.csv', True),
114112
]
115113
self.assertEqual(actual_args, expected_args)
116114

0 commit comments

Comments (0)