Skip to content

incorporation of test improvements from parallel branch #959

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ jobs:
- name: Start services
run: |
docker network create --driver bridge delphi-net
docker run --rm -d -p 13306:3306 --network delphi-net --name delphi_database_epidata delphi_database_epidata
docker run --rm -d -p 13306:3306 --network delphi-net --name delphi_database_epidata --cap-add=sys_nice delphi_database_epidata
docker run --rm -d -p 10080:80 --env "SQLALCHEMY_DATABASE_URI=mysql+mysqldb://user:pass@delphi_database_epidata:3306/epidata" --env "FLASK_SECRET=abc" --env "FLASK_PREFIX=/epidata" --network delphi-net --name delphi_web_epidata delphi_web_epidata
docker ps

Expand Down
19 changes: 17 additions & 2 deletions dev/docker/database/epidata/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
# start with the `delphi_database` image
FROM delphi_database
# start with a standard percona mysql image
FROM percona:ps-8

# percona exits with the mysql user but we need root for additional setup
USER root

# use delphi's timezone
RUN ln -s -f /usr/share/zoneinfo/America/New_York /etc/localtime

# specify a development-only password for the database user "root"
ENV MYSQL_ROOT_PASSWORD pass

# create the `epidata` database
ENV MYSQL_DATABASE epidata
Expand All @@ -14,5 +23,11 @@ COPY repos/delphi/delphi-epidata/dev/docker/database/epidata/_init.sql /docker-e
# provide DDL which will create empty tables at container startup
COPY repos/delphi/delphi-epidata/src/ddl/*.sql /docker-entrypoint-initdb.d/

# provide additional configuration needed for percona
COPY repos/delphi/delphi-epidata/dev/docker/database/mysql.d/*.cnf /etc/my.cnf.d/

# grant access to SQL scripts
RUN chmod o+r /docker-entrypoint-initdb.d/*.sql

# restore mysql user for percona
USER mysql
2 changes: 2 additions & 0 deletions dev/docker/database/mysql.d/my.cnf
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[mysqld]
default_authentication_plugin=mysql_native_password
Original file line number Diff line number Diff line change
Expand Up @@ -71,10 +71,15 @@ def test_caching(self):

# insert dummy data
self.cur.execute(f'''
INSERT INTO `signal_dim` (`signal_key_id`, `source`, `signal`) VALUES (42, 'src', 'sig');
INSERT INTO `signal_dim` (`signal_key_id`, `source`, `signal`)
VALUES
(42, 'src', 'sig');
''')
self.cur.execute(f'''
INSERT INTO `geo_dim` (`geo_key_id`, `geo_type`, `geo_value`) VALUES (96, 'state', 'pa'), (97, 'state', 'wa');
INSERT INTO `geo_dim` (`geo_key_id`, `geo_type`, `geo_value`)
VALUES
(96, 'state', 'pa'),
(97, 'state', 'wa');
''')
self.cur.execute(f'''
INSERT INTO
Expand Down
32 changes: 4 additions & 28 deletions integrations/acquisition/covidcast/test_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,37 +2,13 @@

from delphi_utils import Nans
from delphi.epidata.acquisition.covidcast.database import Database, CovidcastRow
from delphi.epidata.acquisition.covidcast.test_utils import CovidcastBase
import delphi.operations.secrets as secrets

# all the Nans we use here are just one value, so this is a shortcut to it:
nmv = Nans.NOT_MISSING.value

class TestTest(unittest.TestCase):

def setUp(self):
# use the local test instance of the database
secrets.db.host = 'delphi_database_epidata'
secrets.db.epi = ('user', 'pass')

self._db = Database()
self._db.connect()

# empty all of the data tables
for table in "signal_load signal_latest signal_history geo_dim signal_dim".split():
self._db._cursor.execute(f"TRUNCATE TABLE {table}")

def tearDown(self):
# close and destroy connection to the database
self._db.disconnect(False)
del self._db

def _make_dummy_row(self):
return CovidcastRow('src', 'sig', 'day', 'state', 2022_02_22, 'pa', 2, 22, 222, nmv,nmv,nmv, 2022_02_22, 0)
# cols: ^ timeval v se ssz ^issue ^lag

def _insert_rows(self, rows):
self._db.insert_or_update_bulk(rows)
###db._connection.commit() # NOTE: this isnt needed here, but would be if using external access (like through client lib)
class TestTest(CovidcastBase):

def _find_matches_for_row(self, row):
# finds (if existing) row from both history and latest views that matches long-key of provided CovidcastRow
Expand Down Expand Up @@ -63,7 +39,7 @@ def test_id_sync(self):
latest_view = 'signal_latest_v'

# add a data point
base_row = self._make_dummy_row()
base_row, _ = self._make_placeholder_row()
self._insert_rows([base_row])
# ensure the primary keys match in the latest and history tables
matches = self._find_matches_for_row(base_row)
Expand All @@ -73,7 +49,7 @@ def test_id_sync(self):
old_pk_id = matches[latest_view][pk_column]

# add a reissue for said data point
next_row = self._make_dummy_row()
next_row, _ = self._make_placeholder_row()
next_row.issue += 1
self._insert_rows([next_row])
# ensure the new keys also match
Expand Down
6 changes: 3 additions & 3 deletions integrations/acquisition/covidcast/test_delete_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,9 +95,9 @@ def _test_delete_batch(self, cc_deletions):
),
# verify latest issue was corrected
Example(
f'select geo_value, issue from {self._db.latest_view} where time_value=0',
[('d_nonlatest', 2),
('d_latest', 2)]
f'select geo_value, issue from {self._db.latest_view} where time_value=0 order by geo_value',
[('d_latest', 2),
('d_nonlatest', 2)]
)
]

Expand Down
Loading