diff --git a/.gitignore b/.gitignore
index 9a6aa2a3..7df44560 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,4 +23,4 @@ start_env.sh
 .mypy_cache/
 *secrets*
 *kustomization*
-src/.venv/
\ No newline at end of file
+src/.venv/
diff --git a/src/server/alembic/insert_rfm_edges.sql b/src/server/alembic/insert_rfm_edges.sql
new file mode 100644
index 00000000..d22c5187
--- /dev/null
+++ b/src/server/alembic/insert_rfm_edges.sql
@@ -0,0 +1,8 @@
+INSERT INTO "public"."kv_unique" ("keycol", "valcol") VALUES
+( 'rfm_edges',
+  '{
+    "r": {"5": 0, "4": 262, "3": 1097, "2": 1910, "1": 2851},
+    "f": {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4},
+    "m": {"1": 0.0, "2": 50.0, "3": 75.0, "4": 100.0, "5": 210.0}
+  }'
+);
diff --git a/src/server/alembic/versions/90f471ac445c_create_sl_events.py b/src/server/alembic/versions/90f471ac445c_create_sl_events.py
new file mode 100644
index 00000000..f329069f
--- /dev/null
+++ b/src/server/alembic/versions/90f471ac445c_create_sl_events.py
@@ -0,0 +1,41 @@
+"""Shelterluv animal events tables
+
+Revision ID: 90f471ac445c
+Revises: 9687db7928ee
+Create Date: 2022-09-04 17:21:51.511030
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '90f471ac445c'
+down_revision = '9687db7928ee'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table(
+        "sl_event_types",
+        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
+        sa.Column("event_name", sa.Text, nullable=False),
+    )
+
+    op.create_table(
+        "sl_animal_events",
+        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
+        sa.Column("person_id", sa.Integer, nullable=False),
+        sa.Column("animal_id", sa.Integer, nullable=False),
+        sa.Column("event_type", sa.Integer, sa.ForeignKey('sl_event_types.id')),
+        sa.Column("time", sa.BigInteger, nullable=False)
+    )
+
+    op.create_index('sla_idx', 'sl_animal_events', ['person_id'])
+
+
+def downgrade():
+    op.drop_table("sl_animal_events")
+    op.drop_table("sl_event_types")
\ No newline at end of file
diff --git a/src/server/alembic/versions/9687db7928ee_shelterluv_animals.py b/src/server/alembic/versions/9687db7928ee_shelterluv_animals.py
new file mode 100644
index 00000000..7ae5de69
--- /dev/null
+++ b/src/server/alembic/versions/9687db7928ee_shelterluv_animals.py
@@ -0,0 +1,33 @@
+"""Create SL_animals table
+
+Revision ID: 9687db7928ee
+Revises: 45a668fa6325
+Create Date: 2021-12-24 21:15:33.399197
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
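+# Note: although this migration appears after 90f471ac445c in the diff, it is
+# the earlier revision - 90f471ac445c declares down_revision = '9687db7928ee',
+# so Alembic applies this one first.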
+revision = '9687db7928ee'
+down_revision = '45a668fa6325'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table(
+        "shelterluv_animals",
+        sa.Column("id", sa.BigInteger, primary_key=True),
+        sa.Column("local_id", sa.BigInteger, nullable=False),
+        sa.Column("name", sa.Text, nullable=False),
+        sa.Column("type", sa.Text, nullable=False),
+        sa.Column("dob", sa.BigInteger, nullable=False),
+        sa.Column("update_stamp", sa.BigInteger, nullable=False),
+        sa.Column("photo", sa.Text, nullable=False)
+    )
+
+
+def downgrade():
+    op.drop_table("shelterluv_animals")
diff --git a/src/server/api/API_ingest/ingest_sources_from_api.py b/src/server/api/API_ingest/ingest_sources_from_api.py
index 73b0204a..d9915a2c 100644
--- a/src/server/api/API_ingest/ingest_sources_from_api.py
+++ b/src/server/api/API_ingest/ingest_sources_from_api.py
@@ -1,9 +1,22 @@
-from api.API_ingest import shelterluv_api_handler
 import structlog
 logger = structlog.get_logger()
 
+from api.API_ingest import shelterluv_api_handler, sl_animal_events
+
 def start(conn):
-    logger.debug("Start Fetching raw data from different API sources")
+    logger.debug("Start fetching raw data from different API sources")
+
+    logger.debug("  Fetching Shelterluv people")
+    slp_count = shelterluv_api_handler.store_shelterluv_people_all(conn)
+    logger.debug("  Finished fetching Shelterluv people - %d records", slp_count)
+
+    logger.debug("  Fetching Shelterluv events")
     #Run each source to store the output in dropbox and in the container as a CSV
-    shelterluv_api_handler.store_shelterluv_people_all(conn)
-    logger.debug("Finish Fetching raw data from different API sources")
\ No newline at end of file
+    sle_count = sl_animal_events.slae_test()
+    logger.debug("  Finished fetching Shelterluv events - %d records", sle_count)
+
+    logger.debug("Finished fetching raw data from different API sources")
+
+    # TODO: Return an object with a count for each data source?
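+    # One possible shape for that return value (hypothetical, not yet implemented):
+    #
+    #   return {"shelterluv_people": slp_count, "shelterluv_events": sle_count}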
diff --git a/src/server/api/API_ingest/shelterluv_animals.py b/src/server/api/API_ingest/shelterluv_animals.py
new file mode 100644
index 00000000..4d7b9f9f
--- /dev/null
+++ b/src/server/api/API_ingest/shelterluv_animals.py
@@ -0,0 +1,216 @@
+import os, time, json
+import posixpath as path
+
+import requests
+
+from api.API_ingest import shelterluv_db
+from api.API_ingest.shelterluv_db import insert_animals
+
+# from config import engine
+# from flask import current_app
+# from sqlalchemy.sql import text
+
+BASE_URL = 'http://shelterluv.com/api/'
+MAX_COUNT = 100  # Max records the API will return for one call
+
+try:
+    from secrets_dict import SHELTERLUV_SECRET_TOKEN
+except ImportError:
+    # Not running locally
+    from os import environ
+
+    try:
+        SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']
+    except KeyError:
+        # Not in the environment either; API calls will fail
+        print("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")
+
+
+headers = {
+    "Accept": "application/json",
+    "X-API-Key": SHELTERLUV_SECRET_TOKEN
+}
+
+logger = print  # TODO: Switch to structlog, as in sl_animal_events.py
+
+def get_animal_count():
+    """Test that the server is operational and get the total animal count."""
+    animals = 'v1/animals&offset=0&limit=1'
+    URL = path.join(BASE_URL, animals)
+
+    try:
+        response = requests.request("GET", URL, headers=headers)
+    except Exception as e:
+        logger('get_animal_count failed with', e)
+        return -2
+
+    if response.status_code != 200:
+        logger("get_animal_count got status code", response.status_code)
+        return -3
+
+    try:
+        decoded = json.loads(response.text)
+    except json.decoder.JSONDecodeError as e:
+        logger("get_animal_count JSON decode failed with", e)
+        return -4
+
+    if decoded['success']:
+        return decoded['total_count']
+    else:
+        return -5  # AFAICT, this means the URL was bad
+
+
+def get_updated_animal_count(last_update):
+    """Test that the server is operational and get the count of animals updated since last_update."""
+    animals = 'v1/animals&offset=0&limit=1&sort=updated_at&since=' + str(last_update)
+    URL = path.join(BASE_URL, animals)
+
+    try:
+        response = requests.request("GET", URL, headers=headers)
+    except Exception as e:
+        logger('get_updated_animal_count failed with', e)
+        return -2
+
+    if response.status_code != 200:
+        logger("get_updated_animal_count got status code", response.status_code)
+        return -3
+
+    try:
+        decoded = json.loads(response.text)
+    except json.decoder.JSONDecodeError as e:
+        logger("get_updated_animal_count JSON decode failed with", e)
+        return -4
+
+    if decoded['success']:
+        return decoded['total_count']
+    else:
+        return -5  # AFAICT, this means the URL was bad
+
+
+def filter_animals(raw_list):
+    """Given a list of animal records as returned by SL, return a list of records with only the fields we care about."""
+
+    good_keys = ['ID', 'Internal-ID', 'Name', 'Type', 'DOBUnixTime', 'CoverPhoto', 'LastUpdatedUnixTime']
+
+    filtered = []
+
+    for r in raw_list:
+        f = {}
+        for k in good_keys:
+            try:
+                f[k] = r[k]
+            except KeyError:
+                if k in ('DOBUnixTime', 'LastUpdatedUnixTime'):
+                    f[k] = 0
+                else:
+                    f[k] = ''
+        filtered.append(f)
+
+    return filtered
+
+
+def get_animals_bulk(total_count):
+    """Pull all animal records from SL."""
+
+    # 'Great' API design - animal record 0 is the newest, so we need to start at the end,
+    # back up MAX_COUNT rows, make our request, then keep backing up. We need to keep checking
+    # the total records to ensure one wasn't added in the middle of the process.
+    # Good news: the API is robust and won't blow up if you request past the end.
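+    # Worked example with MAX_COUNT = 100 and total_count = 250: the loop
+    # requests offset 150 (rows 150-249), then offset 50 (rows 50-149), then
+    # trims limit to 50 and requests offset 0 (rows 0-49) - each row fetched
+    # exactly once.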
+
+    raw_url = path.join(BASE_URL, 'v1/animals&offset={0}&limit={1}')
+
+    start_record = int(total_count)
+    offset = (start_record - MAX_COUNT) if (start_record - MAX_COUNT) > -1 else 0
+    limit = MAX_COUNT
+
+    while offset > -1:
+
+        logger("getting at offset", offset)
+        url = raw_url.format(offset, limit)
+
+        try:
+            response = requests.request("GET", url, headers=headers)
+        except Exception as e:
+            logger('get_animals_bulk failed with', e)
+            return -2
+
+        if response.status_code != 200:
+            logger("get_animals_bulk got status code", response.status_code)
+            return -3
+
+        try:
+            decoded = json.loads(response.text)
+        except json.decoder.JSONDecodeError as e:
+            logger("get_animals_bulk JSON decode failed with", e)
+            return -4
+
+        if decoded['success']:
+            insert_animals(filter_animals(decoded['animals']))
+            if offset == 0:
+                break
+            offset -= MAX_COUNT
+            if offset < 0:
+                limit = limit + offset  # Partial block left at the front
+                offset = 0
+        else:
+            return -5  # AFAICT, this means the URL was bad
+
+    return start_record  # Number of records processed
+
+
+def update_animals(last_update):
+    """Get the animals inserted or updated since the last check, then insert/update db records."""
+
+    # TODO: Incomplete - this only fetches the count so far; pulling the
+    # changed records and upserting them still needs to be written.
+    updated_records = get_updated_animal_count(last_update)
+    return updated_records
+
+
+def sla_test():
+    total_count = get_animal_count()
+    print('Total animals:', total_count)
+
+    processed = get_animals_bulk(total_count)
+    print('Records processed:', processed)
+
+    # f = filter_animals(b)
+    # print(f)
+
+    # count = shelterluv_db.insert_animals(f)
+    return processed
+
+# if __name__ == '__main__':
+
+#     total_count = get_animal_count()
+#     print('Total animals:', total_count)
+
+#     b = get_animals_bulk(9)
+#     print(len(b))
+
+#     f = filter_animals(b)
+#     print(f)
+
+#     count = shelterluv_db.insert_animals(f)
\ No newline at end of file
diff --git a/src/server/api/API_ingest/shelterluv_api_handler.py b/src/server/api/API_ingest/shelterluv_api_handler.py
index c1c87fa5..84572efc 100644
--- a/src/server/api/API_ingest/shelterluv_api_handler.py
+++ b/src/server/api/API_ingest/shelterluv_api_handler.py
@@ -10,6 +10,9 @@
 import structlog
 logger = structlog.get_logger()
 
+
+TEST_MODE = os.getenv("TEST_MODE")
+
 try:
     from secrets_dict import SHELTERLUV_SECRET_TOKEN
 except ImportError:
@@ -80,7 +83,14 @@ def store_shelterluv_people_all(conn):
         has_more = response["has_more"]
         offset += 100
 
-    logger.debug("Finish getting shelterluv contacts from people table")
+        if offset % 1000 == 0:
+            logger.debug("Reading offset %d", offset)
+        if TEST_MODE and offset > 1000:
+            has_more = False  # Break out early
+
+    logger.debug("Finished getting shelterluv contacts from people table")
 
     logger.debug("Start storing latest shelterluvpeople results to container")
     if os.listdir(RAW_DATA_PATH):
@@ -100,3 +110,5 @@
 
     logger.debug("Uploading shelterluvpeople csv to database")
     ShelterluvPeople.insert_from_df(pd.read_csv(file_path, dtype="string"), conn)
+
+    return offset
diff --git a/src/server/api/API_ingest/shelterluv_db.py b/src/server/api/API_ingest/shelterluv_db.py
new file mode 100644
index 00000000..b3518cf5
--- /dev/null
+++ b/src/server/api/API_ingest/shelterluv_db.py
@@ -0,0 +1,149 @@
+from config import engine
+from sqlalchemy import Table, MetaData
+from sqlalchemy.orm import sessionmaker
+
+
+def insert_animals(animal_list):
+    """Insert animal records into the shelterluv_animals table and return the row count."""
+
+    Session = sessionmaker(engine)
+    session = Session()
+    metadata = MetaData()
+    sla = Table("shelterluv_animals", metadata, autoload=True, autoload_with=engine)
+
+    # From Shelterluv: ['ID', 'Internal-ID', 'Name', 'Type', 'DOBUnixTime', 'CoverPhoto', 'LastUpdatedUnixTime']
+    # In db:           ['local_id', 'id' (PK), 'name', 'type', 'dob', 'photo', 'update_stamp']
+
+    ins_list = []  # Build a list of per-row dicts
+    for rec in animal_list:
+        ins_list.append(
+            {
+                "id": rec["Internal-ID"],
+                "local_id": rec["ID"] if rec["ID"] else 0,  # Sometimes there's no local id
+                "name": rec["Name"],
+                "type": rec["Type"],
+                "dob": rec["DOBUnixTime"],
+                "update_stamp": rec["LastUpdatedUnixTime"],
+                "photo": rec["CoverPhoto"],
+            }
+        )
+
+    if not ins_list:  # Nothing to insert
+        session.close()
+        return 0
+
+    ret = session.execute(sla.insert(), ins_list)
+
+    session.commit()  # Commit all inserted rows
+    session.close()
+
+    return ret.rowcount
+
+
+def truncate_animals():
+    """Truncate the shelterluv_animals table."""
+
+    Session = sessionmaker(engine)
+    session = Session()
+
+    session.execute("TRUNCATE table shelterluv_animals;")
+    session.commit()
+    session.close()
+
+    return 0
+
+
+def truncate_events():
+    """Truncate the sl_animal_events table."""
+
+    Session = sessionmaker(engine)
+    session = Session()
+
+    session.execute("TRUNCATE table sl_animal_events;")
+    session.commit()
+    session.close()
+
+    return 0
+
+
+def insert_events(event_list):
+    """Insert event records into the sl_animal_events table and return the row count.
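+
+    Each event arrives with its person/animal links in an AssociatedRecords
+    list; the loop below flattens every event into one row. Note that
+    truncate_events() runs first, so each call is a full refresh of the
+    table rather than an incremental append.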
""" + + # Always a clean insert + truncate_events() + + Session = sessionmaker(engine) + session = Session() + metadata = MetaData() + sla = Table("sl_animal_events", metadata, autoload=True, autoload_with=engine) + + # TODO: Pull from DB - inserted in db_setup/base_users.py/populate_sl_event_types() + event_map = { + "Outcome.Adoption": 1, + "Outcome.Foster": 2, + "Outcome.ReturnToOwner": 3, + "Intake.AdoptionReturn": 4, + "Intake.FosterReturn":5 + } + + # """ INSERT INTO "sl_event_types" ("id","event_name") VALUES + # ( 1,'Outcome.Adoption' ), + # ( 2,'Outcome.Foster' ), + # ( 3,'Outcome.ReturnToOwner' ), + # ( 4,'Intake.AdoptionReturn' ), + # ( 5,'Intake.FosterReturn' ) """ + + + + + # Event record: [ AssociatedRecords[Type = Person]["Id"]', + # AssociatedRecords[Type = Animal]["Id"]', + # "Type", + # "Time" + # ] + # + # In db: ['id', + # 'person_id', + # 'animal_id', + # 'event_type', + # 'time'] + + ins_list = [] # Create a list of per-row dicts + for rec in event_list: + ins_list.append( + { + "person_id": next( + filter(lambda x: x["Type"] == "Person", rec["AssociatedRecords"]) + )["Id"], + "animal_id": next( + filter(lambda x: x["Type"] == "Animal", rec["AssociatedRecords"]) + )["Id"], + "event_type": event_map[rec["Type"]], + "time": rec["Time"], + } + ) + + # TODO: Wrap with try/catch + ret = session.execute(sla.insert(ins_list)) + + session.commit() # Commit all inserted rows + session.close() + + return ret.rowcount + diff --git a/src/server/api/API_ingest/sl_animal_events.py b/src/server/api/API_ingest/sl_animal_events.py new file mode 100644 index 00000000..d7f6a472 --- /dev/null +++ b/src/server/api/API_ingest/sl_animal_events.py @@ -0,0 +1,194 @@ +import os, time, json +import posixpath as path + +import structlog +logger = structlog.get_logger() + +import requests + +from api.API_ingest import shelterluv_db +from server.api.API_ingest.shelterluv_db import insert_animals + +# There are a number of different record types. These are the ones we care about. 
+keep_record_types = [
+    "Outcome.Adoption",
+    "Outcome.Foster",
+    "Outcome.ReturnToOwner",
+    "Intake.AdoptionReturn",
+    "Intake.FosterReturn"
+]
+
+# from config import engine
+# from flask import current_app
+# from sqlalchemy.sql import text
+
+BASE_URL = "http://shelterluv.com/api/"
+MAX_COUNT = 100  # Max records the API will return for one call
+
+# Get the API key
+try:
+    from secrets_dict import SHELTERLUV_SECRET_TOKEN
+except ImportError:
+    # Not running locally
+    from os import environ
+
+    try:
+        SHELTERLUV_SECRET_TOKEN = environ["SHELTERLUV_SECRET_TOKEN"]
+    except KeyError:
+        # Not in the environment either; API calls will fail
+        logger.error("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")
+
+
+TEST_MODE = os.getenv("TEST_MODE")  # If not present, has the value None
+
+headers = {"Accept": "application/json", "X-API-Key": SHELTERLUV_SECRET_TOKEN}
+
+# Sample response from an events request:
+
+# {
+#     "success": 1,
+#     "events": [
+#         {
+#             "Type": "Outcome.Adoption",
+#             "Subtype": "PAC",
+#             "Time": "1656536900",
+#             "User": "phlp_mxxxx",
+#             "AssociatedRecords": [
+#                 { "Type": "Animal", "Id": "5276xxxx" },
+#                 { "Type": "Person", "Id": "5633xxxx" }
+#             ]
+#         },
+#         {...}
+#     ],
+#     "has_more": true,
+#     "total_count": 67467
+# }
+
+
+def get_event_count():
+    """Test that the server is operational and get the total event count."""
+    events = "v1/events&offset=0&limit=1"
+    URL = path.join(BASE_URL, events)
+
+    try:
+        response = requests.request("GET", URL, headers=headers)
+    except Exception as e:
+        logger.error("get_event_count failed with %s", e)
+        return -2
+
+    if response.status_code != 200:
+        logger.error("get_event_count got status code %d", response.status_code)
+        return -3
+
+    try:
+        decoded = json.loads(response.text)
+    except json.decoder.JSONDecodeError as e:
+        logger.error("get_event_count JSON decode failed with %s", e)
+        return -4
+
+    if decoded["success"]:
+        return decoded["total_count"]
+    else:
+        logger.error(decoded['error_message'])
+        return -5  # AFAICT, this means the URL was bad
+
+
+def get_events_bulk():
+    """Pull all event records from SL."""
+
+    # Interesting API design - event record 0 is the newest. But since we pull all records
+    # each time, it doesn't really matter which direction we go. Simplest is to count up,
+    # and we can pull until 'has_more' goes false.
+    # Good news: the API is robust and won't blow up if you request past the end.
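+    # 'has_more' in each response is the stop signal, so unlike the animals
+    # pull in shelterluv_animals.py there is no backwards offset arithmetic
+    # to maintain.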
+    # At 100 per request, the API returns about 5000 records/minute, so the ~67,000
+    # events in the sample response above take roughly 13-14 minutes to pull.
+
+    event_records = []
+
+    raw_url = path.join(BASE_URL, "v1/events&offset={0}&limit={1}")
+    offset = 0
+    limit = MAX_COUNT
+    more_records = True
+
+    while more_records:
+
+        url = raw_url.format(offset, limit)
+
+        try:
+            response = requests.request("GET", url, headers=headers)
+        except Exception as e:
+            logger.error("get_events_bulk failed with %s", e)
+            return -2
+
+        if response.status_code != 200:
+            logger.error("get_events_bulk got status code %d", response.status_code)
+            return -3
+
+        try:
+            decoded = json.loads(response.text)
+        except json.decoder.JSONDecodeError as e:
+            logger.error("get_events_bulk JSON decode failed with %s", e)
+            return -4
+
+        if decoded["success"]:
+            for evrec in decoded["events"]:
+                if evrec["Type"] in keep_record_types:
+                    event_records.append(evrec)
+
+            more_records = decoded["has_more"]  # If so, we'll make another pass
+            offset += limit
+            if offset % 1000 == 0:
+                logger.debug("Reading offset %d", offset)
+            if TEST_MODE and offset > 1000:
+                more_records = False  # Break out early
+
+        else:
+            return -5  # AFAICT, this means the URL was bad
+
+    return event_records
+
+
+def slae_test():
+    total_count = get_event_count()
+    logger.debug("Total events: %d", total_count)
+
+    b = get_events_bulk()
+    logger.debug("Retrieved records: %d", len(b))
+
+    # f = filter_events(b)
+    # print(f)
+
+    count = shelterluv_db.insert_events(b)
+    return count
+
+
+# Query to get the last adopt/foster event per person:
+
+# """
+# select
+#     person_id as sl_person_id, max(to_timestamp(time)::date) as last_fosteradopt_event
+# from
+#     sl_animal_events
+# where event_type < 4  -- check this
+# group by
+#     person_id
+# order by
+#     person_id asc;
+# """
+
+# Volgistics last shift:
+
+# """
+# select
+#     volg_id, max(from_date) as last_shift
+# from
+#     volgisticsshifts
+# group by
+#     volg_id
+# order by
+#     volg_id;
+# """
\ No newline at end of file
diff --git a/src/server/api/admin_api.py b/src/server/api/admin_api.py
index cbebcf06..80763f8b 100644
--- a/src/server/api/admin_api.py
+++ b/src/server/api/admin_api.py
@@ -392,16 +392,47 @@ def generate_dummy_rfm_scores():
     return count
 
 
+# ########### Test API endpoints
+# TODO: Remove for production
 
-
-# Use this as a way to trigger functions for testing
-# TODO: Remove when not needed
+# Trigger the RFM scoring process
 @admin_api.route("/api/admin/test_endpoint_gdrs", methods=["GET"])
 def hit_gdrs():
     num_scores = generate_dummy_rfm_scores()
     return jsonify({"scores added" : num_scores})
 
+
+# Trigger a pull of SL animals
+@admin_api.route("/api/admin/test_sla", methods=["GET"])
+def trigger_sla_pull():
+
+    import api.API_ingest.shelterluv_animals
+
+    num_rows = api.API_ingest.shelterluv_animals.sla_test()
+    return jsonify({"rows added" : num_rows})
+
+
+# Trigger a pull of SL people
+@admin_api.route("/api/admin/test_slp", methods=["GET"])
+def trigger_slp_pull():
+
+    import api.API_ingest.shelterluv_api_handler
+    from config import engine
+
+    # store_shelterluv_people_all() needs a live db connection
+    with engine.begin() as conn:
+        num_rows = api.API_ingest.shelterluv_api_handler.store_shelterluv_people_all(conn)
+    return jsonify({"rows added" : num_rows})
+
+
+# Trigger a pull of SL animal events
+@admin_api.route("/api/admin/test_slae", methods=["GET"])
+def trigger_slae_pull():
+
+    import api.API_ingest.sl_animal_events
+
+    num_rows = api.API_ingest.sl_animal_events.slae_test()
+    return jsonify({"rows added" : num_rows})
+
+
 # def pdfr():
 #     dlist = pull_donations_for_rfm()
 #     print("Returned " + str(len(dlist)) + " rows")
diff --git a/src/server/api/internal_api.py b/src/server/api/internal_api.py
index 1a6db9a2..bcae6704 100644
--- a/src/server/api/internal_api.py
+++ b/src/server/api/internal_api.py
@@ -25,7 +25,7 @@ def user_test2():
     return jsonify(("OK from INTERNAL test/test @ " + str(datetime.now())))
 
 
-@internal_api.route("/api/ingestRawData", methods=["GET"])
+@internal_api.route("/api/internal/ingestRawData", methods=["GET"])
 def ingest_raw_data():
     try:
         with engine.begin() as conn:
diff --git a/src/server/config.py b/src/server/config.py
index ec2c0aae..ac2aebd2 100644
--- a/src/server/config.py
+++ b/src/server/config.py
@@ -79,10 +79,11 @@
 # logger.warn("Testing")
 
 with engine.connect() as connection:
-    import user_mgmt.base_users
-    user_mgmt.base_users.create_base_roles()  # IFF there are no roles already
-    user_mgmt.base_users.create_base_users()  # IFF there are no users already
-    user_mgmt.base_users.populate_rfm_mapping_table()  # Set to True to force loading latest version of populate script
+    import db_setup.base_users
+    db_setup.base_users.create_base_roles()          # IFF there are no roles already
+    db_setup.base_users.create_base_users()          # IFF there are no users already
+    db_setup.base_users.populate_sl_event_types()    # IFF there are no event types already
+    db_setup.base_users.populate_rfm_mapping_table() # Set to True to force loading the latest version of the populate script
                                                      # found in the server/alembic directory
 
 # Create these directories only one time - when initializing
diff --git a/src/server/user_mgmt/README.md b/src/server/db_setup/README.md
similarity index 100%
rename from src/server/user_mgmt/README.md
rename to src/server/db_setup/README.md
diff --git a/src/server/user_mgmt/__init__.py b/src/server/db_setup/__init__.py
similarity index 100%
rename from src/server/user_mgmt/__init__.py
rename to src/server/db_setup/__init__.py
diff --git a/src/server/user_mgmt/base_users.py b/src/server/db_setup/base_users.py
similarity index 84%
rename from src/server/user_mgmt/base_users.py
rename to src/server/db_setup/base_users.py
index 2e09b470..4bd232f9 100644
--- a/src/server/user_mgmt/base_users.py
+++ b/src/server/db_setup/base_users.py
@@ -136,4 +136,21 @@ def table_empty():
     else:
         logger.debug("rfm_mapping table already populated; overwrite not True so not changing.")
 
-    return
\ No newline at end of file
+    return
+
+
+def populate_sl_event_types():
+    """If not already present, insert the Shelterluv animal event types."""
+    with engine.connect() as connection:
+        result = connection.execute("select id from sl_event_types")
+        type_count = len(result.fetchall())
+        if type_count == 0:
+            logger.debug("Inserting SL event types")
+            connection.execute("""INSERT into sl_event_types values
+                (1, 'Outcome.Adoption'),
+                (2, 'Outcome.Foster'),
+                (3, 'Outcome.ReturnToOwner'),
+                (4, 'Intake.AdoptionReturn'),
+                (5, 'Intake.FosterReturn');""")
+        else:
+            logger.debug("%d event types already present in DB, not creating", type_count)
diff --git a/src/server/requirements.txt b/src/server/requirements.txt
index 58f42e03..d70b1115 100644
--- a/src/server/requirements.txt
+++ b/src/server/requirements.txt
@@ -1,8 +1,9 @@
+numpy==1.19.5
 Flask==1.1.2
 pandas==1.3.2
-numpy==1.18.1
+
 sqlalchemy==1.4.15
-psycopg2-binary==2.8.4
+psycopg2-binary==2.9.1
 xlrd==1.2.0  # currently used for xlsx, but we should consider adjusting code to openpyxl for xlsx
 openpyxl
 requests