Skip to content

Commit e559730

Browse files
committed
Merge branch 'master' into 522-flask-logging
2 parents 0edb284 + 106254f commit e559730

25 files changed

+900
-141
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,3 +23,6 @@ start_env.sh
2323
.mypy_cache/
2424
*secrets*
2525
*kustomization*
26+
src/.venv/
27+
src/server/secrets_dict.py
28+
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
INSERT INTO "public"."kv_unique"( "keycol", "valcol") VALUES
2+
( 'rfm_edges',
3+
'{
4+
"r":{"5": 0, "4": 262, "3": 1097, "2": 1910, "1": 2851},
5+
"f": {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4},
6+
"m": {"1": 0.0, "2": 50.0, "3": 75.0, "4": 100.0, "5": 210.0}
7+
}'
8+
);
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
"""Shelterluv animal events table
2+
3+
Revision ID: 90f471ac445c
4+
Revises: 9687db7928ee
5+
Create Date: 2022-09-04 17:21:51.511030
6+
7+
"""
8+
from alembic import op
9+
import sqlalchemy as sa
10+
11+
12+
# revision identifiers, used by Alembic.
13+
revision = '90f471ac445c'
14+
down_revision = '9687db7928ee'
15+
branch_labels = None
16+
depends_on = None
17+
18+
19+
def upgrade():
20+
op.create_table (
21+
"sl_event_types",
22+
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
23+
sa.Column("event_name", sa.Text, nullable=False),
24+
)
25+
26+
op.create_table (
27+
"sl_animal_events",
28+
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
29+
sa.Column("person_id", sa.Integer, nullable=False),
30+
sa.Column("animal_id", sa.Integer, nullable=False),
31+
sa.Column("event_type", sa.Integer, sa.ForeignKey('sl_event_types.id')),
32+
sa.Column("time", sa.BigInteger, nullable=False)
33+
)
34+
35+
op.create_index('sla_idx', 'sl_animal_events', ['person_id'])
36+
37+
38+
39+
def downgrade():
40+
op.drop_table("sl_animal_events")
41+
op.drop_table("sl_event_types")
Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
"""Create SL_animals table
2+
3+
Revision ID: 9687db7928ee
4+
Revises: 45a668fa6325
5+
Create Date: 2021-12-24 21:15:33.399197
6+
7+
"""
8+
from alembic import op
9+
import sqlalchemy as sa
10+
11+
12+
# revision identifiers, used by Alembic.
13+
revision = '9687db7928ee'
14+
down_revision = '45a668fa6325'
15+
branch_labels = None
16+
depends_on = None
17+
18+
19+
def upgrade():
20+
op.create_table (
21+
"shelterluv_animals",
22+
sa.Column("id", sa.BigInteger, primary_key=True),
23+
sa.Column("local_id", sa.BigInteger, nullable=False),
24+
sa.Column("name", sa.Text, nullable=False),
25+
sa.Column("type", sa.Text, nullable=False),
26+
sa.Column("dob", sa.BigInteger, nullable=False),
27+
sa.Column("update_stamp", sa.BigInteger, nullable=False),
28+
sa.Column("photo", sa.Text, nullable=False)
29+
)
30+
31+
32+
def downgrade():
33+
op.drop_table("shelterluv_animals")

src/server/api/.optic/.gitignore

Lines changed: 0 additions & 2 deletions
This file was deleted.
Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,21 @@
1-
from api.API_ingest import shelterluv_people, salesforce_contacts, sl_animal_events

import structlog

logger = structlog.get_logger()


def start():
    """Run every external-API ingest step in sequence.

    Pulls Salesforce contacts, Shelterluv people, and Shelterluv animal
    events into the database, logging progress around each step.
    """
    logger.debug("Start Fetching raw data from different API sources")

    logger.debug(" Fetching Salesforce contacts")
    salesforce_contacts.store_contacts_all()
    logger.debug(" Finished fetching Salesforce contacts")

    logger.debug(" Fetching Shelterluv people")
    slp_count = shelterluv_people.store_shelterluv_people_all()
    logger.debug(" Finished fetching Shelterluv people - %d records", slp_count)

    logger.debug(" Fetching Shelterluv events")
    sle_count = sl_animal_events.store_all_animals_and_events()
    logger.debug(" Finished fetching Shelterluv events - %d records", sle_count)

    logger.debug("Finished fetching raw data from different API sources")
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
import os

from sqlalchemy.orm import sessionmaker
from simple_salesforce import Salesforce
from config import engine
from models import SalesForceContacts

import structlog

logger = structlog.get_logger()


def store_contacts_all():
    """Replace the salesforcecontacts table with fresh data from Salesforce.

    Truncates the table, queries all Contact records (paging with
    query_more until Salesforce reports 'done'), and commits the new rows
    in one transaction. Credentials come from SALESFORCE_* env vars.
    """
    Session = sessionmaker(engine)
    with Session() as session:
        logger.debug("truncating table salesforcecontacts")
        session.execute("TRUNCATE TABLE salesforcecontacts")

        logger.debug("retrieving the latest salesforce contacts data")
        sf = Salesforce(
            domain=os.getenv('SALESFORCE_DOMAIN'),
            password=os.getenv('SALESFORCE_PW'),
            username=os.getenv('SALESFORCE_USERNAME'),
            security_token=os.getenv('SALESFORCE_SECURITY_TOKEN'),
        )
        results = sf.query("SELECT Contact_ID_18__c, FirstName, LastName, Contact.Account.Name, MailingCountry, MailingStreet, MailingCity, MailingState, MailingPostalCode, Phone, MobilePhone, Email FROM Contact")
        logger.debug("Query returned %d Salesforce contact records", len(results['records']))

        done = False
        while not done:
            for row in results['records']:
                # Account can be null on a Contact; guard before dereferencing.
                account_name = row['Account']['Name'] if row['Account'] is not None else None
                contact = SalesForceContacts(
                    contact_id=row['Contact_ID_18__c'],
                    first_name=row['FirstName'],
                    last_name=row['LastName'],
                    account_name=account_name,
                    mailing_country=row['MailingCountry'],
                    mailing_street=row['MailingStreet'],
                    mailing_city=row['MailingCity'],
                    mailing_state_province=row['MailingState'],
                    mailing_zip_postal_code=row['MailingPostalCode'],
                    phone=row['Phone'],
                    mobile=row['MobilePhone'],
                    # BUG FIX: was `email=['Email']` — the literal list
                    # ['Email'], which discarded every contact's address.
                    email=row['Email'])
                session.add(contact)
            # Salesforce pages large result sets; keep fetching until done.
            done = results['done']
            if not done:
                results = sf.query_more(results['nextRecordsUrl'])
        session.commit()
        logger.debug("finished downloading latest salesforce contacts data")
Lines changed: 216 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,216 @@
1+
import os, time, json
2+
import posixpath as path
3+
4+
import requests
5+
6+
from api.API_ingest import shelterluv_db
7+
from server.api.API_ingest.shelterluv_db import insert_animals
8+
9+
10+
# from config import engine
11+
# from flask import current_app
12+
# from sqlalchemy.sql import text
13+
14+
BASE_URL = 'http://shelterluv.com/api/'
MAX_COUNT = 100  # Max records the API will return for one call

# Prefer the local secrets file; fall back to the environment when deployed.
try:
    from secrets_dict import SHELTERLUV_SECRET_TOKEN
except ImportError:
    # Not running locally
    from os import environ

    try:
        SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']
    except KeyError:
        # Not in environment
        # You're SOL for now
        print("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")


# Common headers for every Shelterluv request.
headers = {
    "Accept": "application/json",
    "X-API-Key": SHELTERLUV_SECRET_TOKEN
}

logger = print
39+
def get_animal_count():
    """Test that the server is operational and return the total animal count.

    Returns a negative sentinel on failure: -2 request exception, -3 bad
    HTTP status, -4 undecodable JSON, -5 API reported failure.
    """
    url = path.join(BASE_URL, 'v1/animals&offset=0&limit=1')

    try:
        response = requests.request("GET", url, headers=headers)
    except Exception as e:
        logger('get_animal_count failed with ', e)
        return -2

    if response.status_code != 200:
        logger("get_animal_count ", response.status_code, "code")
        return -3

    try:
        decoded = json.loads(response.text)
    except json.decoder.JSONDecodeError as e:
        logger("get_animal_count JSON decode failed with", e)
        return -4

    if not decoded['success']:
        return -5  # AFAICT, this means URL was bad
    return decoded['total_count']
64+
65+
66+
def get_updated_animal_count(last_update):
    """Return the count of animals updated since *last_update* (epoch time).

    Same error sentinels as get_animal_count: -2 request exception, -3 bad
    HTTP status, -4 undecodable JSON, -5 API reported failure.
    """
    query = 'v1/animals&offset=0&limit=1&sort=updated_at&since=' + str(last_update)
    url = path.join(BASE_URL, query)

    try:
        response = requests.request("GET", url, headers=headers)
    except Exception as e:
        logger('get_updated_animal_count failed with ', e)
        return -2

    if response.status_code != 200:
        logger("get_updated_animal_count ", response.status_code, "code")
        return -3

    try:
        decoded = json.loads(response.text)
    except json.decoder.JSONDecodeError as e:
        logger("get_updated_animal_count JSON decode failed with", e)
        return -4

    if not decoded['success']:
        return -5  # AFAICT, this means URL was bad
    return decoded['total_count']
91+
92+
93+
94+
95+
def filter_animals(raw_list):
    """Given a list of animal records as returned by SL, return a list of
    records with only the fields we care about.

    Missing Unix-time fields default to 0; every other missing field
    defaults to ''.
    """
    good_keys = ['ID', 'Internal-ID', 'Name', 'Type', 'DOBUnixTime',
                 'CoverPhoto', 'LastUpdatedUnixTime']
    numeric_keys = ('DOBUnixTime', 'LastUpdatedUnixTime')

    filtered = []
    for record in raw_list:
        # BUG FIX: the original used a bare `except:` around record[k],
        # which also swallows KeyboardInterrupt/SystemExit; dict.get with a
        # per-key default expresses the same fallback safely.
        filtered.append({
            k: record.get(k, 0 if k in numeric_keys else '')
            for k in good_keys
        })

    return filtered
115+
116+
117+
118+
119+
def get_animals_bulk(total_count):
    """Pull all animal records from SL and insert them into the database.

    Returns the string 'zero' on success (callers only take len() of the
    result) or a negative int sentinel on failure: -2 request exception,
    -3 bad HTTP status, -4 undecodable JSON, -5 API reported failure.
    """
    # 'Great' API design - animal record 0 is the newest, so we need to start at the end,
    # back up MAX_COUNT rows, make our request, then keep backing up. We need to keep checking
    # the total records to ensure one wasn't added in the middle of the process.
    # Good news, the API is robust and won't blow up if you request past the end.

    raw_url = path.join(BASE_URL, 'v1/animals&offset={0}&limit={1}')

    start_record = int(total_count)
    offset = (start_record - MAX_COUNT) if (start_record - MAX_COUNT) > -1 else 0
    limit = MAX_COUNT

    while offset > -1:

        logger("getting at offset", offset)
        url = raw_url.format(offset, limit)

        try:
            response = requests.request("GET", url, headers=headers)
        except Exception as e:
            # BUG FIX: error logs below previously named 'get_animals' /
            # 'get_animal_count' (copy/paste), misattributing failures.
            logger('get_animals_bulk failed with ', e)
            return -2

        if response.status_code != 200:
            logger("get_animals_bulk ", response.status_code, "code")
            return -3

        try:
            decoded = json.loads(response.text)
        except json.decoder.JSONDecodeError as e:
            logger("get_animals_bulk JSON decode failed with", e)
            return -4

        if decoded['success']:
            insert_animals(filter_animals(decoded['animals']))
            if offset == 0:
                break
            # Back up another page; clamp the final (partial) page to start
            # at offset 0 with a correspondingly smaller limit.
            offset -= MAX_COUNT
            if offset < 0:
                limit = limit + offset
                offset = 0
        else:
            return -5  # AFAICT, this means URL was bad

    return 'zero'
166+
167+
168+
def update_animals(last_update):
    """Get the animals inserted or updated since last check, insert/update db records.

    NOTE(review): incomplete stub — it fetches the updated-record count but
    does not yet pull or store the records themselves.
    """
    updated_count = get_updated_animal_count(last_update)
172+
173+
174+
175+
176+
177+
178+
179+
180+
181+
182+
183+
184+
185+
186+
187+
188+
189+
190+
191+
192+
def sla_test():
    """Smoke test: report the total animal count, then bulk-pull all records."""
    total_count = get_animal_count()
    print('Total animals:', total_count)

    result = get_animals_bulk(total_count)
    print(len(result))

    # f = filter_animals(result)
    # print(f)

    # count = shelterluv_db.insert_animals(f)
    return len(result)
204+
205+
# if __name__ == '__main__' :
206+
207+
# total_count = get_animal_count()
208+
# print('Total animals:',total_count)
209+
210+
# b = get_animals_bulk(9)
211+
# print(len(b))
212+
213+
# f = filter_animals(b)
214+
# print(f)
215+
216+
# count = shelterluv_db.insert_animals(f)

0 commit comments

Comments
 (0)