Commit 106254f

Merge pull request #530 from CodeForPhilly/512-salesforce-automation
api endpoint to download full datasets from salesforce and shelterluv
2 parents: fa7add9 + b65d1a6

16 files changed: +252 -229 lines
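The endpoint the title refers to is among the 16 changed files but is not reproduced below. As a rough sketch only, a Flask route wired to the refactored ingest code might look like the following; the route path and handler name are invented, common_api and the api.API_ingest package come from code shown in this commit, and the module name "ingest" for the file that defines start() is an assumption.

# Hypothetical sketch, not the commit's actual endpoint.
from flask import jsonify

from api.api import common_api          # existing Flask blueprint (seen in the removed imports below)
from api.API_ingest import ingest       # module defining start(); exact path assumed


@common_api.route("/api/ingestRawData", methods=["GET"])
def ingest_raw_data():
    ingest.start()                       # pull Salesforce contacts plus Shelterluv people and events
    return jsonify({"outcome": "OK"})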

.gitignore

Lines changed: 2 additions & 0 deletions

@@ -24,3 +24,5 @@ start_env.sh
 *secrets*
 *kustomization*
 src/.venv/
+src/server/secrets_dict.py
+

src/server/api/.optic/.gitignore

Lines changed: 0 additions & 2 deletions
This file was deleted.
Lines changed: 8 additions & 9 deletions

@@ -1,22 +1,21 @@
+from api.API_ingest import shelterluv_people, salesforce_contacts, sl_animal_events
 import structlog
 logger = structlog.get_logger()

-from api.API_ingest import shelterluv_api_handler, sl_animal_events
+def start():
+    logger.debug("Start Fetching raw data from different API sources")

-def start(conn):
-    logger.debug("Start fetching raw data from different API sources")
+    logger.debug(" Fetching Salesforce contacts")
+    salesforce_contacts.store_contacts_all()
+    logger.debug(" Finished fetching Salesforce contacts")

     logger.debug(" Fetching Shelterluv people")
-    #Run each source to store the output in dropbox and in the container as a CSV
-    slp_count = shelterluv_api_handler.store_shelterluv_people_all(conn)
+    slp_count = shelterluv_people.store_shelterluv_people_all()
     logger.debug(" Finished fetching Shelterluv people - %d records", slp_count)

     logger.debug(" Fetching Shelterluv events")
-    #Run each source to store the output in dropbox and in the container as a CSV
-    sle_count = sl_animal_events.slae_test()
+    sle_count = sl_animal_events.store_all_animals_and_events()
     logger.debug(" Finished fetching Shelterluv events - %d records", sle_count)

     logger.debug("Finished fetching raw data from different API sources")

-
-#TODO: Return object with count for each data source?
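The visible effect of this refactor is that start(conn) becomes start(): the caller no longer threads a database connection through, because each source module (salesforce_contacts, shelterluv_people, sl_animal_events) opens and closes its own SQLAlchemy session, as the new salesforce_contacts code below shows. A side benefit, sketched here under the same import path used in the diff, is that a single source can be re-fetched in isolation.

# Sketch only, not part of the commit: re-pull just the Salesforce contacts
# without touching the Shelterluv tables.
from api.API_ingest import salesforce_contacts

def refresh_salesforce_only():
    salesforce_contacts.store_contacts_all()   # opens its own session internally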
Lines changed: 44 additions & 0 deletions

@@ -0,0 +1,44 @@
+import os
+
+from sqlalchemy.orm import sessionmaker
+from simple_salesforce import Salesforce
+from config import engine
+from models import SalesForceContacts
+
+import structlog
+logger = structlog.get_logger()
+
+def store_contacts_all():
+    Session = sessionmaker(engine)
+    with Session() as session:
+
+        logger.debug("truncating table salesforcecontacts")
+        session.execute("TRUNCATE TABLE salesforcecontacts")
+
+        logger.debug("retrieving the latest salesforce contacts data")
+        sf = Salesforce(domain=os.getenv('SALESFORCE_DOMAIN'), password=os.getenv('SALESFORCE_PW'), username=os.getenv('SALESFORCE_USERNAME'), security_token=os.getenv('SALESFORCE_SECURITY_TOKEN'))
+        results = sf.query("SELECT Contact_ID_18__c, FirstName, LastName, Contact.Account.Name, MailingCountry, MailingStreet, MailingCity, MailingState, MailingPostalCode, Phone, MobilePhone, Email FROM Contact")
+        logger.debug("Query returned %d Salesforce contact records", len(results['records']))
+
+        done = False
+        while not done:
+            for row in results['records']:
+                account_name = row['Account']['Name'] if row['Account'] is not None else None
+                contact = SalesForceContacts(contact_id=row['Contact_ID_18__c'],
+                                             first_name=row['FirstName'],
+                                             last_name=row['LastName'],
+                                             account_name=account_name,
+                                             mailing_country=row['MailingCountry'],
+                                             mailing_street=row['MailingStreet'],
+                                             mailing_city=row['MailingCity'],
+                                             mailing_state_province=row['MailingState'],
+                                             mailing_zip_postal_code=row['MailingPostalCode'],
+                                             phone=row['Phone'],
+                                             mobile=row['MobilePhone'],
+                                             email=row['Email'])
+                session.add(contact)
+            done = results['done']
+            if not done:
+                results = sf.query_more(results['nextRecordsUrl'])
+        session.commit()
+    logger.debug("finished downloading latest salesforce contacts data")
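store_contacts_all() pages through the Salesforce REST API by hand: sf.query() returns the first batch of records plus a done flag and a nextRecordsUrl, and the while loop calls sf.query_more() until done is true. For comparison, simple_salesforce also provides query_all(), which follows nextRecordsUrl internally and returns every page at once; a minimal sketch, with the field list shortened and the same environment variables assumed:

# Sketch only - not what the commit does.
import os
from simple_salesforce import Salesforce

sf = Salesforce(domain=os.getenv('SALESFORCE_DOMAIN'),
                username=os.getenv('SALESFORCE_USERNAME'),
                password=os.getenv('SALESFORCE_PW'),
                security_token=os.getenv('SALESFORCE_SECURITY_TOKEN'))

results = sf.query_all("SELECT Contact_ID_18__c, FirstName, LastName, Email FROM Contact")
print(len(results['records']))   # all pages, not just the first batch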

src/server/api/API_ingest/shelterluv_api_handler.py

Lines changed: 0 additions & 114 deletions
This file was deleted.
Lines changed: 64 additions & 81 deletions

@@ -1,18 +1,10 @@
-from api.api import common_api
-from config import engine
-from flask import jsonify, current_app
-from sqlalchemy.sql import text
-import requests
-import time
-from datetime import datetime
-
-from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy import Table, MetaData
-from pipeline import flow_script
+from sqlalchemy.orm import sessionmaker
+
 from config import engine
-from flask import request, redirect, jsonify, current_app
-from api.file_uploader import validate_and_arrange_upload
-from sqlalchemy.orm import Session, sessionmaker
+
+import structlog
+logger = structlog.get_logger()


 def insert_animals(animal_list):
@@ -53,11 +45,9 @@ def truncate_animals():

     Session = sessionmaker(engine)
     session = Session()
-    metadata = MetaData()
-    sla = Table("shelterluv_animals", metadata, autoload=True, autoload_with=engine)

     truncate = "TRUNCATE table shelterluv_animals;"
-    result = session.execute(truncate)
+    session.execute(truncate)

     session.commit()  # Commit all inserted rows
     session.close()
@@ -69,81 +59,74 @@ def truncate_events():
     """Truncate the shelterluv_events table"""

     Session = sessionmaker(engine)
-    session = Session()
-    metadata = MetaData()
-    sla = Table("sl_animal_events", metadata, autoload=True, autoload_with=engine)
-
-    truncate = "TRUNCATE table sl_animal_events;"
-    result = session.execute(truncate)
-
-    session.commit()  # Commit all inserted rows
-    session.close()
+    with Session() as session:
+        truncate = "TRUNCATE table sl_animal_events;"
+        session.execute(truncate)
+        session.commit()

     return 0

-
 def insert_events(event_list):
     """Insert event records into sl_animal_events table and return row count. """

     # Always a clean insert
     truncate_events()

     Session = sessionmaker(engine)
-    session = Session()
-    metadata = MetaData()
-    sla = Table("sl_animal_events", metadata, autoload=True, autoload_with=engine)
-
-    # TODO: Pull from DB - inserted in db_setup/base_users.py/populate_sl_event_types()
-    event_map = {
-        "Outcome.Adoption": 1,
-        "Outcome.Foster": 2,
-        "Outcome.ReturnToOwner": 3,
-        "Intake.AdoptionReturn": 4,
-        "Intake.FosterReturn":5
-    }
-
-    # """ INSERT INTO "sl_event_types" ("id","event_name") VALUES
-    #     ( 1,'Outcome.Adoption' ),
-    #     ( 2,'Outcome.Foster' ),
-    #     ( 3,'Outcome.ReturnToOwner' ),
-    #     ( 4,'Intake.AdoptionReturn' ),
-    #     ( 5,'Intake.FosterReturn' ) """
-
-
-
-
-    # Event record: [ AssociatedRecords[Type = Person]["Id"]',
-    #                 AssociatedRecords[Type = Animal]["Id"]',
-    #                 "Type",
-    #                 "Time"
-    #               ]
-    #
-    # In db: ['id',
-    #         'person_id',
-    #         'animal_id',
-    #         'event_type',
-    #         'time']
-
-    ins_list = []  # Create a list of per-row dicts
-    for rec in event_list:
-        ins_list.append(
-            {
-                "person_id": next(
-                    filter(lambda x: x["Type"] == "Person", rec["AssociatedRecords"])
-                )["Id"],
-                "animal_id": next(
-                    filter(lambda x: x["Type"] == "Animal", rec["AssociatedRecords"])
-                )["Id"],
-                "event_type": event_map[rec["Type"]],
-                "time": rec["Time"],
-            }
-        )
-
-    # TODO: Wrap with try/catch
-    ret = session.execute(sla.insert(ins_list))
-
-    session.commit()  # Commit all inserted rows
-    session.close()
+    with Session() as session:
+        metadata = MetaData()
+        sla = Table("sl_animal_events", metadata, autoload=True, autoload_with=engine)
+
+        # TODO: Pull from DB - inserted in db_setup/base_users.py/populate_sl_event_types()
+        event_map = {
+            "Outcome.Adoption": 1,
+            "Outcome.Foster": 2,
+            "Outcome.ReturnToOwner": 3,
+            "Intake.AdoptionReturn": 4,
+            "Intake.FosterReturn":5
+        }
+
+        # """ INSERT INTO "sl_event_types" ("id","event_name") VALUES
+        #     ( 1,'Outcome.Adoption' ),
+        #     ( 2,'Outcome.Foster' ),
+        #     ( 3,'Outcome.ReturnToOwner' ),
+        #     ( 4,'Intake.AdoptionReturn' ),
+        #     ( 5,'Intake.FosterReturn' ) """
+
+
+
+
+        # Event record: [ AssociatedRecords[Type = Person]["Id"]',
+        #                 AssociatedRecords[Type = Animal]["Id"]',
+        #                 "Type",
+        #                 "Time"
+        #               ]
+        #
+        # In db: ['id',
+        #         'person_id',
+        #         'animal_id',
+        #         'event_type',
+        #         'time']
+
+        ins_list = []  # Create a list of per-row dicts
+        for rec in event_list:
+            ins_list.append(
+                {
+                    "person_id": next(
+                        filter(lambda x: x["Type"] == "Person", rec["AssociatedRecords"])
+                    )["Id"],
+                    "animal_id": next(
+                        filter(lambda x: x["Type"] == "Animal", rec["AssociatedRecords"])
+                    )["Id"],
+                    "event_type": event_map[rec["Type"]],
+                    "time": rec["Time"],
+                }
+            )
+
+        # TODO: Wrap with try/catch
+        ret = session.execute(sla.insert(ins_list))
+        session.commit()
+        logger.debug("finished inserting events")

     return ret.rowcount

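The comments inside insert_events() describe the shape of a Shelterluv event record and the database columns it maps to. As an illustration with invented values, one record and the row dict the loop builds from it:

# Illustration only; sample values are made up, the shapes follow the comments in the diff.
event_map = {"Outcome.Adoption": 1, "Outcome.Foster": 2, "Outcome.ReturnToOwner": 3,
             "Intake.AdoptionReturn": 4, "Intake.FosterReturn": 5}

rec = {
    "Type": "Outcome.Adoption",
    "Time": "1656527976",
    "AssociatedRecords": [
        {"Type": "Person", "Id": "12345"},
        {"Type": "Animal", "Id": "67890"},
    ],
}

row = {
    "person_id": next(filter(lambda x: x["Type"] == "Person", rec["AssociatedRecords"]))["Id"],
    "animal_id": next(filter(lambda x: x["Type"] == "Animal", rec["AssociatedRecords"]))["Id"],
    "event_type": event_map[rec["Type"]],
    "time": rec["Time"],
}
# row == {'person_id': '12345', 'animal_id': '67890', 'event_type': 1, 'time': '1656527976'}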