
Commit cbb26fe

Merge branch 'master' into 506-salesforce-message-publisher
2 parents 70686c9 + 8c592b0 commit cbb26fe

18 files changed: +510 -617 lines changed

src/client/src/pages/Admin.js

Lines changed: 48 additions & 44 deletions
@@ -154,42 +154,46 @@ export default function Admin(props) {
 
 <Grid container item spacing={5} direction="row" style={{padding: 20}}>
 <Grid container item direction="column" spacing={3} sm={6}>
-<Grid item>
-<Typography variant="h5">Latest Files</Typography>
-</Grid>
-<Grid item>
-{_.isEmpty(fileListHtml) !== true &&
-<TableContainer component={Paper}>
-<Table aria-label="simple table">
-<TableHead>
-<TableRow>
-<TableCell><b>File Type</b></TableCell>
-<TableCell><b>Last Updated</b></TableCell>
-</TableRow>
-</TableHead>
-<TableBody>
-{_.map(fileListHtml, (file, index) => {
-const fileName = file.split("-")[0];
-let fileDate = file.split("-").slice(1).join().split(".")[0];
-let fileDateOnlyNumbers = fileDate.replaceAll(",", "");
-let fileDateFormatted = moment(fileDateOnlyNumbers, "YYYYMMDDhmmss").local().format("MMMM Do YYYY, h:mm:ss a");
-
-return (
-<TableRow key={index}>
-<TableCell>{fileName}</TableCell>
-<TableCell>{fileDateFormatted}</TableCell>
+{_.isEmpty(fileListHtml) !== true &&
+<>
+<Grid item>
+<Typography variant="h5">Latest Files</Typography>
+</Grid>
+<Grid item>
+<TableContainer component={Paper}>
+<Table aria-label="simple table">
+<TableHead>
+<TableRow>
+<TableCell><b>File Type</b></TableCell>
+<TableCell><b>Last Updated</b></TableCell>
 </TableRow>
-)
-})
-}
-</TableBody>
-</Table>
-</TableContainer>}
-</Grid>
+</TableHead>
+<TableBody>
+{_.map(fileListHtml, (file, index) => {
+const fileName = file.split("-")[0];
+let fileDate = file.split("-").slice(1).join().split(".")[0];
+let fileDateOnlyNumbers = fileDate.replaceAll(",", "");
+let fileDateFormatted = moment(fileDateOnlyNumbers, "YYYYMMDDhmmss").local().format("MMMM Do YYYY, h:mm:ss a");
+
+return (
+<TableRow key={index}>
+<TableCell>{fileName}</TableCell>
+<TableCell>{fileDateFormatted}</TableCell>
+</TableRow>
+)
+})
+}
+</TableBody>
+</Table>
+</TableContainer>
+</Grid>
+</>
+}
 <Grid item>
 <Paper>
 <CardContent>
 <Typography variant="h5">Upload Files</Typography>
+<Typography variant="caption">Note: This upload feature now only accepts Volgistics data files. Other data is uploaded automatically.</Typography>
 <form onSubmit={handleUpload}>
 <input type="file" id="fileItemsID"
 value={filesInput}
@@ -205,6 +209,19 @@ export default function Admin(props) {
 </Grid>
 </Grid>
 <Grid container item direction="column" spacing={3} sm={6}>
+<Grid item>
+<Paper style={{ padding: 5 }}>
+<CardContent>
+<Typography variant="h5" styles={{paddingBottom: 5}}>Run New Analysis</Typography>
+<form onSubmit={handleExecute}>
+<Button type="submit" variant="contained" color="primary"
+disabled={statistics === 'Running'}>
+Run Data Analysis
+</Button>
+</form>
+</CardContent>
+</Paper>
+</Grid>
 <Grid item>
 <Typography variant="h5">Last Match Analysis</Typography>
 </Grid>
@@ -237,19 +254,6 @@ export default function Admin(props) {
 </TableContainer>
 }
 </Grid>
-<Grid item>
-<Paper style={{padding: 5, marginTop: 10}}>
-<CardContent>
-<Typography variant="h5" styles={{paddingBottom: 5}}>Run New Analysis</Typography>
-<form onSubmit={handleExecute}>
-<Button type="submit" variant="contained" color="primary"
-disabled={statistics === 'Running'}>
-Run Data Analysis
-</Button>
-</form>
-</CardContent>
-</Paper>
-</Grid>
 
 </Grid>
 </Grid>

Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
+"""rmv shifts uniqueness constraint
+
+Revision ID: d80cb6df0fa2
+Revises: 90f471ac445c
+Create Date: 2023-03-18 16:22:23.282568
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'd80cb6df0fa2'
+down_revision = '90f471ac445c'
+branch_labels = None
+depends_on = None
+
+# It's probably more likely that a duplicate row is actually a real shift with a bad (dupe) day,
+# like Saturday, Saturday instead of Saturday, Sunday.
+# We really care about the last shift, so this is not critical.
+
+def upgrade():
+    op.drop_constraint("uq_shift", "volgisticsshifts")
+
+def downgrade():
+    # op.create_unique_constraint("uq_shift", "volgisticsshifts", ["volg_id", "assignment", "from_date", "hours"])
+    # This will fail if you have any dupes:
+    # running
+    #     ALTER TABLE "public"."volgisticsshifts" ADD CONSTRAINT "uq_shift" UNIQUE ("volg_id", "assignment", "from_date", "hours");
+    # will fail and tell you of any dupes so you can fix them.
+    pass
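
The downgrade is left as a no-op because re-adding uq_shift fails whenever duplicate rows already exist. Below is a minimal sketch of a check you could run before attempting the downgrade, assuming only the four columns named in the old constraint; the helper itself is illustrative and not part of this commit.

# Hypothetical pre-downgrade check: list duplicate volgisticsshifts rows that
# would block re-creating the "uq_shift" unique constraint.
from sqlalchemy import text
from config import engine  # the repo's existing engine config, as used elsewhere in this commit

def find_duplicate_shifts():
    dupe_query = text("""
        SELECT volg_id, assignment, from_date, hours, COUNT(*) AS n
        FROM volgisticsshifts
        GROUP BY volg_id, assignment, from_date, hours
        HAVING COUNT(*) > 1;
    """)
    with engine.connect() as connection:
        return connection.execute(dupe_query).fetchall()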

src/server/api/API_ingest/ingest_sources_from_api.py

Lines changed: 3 additions & 0 deletions
@@ -1,5 +1,7 @@
 from api.API_ingest import shelterluv_people, salesforce_contacts, sl_animal_events
 import structlog
+
+from pipeline.log_db import log_shelterluv_update
 logger = structlog.get_logger()
 
 def start():
@@ -16,6 +18,7 @@ def start():
     logger.debug(" Fetching Shelterluv events")
     sle_count = sl_animal_events.store_all_animals_and_events()
     logger.debug(" Finished fetching Shelterluv events - %d records" , sle_count)
+    log_shelterluv_update()
 
     logger.debug("Finished fetching raw data from different API sources")
 
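
The new log_shelterluv_update() call stamps the time of the last successful Shelterluv ingest. pipeline/log_db.py itself is not shown in this diff; what follows is a hedged sketch of what such a helper could look like, assuming it upserts an '*_update' row into the kv_unique(keycol, valcol) table that the new /api/get_last_runs endpoint queries. This is an assumption, not the committed implementation.

# Hypothetical sketch of pipeline/log_db.py -- assumes a PostgreSQL kv_unique table
# with a unique keycol, matching the '%_update' query in get_run_logs().
from datetime import datetime, timezone
from sqlalchemy import text
from config import engine

def _log_update(key):
    stamp = datetime.now(timezone.utc).isoformat()
    with engine.begin() as connection:
        connection.execute(
            text("""INSERT INTO kv_unique (keycol, valcol) VALUES (:k, :v)
                    ON CONFLICT (keycol) DO UPDATE SET valcol = EXCLUDED.valcol;"""),
            {"k": key, "v": stamp},
        )

def log_shelterluv_update():
    _log_update("shelterluv_update")

def log_volgistics_update():
    _log_update("volgistics_update")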

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+from sqlalchemy import Table, MetaData
+from sqlalchemy.orm import sessionmaker
+
+from config import engine
+
+import structlog
+logger = structlog.get_logger()
+
+def insert_volgistics_people(row_list):
+
+    row_count = 0
+    try:
+        Session = sessionmaker(engine)
+        session = Session()
+        metadata = MetaData()
+        volg_table = Table("volgistics", metadata, autoload=True, autoload_with=engine)
+
+        result = session.execute("TRUNCATE table volgistics;")
+        ret = session.execute(volg_table.insert(row_list))
+
+        row_count = ret.rowcount
+
+        session.commit()  # Commit all inserted rows
+        session.close()
+    except Exception as e:
+        row_count = 0
+        logger.error("Exception inserting volgistics people")
+        logger.exception(e)
+    return row_count
+
+
+def insert_volgistics_shifts(row_list):
+
+    row_count = 0
+    try:
+        Session = sessionmaker(engine)
+        session = Session()
+        metadata = MetaData()
+        volg_table = Table("volgisticsshifts", metadata, autoload=True, autoload_with=engine)
+
+        result = session.execute("TRUNCATE table volgisticsshifts;")
+        ret = session.execute(volg_table.insert(row_list))
+
+        row_count = ret.rowcount
+
+        session.commit()  # Commit all inserted rows
+        session.close()
+    except Exception as e:
+        row_count = 0
+        logger.error("Exception inserting volgistics shifts")
+        logger.exception(e.pgerror)
+    return row_count
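
Both helpers follow the same truncate-then-bulk-insert pattern and return the number of rows written, or 0 when the insert failed and the exception was logged. A minimal usage sketch follows; the import is omitted because this new module's path is not visible in the extract, and the column names are taken from the old uq_shift constraint, so the real volgisticsshifts schema may contain more fields.

# Illustrative call only; in practice row_list is built by the Volgistics workbook importer.
rows = [
    {"volg_id": 12345, "assignment": "Front Desk", "from_date": "2023-03-18", "hours": 2.5},
    {"volg_id": 12345, "assignment": "Dog Walking", "from_date": "2023-03-19", "hours": 3.0},
]

inserted = insert_volgistics_shifts(rows)
if inserted == 0:
    # 0 means nothing was written; any exception was already logged by the helper.
    print("No shift rows were inserted")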

src/server/api/admin_api.py

Lines changed: 20 additions & 0 deletions
@@ -231,6 +231,26 @@ def start_job():
     return job_id
 
 
+@admin_api.route("/api/get_last_runs", methods=["GET"])
+#@jwt_ops.admin_required
+def get_run_logs():
+    """ Get the timestamps of the last update runs"""
+
+    with engine.connect() as connection:
+        q = text("""select keycol,valcol from kv_unique where keycol like '%_update'; """)
+        result = connection.execute(q)
+
+        if result.rowcount > 0:
+            rows = result.fetchall()
+
+            row_list = []
+
+            for row in rows:
+                row_dict = row._mapping
+                row_list.append({row_dict['keycol'] : row_dict['valcol']})
+
+    return jsonify(row_list)
+
 
 def insert_rfm_scores(score_list):
     """Take a list of (matching_id, score) and insert into the

src/server/api/file_uploader.py

Lines changed: 12 additions & 35 deletions
@@ -1,11 +1,10 @@
-import pandas as pd
 from config import engine
 from donations_importer import validate_import_sfd
 from flask import current_app
 from models import ManualMatches, SalesForceContacts, ShelterluvPeople, Volgistics
-from shifts_importer import validate_import_vs
+from pipeline.log_db import log_volgistics_update
+from volgistics_importer import open_volgistics, validate_import_vs, volgistics_people_import
 from werkzeug.utils import secure_filename
-
 import structlog
 logger = structlog.get_logger()
 
@@ -26,39 +25,17 @@ def determine_upload_type(file, file_extension, conn):
     # automatically pulling from vendor APIs directly, in which case we'd know
     # what kind of data we had.
     if file_extension == "csv":
-        logger.debug("File extension is CSV")
-        df = pd.read_csv(file, dtype="string")
-
-        if {"salesforcecontacts", "volgistics", "shelterluvpeople"}.issubset(df.columns):
-            logger.debug("File appears to be salesforcecontacts, volgistics, or shelterluvpeople (manual)")
-            ManualMatches.insert_from_df(df, conn)
-            return
-        elif {"Animal_ids", "Internal-ID"}.issubset(df.columns):
-            logger.debug("File appears to be shelterluvpeople")
-            ShelterluvPeople.insert_from_df(df, conn)
-            return
+        logger.warn("%s: We no longer support CSV files", file.filename)
+        return
 
     if file_extension == "xlsx":
-        excel_file = pd.ExcelFile(file)
-        if {"Master", "Service"}.issubset(excel_file.sheet_names):
-            logger.debug("File appears to be Volgistics")
-            # Volgistics
-            validate_import_vs(file, conn)
-            Volgistics.insert_from_file(excel_file, conn)
-            return
-
-        df = pd.read_excel(excel_file)
-        if "Contact ID 18" in df.columns:
-            # Salesforce something-or-other
-            if "Amount" in df.columns:
-                # Salesforce donations
-                logger.debug("File appears to be Salesforce donations")
-                validate_import_sfd(file, conn)
-                return
-            else:
-                # Salesforce contacts
-                logger.debug("File appears to be Salesforce contacts")
-                SalesForceContacts.insert_from_file_df(df, conn)
-                return
+        # Assume it's Volgistics
+        workbook = open_volgistics(file)
+        if workbook:
+            validate_import_vs(workbook)
+            volgistics_people_import(workbook)
+            workbook.close()
+            log_volgistics_update()
+            return
 
     logger.error("Don't know how to process file: %s", file.filename)

src/server/api/internal_api.py

Lines changed: 8 additions & 6 deletions
@@ -6,10 +6,12 @@
 from api.API_ingest import ingest_sources_from_api
 from api.API_ingest import updated_data
 from api.api import internal_api
+
 from pipeline import flow_script
 from pub_sub import salesforce_message_publisher
 from rfm_funcs.create_scores import create_scores
 
+
 logger = structlog.get_logger()
 
 ### Internal API endpoints can only be accessed from inside the cluster;
@@ -39,12 +41,12 @@ def ingest_raw_data():
     return jsonify({'outcome': 'OK'}), 200
 
 
-@internal_api.route("/api/internal/create_scores", methods=["GET"])
-def hit_create_scores():
-    logger.info("Hitting create_scores() ")
-    tuple_count = create_scores()
-    logger.info("create_scores() processed %s scores", str(tuple_count) )
-    return jsonify(200)
+# @internal_api.route("/api/internal/create_scores", methods=["GET"])
+# def hit_create_scores():
+#     logger.info("Hitting create_scores() ")
+#     tuple_count = create_scores()
+#     logger.info("create_scores() processed %s scores", str(tuple_count) )
+#     return jsonify(200)
 
 
 @internal_api.route("/api/internal/get_updated_data", methods=["GET"])