Commit 30fc432

Merge pull request #1772 from cmu-delphi/ndefries/backfill/check-publish-files
[Backfill corrections] check if any files exist in receiving before trying to publish
2 parents e5ffa45 + 9cbc276

1 file changed: +8 -4 lines changed

backfill_corrections/Makefile

Lines changed: 8 additions & 4 deletions
@@ -81,10 +81,14 @@ run:
 	/bin/bash -c "cp params.host.json params.json && make gurobi.lic && make standardize-dirs && make run-local OPTIONS=\"${OPTIONS}\""
 
 publish:
-	aws configure set aws_access_key_id $(AWS_KEY_ID)
-	aws configure set aws_secret_access_key $(AWS_SECRET_KEY)
-	aws s3 cp $(USR_INPUT_DIR) $(S3_BUCKET)/ --recursive --exclude "*" --include "*.csv.gz" --acl public-read
-	echo "SUCCESS: published `ls -1 $(USR_EXPORT_DIR)/*.csv.gz | wc -l` files to the S3 bucket" >> $(LOG_FILE)
+	if [ -f $(USR_INPUT_DIR)/*.csv.gz ]; then \
+		aws configure set aws_access_key_id $(AWS_KEY_ID); \
+		aws configure set aws_secret_access_key $(AWS_SECRET_KEY); \
+		aws s3 cp $(USR_INPUT_DIR) $(S3_BUCKET)/ --recursive --exclude "*" --include "*.csv.gz" --acl public-read; \
+		echo "SUCCESS: published `ls -1 $(USR_EXPORT_DIR)/*.csv.gz | wc -l` files to the S3 bucket" >> $(LOG_FILE); \
+	else \
+		echo "No files in $(USR_EXPORT_DIR) to publish" >> $(LOG_FILE); \
+	fi
 
 pipeline: setup-dirs run publish clean
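For context, below is a minimal standalone shell sketch of the guard-before-publish pattern the new recipe follows. It is an illustration only, not part of the commit: the variable values are placeholders standing in for the Makefile's $(USR_INPUT_DIR), $(S3_BUCKET), and $(LOG_FILE), AWS credentials are assumed to be configured already, and the existence check here uses ls with a glob rather than the recipe's [ -f ... ] test.

    #!/bin/sh
    # Illustrative sketch; directory, bucket, and log paths are placeholders.
    INPUT_DIR="receiving"                # stands in for $(USR_INPUT_DIR)
    S3_BUCKET="s3://example-bucket"      # stands in for $(S3_BUCKET)
    LOG_FILE="backfill_publish.log"      # stands in for $(LOG_FILE)

    # Publish only when at least one .csv.gz file is waiting in the receiving dir.
    if ls "$INPUT_DIR"/*.csv.gz >/dev/null 2>&1; then
        aws s3 cp "$INPUT_DIR" "$S3_BUCKET"/ --recursive \
            --exclude "*" --include "*.csv.gz" --acl public-read
        echo "SUCCESS: published $(ls -1 "$INPUT_DIR"/*.csv.gz | wc -l) files to the S3 bucket" >> "$LOG_FILE"
    else
        echo "No files in $INPUT_DIR to publish" >> "$LOG_FILE"
    fi

Run with no matching files, the sketch appends the "No files" line to the log and exits cleanly instead of attempting an empty upload, which is the behavior the commit adds to the pipeline's publish step.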

0 commit comments
