change: allow only one integration test run per time #880

Merged Jun 27, 2019 (24 commits)
Commits (24 total; changes shown from 11 commits)
131ec2a
change: allow only one integration test run per time
mvsusp Jun 25, 2019
1c3c673
Update buildspec.yml
mvsusp Jun 25, 2019
bde21ff
Update buildspec.yml
mvsusp Jun 25, 2019
cb94061
Update buildspec.yml
mvsusp Jun 25, 2019
bc2fa85
Update buildspec.yml
mvsusp Jun 25, 2019
9c7367d
change: allow only one integration test run per time
mvsusp Jun 25, 2019
6b54703
Merge branch 'mvs-cilock' of github.com:mvsusp/sagemaker-python-sdk i…
mvsusp Jun 25, 2019
7f94164
change: allow only one integration test run per time
mvsusp Jun 25, 2019
5da7012
change: allow only one integration test run per time
mvsusp Jun 25, 2019
b420893
change: allow only one integration test run per time
mvsusp Jun 25, 2019
5273782
change: allow only one integration test run per time
mvsusp Jun 25, 2019
266d7be
change: allow only one integration test run per time
mvsusp Jun 25, 2019
0e79d18
change: allow only one integration test run per time
mvsusp Jun 25, 2019
e4d6505
change: allow only one integration test run per time
mvsusp Jun 25, 2019
6c7af65
change: allow only one integration test run per time
mvsusp Jun 25, 2019
1a5fd2a
change: allow only one integration test run per time
mvsusp Jun 25, 2019
ef94d66
change: allow only one integration test run per time
mvsusp Jun 25, 2019
21ee422
change: allow only one integration test run per time
mvsusp Jun 25, 2019
d5b46aa
change: allow only one integration test run per time
mvsusp Jun 25, 2019
29bdd84
change: allow only one integration test run per time
mvsusp Jun 25, 2019
6f3e7d4
change: allow only one integration test run per time
mvsusp Jun 25, 2019
3877f2d
Merge branch 'master' into mvs-cilock
mvsusp Jun 25, 2019
eec3f3a
Merge branch 'master' into mvs-cilock
mvsusp Jun 26, 2019
7916612
Merge branch 'master' into mvs-cilock
mvsusp Jun 26, 2019
72 changes: 43 additions & 29 deletions buildspec.yml
@@ -7,34 +7,48 @@ phases:

build:
commands:
# run linters
- tox -e flake8,pylint

# run package and docbuild checks
- tox -e twine
- tox -e sphinx

# run format verification
- tox -e black-check

# run unit tests
- AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= AWS_SESSION_TOKEN=
AWS_CONTAINER_CREDENTIALS_RELATIVE_URI= AWS_DEFAULT_REGION=
tox -e py36,py27 -- tests/unit

# run notebook test
- |
if has-matching-changes "src/*.py" "setup.py" "setup.cfg" "buildspec.yml"; then
echo "running notebook test"
./tests/scripts/run-notebook-test.sh
else
echo "skipping notebook test"
fi
python3 ci-scripts/queue_build.py
sleep 20m

# run integration tests
- |
if has-matching-changes "tests/" "src/*.py" "setup.py" "setup.cfg" "buildspec.yml"; then
IGNORE_COVERAGE=- tox -e py36,py27 -- tests/integ -n 24 --boxed --reruns 2
else
echo "skipping integration tests"
fi
# # run linters
# - tox -e flake8,pylint
#
# # run package and docbuild checks
# - tox -e twine
# - tox -e sphinx
#
# # run format verification
# - tox -e black-check
#
# # run unit tests
# - AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= AWS_SESSION_TOKEN=
# AWS_CONTAINER_CREDENTIALS_RELATIVE_URI= AWS_DEFAULT_REGION=
# tox -e py36,py27 -- tests/unit
#
# # run notebook test
# - |
# if has-matching-changes "src/*.py" "setup.py" "setup.cfg" "buildspec.yml"; then
# echo "running notebook test"
# ./tests/scripts/run-notebook-test.sh
# else
# echo "skipping notebook test"
# fi
#
# # wait its turn to run integration tests
# - ACCOUNT=$(aws sts get-caller-identity --output text | awk '{print $1}')
# - S3_BUCKET_DIR=s3://sagemaker-us-west-2-${ACCOUNT}/ci-lock
#
# # run integration tests
# - |
# if has-matching-changes "tests/" "src/*.py" "setup.py" "setup.cfg" "buildspec.yml"; then
# python ci/queue_build.py
# IGNORE_COVERAGE=- tox -e py36,py27 -- tests/integ -n 24 --boxed --reruns 2
# else
# echo "skipping integration tests"
# fi
finally:
- FILENAME=$(ls ci-lock/)
- ACCOUNT=$(aws sts get-caller-identity --output text | awk '{print $1}')
- S3_BUCKET_DIR=s3://sagemaker-us-west-2-${ACCOUNT}/ci-lock/
- aws s3 rm ${S3_BUCKET_DIR}${FILENAME}
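
The `finally` block above releases this build's queue ticket even when the tests fail: it reads the ticket filename written locally by `ci-scripts/queue_build.py` and deletes the matching object under the `ci-lock/` prefix of the account's S3 bucket. A minimal boto3 sketch of that release step, for illustration only (it is not part of the diff and assumes the local `ci-lock/` directory holds exactly one ticket file for this build):

import os

import boto3

# Assumption: ci-lock/ was created by ci-scripts/queue_build.py earlier in the
# build and contains exactly one ticket file for this build.
ticket = os.listdir("ci-lock")[0]

account = boto3.client("sts").get_caller_identity()["Account"]
bucket = "sagemaker-us-west-2-%s" % account

# Removing the ticket object unblocks the next build waiting in the queue.
boto3.resource("s3").Object(bucket, "ci-lock/" + ticket).delete()
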
106 changes: 106 additions & 0 deletions ci-scripts/queue_build.py
@@ -0,0 +1,106 @@
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import time
import boto3
import botocore.exceptions

account = boto3.client("sts").get_caller_identity()["Account"]
bucket_name = 'sagemaker-us-west-2-%s' % account


def queue_build():
build_id = os.environ.get('CODEBUILD_BUILD_ID', 'CODEBUILD-BUILD-ID')
source_version = os.environ.get('CODEBUILD_SOURCE_VERSION', 'CODEBUILD-SOURCE-VERSION').replace('/', '-')
ticket_number = int(time.time())
filename = '%s_%s_%s' % (ticket_number, build_id, source_version)

print('Created queue ticket %s' % ticket_number)

_write_ticket(filename)
files = _list_tickets()
_cleanup_tickets_older_than_8_days(files)
_wait_for_other_builds(files, ticket_number)


def _build_info_from_file(file):
filename = file.key.split('/')[1]
ticket_number, build_id, source_version = filename.split('_')
return int(ticket_number), build_id, source_version


def _wait_for_other_builds(files, ticket_number):
newfiles = list(filter(lambda file: not _file_older_than(file), files))
sorted_files = list(sorted(newfiles, key=lambda y: y.key))

print('build queue status:')
print()

for order, file in enumerate(sorted_files):
file_ticket_number, build_id, source_version = _build_info_from_file(file)
print('%s -> %s %s, ticket number: %s' % (order, build_id, source_version, file_ticket_number))

for file in sorted_files:
file_ticket_number, build_id, source_version = _build_info_from_file(file)

if file_ticket_number == ticket_number:

break
else:
while True:
try:
print('waiting on build %s %s %s' % (build_id, source_version, file_ticket_number))
file.wait_until_not_exists()

Contributor comment: Will this ever exit? Does the file get removed from the filesystem if it's deleted in s3?
Contributor reply: As discussed, file is an s3 object, so this works fine.

break
except botocore.exceptions.WaiterError:
# keep waiting
pass


def _cleanup_tickets_older_than_8_days(files):
Contributor comment: days -> hours

oldfiles = list(filter(_file_older_than, files))
for file in oldfiles:
print('object %s older than 8 hours. Deleting' % file.key)
file.delete()
return files


def _list_tickets():
s3 = boto3.resource('s3')
bucket = s3.Bucket(bucket_name)
objects = [file for file in bucket.objects.filter(Prefix='ci-lock/')]
files = list(filter(lambda x: x != 'ci-lock/', objects))
return files


def _file_older_than(file):
timelimit = 60 * 60 * 8

file_ticket_number, build_id, source_version = _build_info_from_file(file)

return int(time.time()) - file_ticket_number > timelimit


def _write_ticket(ticket_number):

if not os.path.exists('ci-lock'):
os.mkdir('ci-lock')

filename = 'ci-lock/' + ticket_number
with open(filename, 'w') as file:
file.write(ticket_number)
boto3.Session().resource("s3").Object(bucket_name, filename).upload_file(filename)


if __name__ == '__main__':
queue_build()
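
The review thread on `file.wait_until_not_exists()` asks whether the loop can exit. Since `file` is an S3 object resource (an ObjectSummary yielded by `bucket.objects.filter`), the waiter polls S3 rather than the local filesystem: it returns once the ticket object ahead of this build has been deleted by that build's `finally` step, and raises `botocore.exceptions.WaiterError` if the object still exists when the waiter's timeout elapses, which the surrounding loop catches in order to keep waiting. A minimal sketch of that pattern, for illustration only (`wait_for_ticket_release` is a hypothetical helper, not part of the PR):

import boto3
import botocore.exceptions


def wait_for_ticket_release(bucket_name, ticket_key):
    """Block until the S3 object `ticket_key` no longer exists."""
    obj = boto3.resource("s3").Object(bucket_name, ticket_key)
    while True:
        try:
            # Polls S3 until the object is gone; raises WaiterError if the
            # object still exists when the waiter's internal timeout elapses.
            obj.wait_until_not_exists()
            return
        except botocore.exceptions.WaiterError:
            # Still locked by an earlier build; keep waiting.
            pass


A build waiting on the ticket directly ahead of it in the sorted queue would call something like wait_for_ticket_release(bucket_name, sorted_files[0].key).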