
Commit da7babd

Revert "Merge 'master' branch into 'tf-2' branch. (#279)" (#282)
This reverts commit 555de39.
1 parent 555de39 commit da7babd

32 files changed (+84, -1021 lines)

README.rst

Lines changed: 0 additions & 4 deletions
@@ -56,10 +56,6 @@ The Docker images are built from the Dockerfiles specified in
 The Docker files are grouped based on TensorFlow version and separated
 based on Python version and processor type.
 
-The Docker files for TensorFlow 2.0 are available in the
-`tf-2 <https://github.com/aws/sagemaker-tensorflow-container/tree/tf-2>`__ branch, in
-`docker/2.0.0/ <https://github.com/aws/sagemaker-tensorflow-container/tree/tf-2/docker/2.0.0>`__.
-
 The Docker images, used to run training & inference jobs, are built from
 both corresponding "base" and "final" Dockerfiles.
 
buildspec-release.yml

Lines changed: 11 additions & 11 deletions
@@ -2,7 +2,7 @@ version: 0.2
 
 env:
   variables:
-    FRAMEWORK_VERSION: '1.15.0'
+    FRAMEWORK_VERSION: '1.13.1'
     GPU_INSTANCE_TYPE: 'ml.p2.xlarge'
     SETUP_FILE: 'setup_cmds.sh'
     SETUP_CMDS: '#!/bin/bash\npip install --upgrade pip\npip install -U -e .\npip install -U -e .[test]'
@@ -60,21 +60,21 @@ phases:
         echo '[{
           "repository": "sagemaker-tensorflow-scriptmode",
           "tags": [{
-            "source": "1.15.0-cpu-py2",
-            "dest": ["1.15.0-cpu-py2", "1.15-cpu-py2", "1.15.0-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
+            "source": "1.13.1-cpu-py2",
+            "dest": ["1.13.1-cpu-py2", "1.13-cpu-py2", "1.13.1-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
           },{
-            "source": "1.15.0-cpu-py3",
-            "dest": ["1.15.0-cpu-py3", "1.15-cpu-py3", "1.15.0-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
+            "source": "1.13.1-cpu-py3",
+            "dest": ["1.13.1-cpu-py3", "1.13-cpu-py3", "1.13.1-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
           },{
-            "source": "1.15.0-gpu-py2",
-            "dest": ["1.15.0-gpu-py2", "1.15-gpu-py2", "1.15.0-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
+            "source": "1.13.1-gpu-py2",
+            "dest": ["1.13.1-gpu-py2", "1.13-gpu-py2", "1.13.1-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
           },{
-            "source": "1.15.0-gpu-py3",
-            "dest": ["1.15.0-gpu-py3", "1.15-gpu-py3", "1.15.0-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
+            "source": "1.13.1-gpu-py3",
+            "dest": ["1.13.1-gpu-py3", "1.13-gpu-py3", "1.13.1-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
           }],
           "test": [
-            "IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --instance-type {cpu-instance-type} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.15.0 --processor cpu --py-version 2,3",
-            "IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.15.0 --processor gpu --py-version 2,3"
+            "IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --instance-type {cpu-instance-type} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.13.1 --processor cpu --py-version 2,3",
-            "IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.13.1 --processor gpu --py-version 2,3"
+            "IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --instance-type {cpu-instance-type} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.13.1 --processor cpu --py-version 2,3",
+            "IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.13.1 --processor gpu --py-version 2,3"
           ]
         }]' > deployments.json
 
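
Both versions of this tag list suffix every destination tag with '${CODEBUILD_BUILD_ID#*:}'. As a point of reference, the following minimal shell sketch shows that parameter expansion; the build ID value below is made up purely for illustration (CodeBuild build IDs typically take the form <project-name>:<build-uuid>).

# Illustration only: how the '${CODEBUILD_BUILD_ID#*:}' suffix in deployments.json expands.
# The build ID below is a made-up example value.
CODEBUILD_BUILD_ID='sagemaker-tensorflow-release:12345678-abcd-ef01-2345-6789abcdef01'
suffix="${CODEBUILD_BUILD_ID#*:}"   # strip everything up to and including the first ':'
echo "1.13.1-cpu-py2-${suffix}"     # -> 1.13.1-cpu-py2-12345678-abcd-ef01-2345-6789abcdef01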

buildspec.yml

Lines changed: 48 additions & 59 deletions
@@ -2,7 +2,11 @@ version: 0.2
 
 env:
   variables:
-    FRAMEWORK_VERSION: '1.15.0'
+    FRAMEWORK_VERSION: '1.13.1'
+    CPU_FRAMEWORK_BINARY: 'https://s3-us-west-2.amazonaws.com/tensorflow-aws/1.13/AmazonLinux/cpu/latest-patch-latest-patch/tensorflow-1.13.1-cp36-cp36m-linux_x86_64.whl'
+    CPU_PY_VERSION: '3'
+    GPU_FRAMEWORK_BINARY: 'https://s3-us-west-2.amazonaws.com/tensorflow-aws/1.13/AmazonLinux/gpu/latest-patch-latest-patch/tensorflow-1.13.1-cp36-cp36m-linux_x86_64.whl'
+    GPU_PY_VERSION: '3'
     ECR_REPO: 'sagemaker-test'
     GITHUB_REPO: 'sagemaker-tensorflow-container'
     SETUP_FILE: 'setup_cmds.sh'
@@ -30,95 +34,80 @@ phases:
       - tox -e py36,py27 test/unit
 
       # Create pip archive
-      - root_dir=$(pwd)
+      - build_dir="docker/$FRAMEWORK_VERSION"
       - build_id="$(echo $CODEBUILD_BUILD_ID | sed -e 's/:/-/g')"
      - python3 setup.py sdist
       - tar_name=$(ls dist)
-
-      # Find build artifacts
-      - build_artifacts=$root_dir/docker/artifacts
-
-      # build py2 images
-
-      # prepare build context
-      - build_dir="$root_dir/docker/$FRAMEWORK_VERSION/py2"
-      - cp $root_dir/dist/$tar_name $build_dir
-      - cp $build_artifacts/* $build_dir/
-      - cd $build_dir
+      - cp dist/$tar_name $build_dir
 
       # build cpu image
       - cpu_dockerfile="Dockerfile.cpu"
-      - CPU_TAG_PY2="$FRAMEWORK_VERSION-cpu-py2-$build_id"
-      - docker build -f $cpu_dockerfile -t $PREPROD_IMAGE:$CPU_TAG_PY2 .
 
-      # build gpu image
-      - gpu_dockerfile="Dockerfile.gpu"
-      - GPU_TAG_PY2="$FRAMEWORK_VERSION-gpu-py2-$build_id"
-      - docker build -f $gpu_dockerfile -t $PREPROD_IMAGE:$GPU_TAG_PY2 .
+      # Download framework binary
+      - cpu_fw_binary=$(basename $CPU_FRAMEWORK_BINARY)
+      - wget -O $build_dir/$cpu_fw_binary $CPU_FRAMEWORK_BINARY
 
-      # build py3 images
+      - CPU_TAG="$FRAMEWORK_VERSION-cpu-py$CPU_PY_VERSION-$build_id"
 
-      # prepare build context
-      - build_dir="$root_dir/docker/$FRAMEWORK_VERSION/py3"
-      - cp $root_dir/dist/$tar_name $build_dir
-      - cp $build_artifacts/* $build_dir/
       - cd $build_dir
-
-      # build cpu image
-      - cpu_dockerfile="Dockerfile.cpu"
-      - CPU_TAG_PY3="$FRAMEWORK_VERSION-cpu-py3-$build_id"
-      - docker build -f $cpu_dockerfile -t $PREPROD_IMAGE:$CPU_TAG_PY3 .
+      - docker build -f $cpu_dockerfile --build-arg framework_support_installable=$tar_name --build-arg py_version=$CPU_PY_VERSION --build-arg framework_installable=$cpu_fw_binary -t $PREPROD_IMAGE:$CPU_TAG .
+      - cd ../../
 
       # build gpu image
       - gpu_dockerfile="Dockerfile.gpu"
-      - GPU_TAG_PY3="$FRAMEWORK_VERSION-gpu-py3-$build_id"
-      - docker build -f $gpu_dockerfile -t $PREPROD_IMAGE:$GPU_TAG_PY3 .
+
+      # Download framework binary
+      - gpu_fw_binary=$(basename $GPU_FRAMEWORK_BINARY)
+      - wget -O $build_dir/$gpu_fw_binary $GPU_FRAMEWORK_BINARY
+
+      - GPU_TAG="$FRAMEWORK_VERSION-gpu-py$GPU_PY_VERSION-$build_id"
+
+      - cd $build_dir
+      - docker build -f $gpu_dockerfile --build-arg framework_support_installable=$tar_name --build-arg py_version=$GPU_PY_VERSION --build-arg framework_installable=$gpu_fw_binary -t $PREPROD_IMAGE:$GPU_TAG .
+      - cd ../../
 
       # push images to ecr
       - $(aws ecr get-login --registry-ids $ACCOUNT --no-include-email --region $AWS_DEFAULT_REGION)
-      - docker push $PREPROD_IMAGE:$CPU_TAG_PY2
-      - docker push $PREPROD_IMAGE:$GPU_TAG_PY2
-      - docker push $PREPROD_IMAGE:$CPU_TAG_PY3
-      - docker push $PREPROD_IMAGE:$GPU_TAG_PY3
+      - docker push $PREPROD_IMAGE:$CPU_TAG
+      - docker push $PREPROD_IMAGE:$GPU_TAG
 
       # launch remote gpu instance
       - instance_type='p2.xlarge'
       - create-key-pair
       - launch-ec2-instance --instance-type $instance_type --ami-name dlami-ubuntu
 
       # run cpu integration tests
-      - py3_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $CPU_TAG_PY2 --framework-version $FRAMEWORK_VERSION --py-version 2 --processor cpu"
-      - py2_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $CPU_TAG_PY3 --framework-version $FRAMEWORK_VERSION --py-version 3 --processor cpu"
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
-      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
+      - |
+        if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
+          pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $CPU_TAG --framework-version $FRAMEWORK_VERSION --py-version $CPU_PY_VERSION --processor cpu
+        else
+          echo "skipping cpu integration tests"
+        fi
 
       # run gpu integration tests
-      - printf "$SETUP_CMDS" > $SETUP_FILE
-      - cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $GPU_TAG_PY2 --framework-version $FRAMEWORK_VERSION --py-version 2 --processor gpu"
-      - py3_cmd="remote-test --github-repo $GITHUB_REPO --test-cmd \"$cmd\" --setup-file $SETUP_FILE --pr-number \"$PR_NUM\""
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
-
-      - cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $GPU_TAG_PY3 --framework-version $FRAMEWORK_VERSION --py-version 3 --processor gpu"
-      - py2_cmd="remote-test --github-repo $GITHUB_REPO --test-cmd \"$cmd\" --setup-file $SETUP_FILE --pr-number \"$PR_NUM\""
-      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
+      - |
+        if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
+          printf "$SETUP_CMDS" > $SETUP_FILE
+          cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $GPU_TAG --framework-version $FRAMEWORK_VERSION --py-version $GPU_PY_VERSION --processor gpu"
+          remote-test --github-repo $GITHUB_REPO --test-cmd "$cmd" --setup-file $SETUP_FILE --pr-number "$PR_NUM"
+        else
+          echo "skipping gpu integration tests"
+        fi
 
       # run sagemaker tests
-      - test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $CPU_TAG_PY2 --py-version 2 --processor cpu"
-      - execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
-      - test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $GPU_TAG_PY2 --py-version 2 --processor gpu"
-      - execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
-      - test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $CPU_TAG_PY3 --py-version 3 --processor cpu"
-      - execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
-      - test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $GPU_TAG_PY3 --py-version 3 --processor gpu"
-      - execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
+      - |
+        if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
+          pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $CPU_TAG --py-version $CPU_PY_VERSION --processor cpu
+          pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $GPU_TAG --py-version $GPU_PY_VERSION --processor gpu
        else
          echo "skipping sagemaker tests"
        fi
 
     finally:
       # shut down remote gpu instance
       - cleanup-gpu-instances
       - cleanup-key-pairs
 
       # remove ecr image
-      - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$CPU_TAG_PY2
-      - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$GPU_TAG_PY2
-      - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$CPU_TAG_PY3
-      - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$GPU_TAG_PY3
+      - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$CPU_TAG
+      - aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$GPU_TAG
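
For context, the restored build phase for the CPU image boils down to roughly the following shell sequence. This is an illustrative sketch, not part of the diff: $PREPROD_IMAGE and the build id are supplied by the CodeBuild project at run time, so placeholder values are used here.

# Rough standalone rendering of the restored CPU image build steps.
# PREPROD_IMAGE and build_id are placeholders; CodeBuild supplies the real values.
FRAMEWORK_VERSION='1.13.1'
CPU_PY_VERSION='3'
CPU_FRAMEWORK_BINARY='https://s3-us-west-2.amazonaws.com/tensorflow-aws/1.13/AmazonLinux/cpu/latest-patch-latest-patch/tensorflow-1.13.1-cp36-cp36m-linux_x86_64.whl'
PREPROD_IMAGE='sagemaker-test'   # placeholder image repository
build_id='local-test'            # placeholder for the sanitized $CODEBUILD_BUILD_ID

build_dir="docker/$FRAMEWORK_VERSION"
python3 setup.py sdist           # package the framework support code
tar_name=$(ls dist)
cp "dist/$tar_name" "$build_dir"

cpu_fw_binary=$(basename "$CPU_FRAMEWORK_BINARY")
wget -O "$build_dir/$cpu_fw_binary" "$CPU_FRAMEWORK_BINARY"   # fetch the TF 1.13.1 wheel

CPU_TAG="$FRAMEWORK_VERSION-cpu-py$CPU_PY_VERSION-$build_id"
cd "$build_dir"
docker build -f Dockerfile.cpu \
    --build-arg framework_support_installable="$tar_name" \
    --build-arg py_version="$CPU_PY_VERSION" \
    --build-arg framework_installable="$cpu_fw_binary" \
    -t "$PREPROD_IMAGE:$CPU_TAG" .
cd ../../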

docker/1.15.0/py2/Dockerfile.cpu

Lines changed: 0 additions & 125 deletions
This file was deleted.
