Merge 'master' branch into 'tf-2' branch. #279

Merged: 11 commits merged into tf-2 from master on Feb 10, 2020

4 changes: 4 additions & 0 deletions README.rst
@@ -56,6 +56,10 @@ The Docker images are built from the Dockerfiles specified in
The Docker files are grouped based on TensorFlow version and separated
based on Python version and processor type.

The Docker files for TensorFlow 2.0 are available in the
`tf-2 <https://github.com/aws/sagemaker-tensorflow-container/tree/tf-2>`__ branch, in
`docker/2.0.0/ <https://github.com/aws/sagemaker-tensorflow-container/tree/tf-2/docker/2.0.0>`__.

The Docker images, used to run training & inference jobs, are built from
both corresponding "base" and "final" Dockerfiles.

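A minimal sketch of fetching the tf-2 branch and locating the TensorFlow 2.0 Dockerfiles the README addition points to (repository URL as linked above; the local clone path is illustrative):

    # Clone the repository and switch to the tf-2 branch
    git clone https://github.com/aws/sagemaker-tensorflow-container.git
    cd sagemaker-tensorflow-container
    git checkout tf-2
    # The TensorFlow 2.0 Dockerfiles live under docker/2.0.0/
    ls docker/2.0.0/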
22 changes: 11 additions & 11 deletions buildspec-release.yml
@@ -2,7 +2,7 @@ version: 0.2

env:
variables:
FRAMEWORK_VERSION: '1.13.1'
FRAMEWORK_VERSION: '1.15.0'
GPU_INSTANCE_TYPE: 'ml.p2.xlarge'
SETUP_FILE: 'setup_cmds.sh'
SETUP_CMDS: '#!/bin/bash\npip install --upgrade pip\npip install -U -e .\npip install -U -e .[test]'
@@ -60,21 +60,21 @@ phases:
echo '[{
"repository": "sagemaker-tensorflow-scriptmode",
"tags": [{
"source": "1.13.1-cpu-py2",
"dest": ["1.13.1-cpu-py2", "1.13-cpu-py2", "1.13.1-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
"source": "1.15.0-cpu-py2",
"dest": ["1.15.0-cpu-py2", "1.15-cpu-py2", "1.15.0-cpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
},{
"source": "1.13.1-cpu-py3",
"dest": ["1.13.1-cpu-py3", "1.13-cpu-py3", "1.13.1-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
"source": "1.15.0-cpu-py3",
"dest": ["1.15.0-cpu-py3", "1.15-cpu-py3", "1.15.0-cpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
},{
"source": "1.13.1-gpu-py2",
"dest": ["1.13.1-gpu-py2", "1.13-gpu-py2", "1.13.1-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
"source": "1.15.0-gpu-py2",
"dest": ["1.15.0-gpu-py2", "1.15-gpu-py2", "1.15.0-gpu-py2-'${CODEBUILD_BUILD_ID#*:}'"]
},{
"source": "1.13.1-gpu-py3",
"dest": ["1.13.1-gpu-py3", "1.13-gpu-py3", "1.13.1-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
"source": "1.15.0-gpu-py3",
"dest": ["1.15.0-gpu-py3", "1.15-gpu-py3", "1.15.0-gpu-py3-'${CODEBUILD_BUILD_ID#*:}'"]
}],
"test": [
"IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --instance-type {cpu-instance-type} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.13.1 --processor cpu --py-version 2,3",
"IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.13.1 --processor gpu --py-version 2,3"
"IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --instance-type {cpu-instance-type} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.15.0 --processor cpu --py-version 2,3",
"IGNORE_COVERAGE=- tox -e py36 -- -m deploy_test test/integration/sagemaker -n 4 --region {region} --account-id {aws-id} --docker-base-name sagemaker-tensorflow-scriptmode --framework-version 1.15.0 --processor gpu --py-version 2,3"
]
}]' > deployments.json

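The build-specific suffix in the "dest" tags above comes from the shell expansion ${CODEBUILD_BUILD_ID#*:}, which strips everything up to and including the first colon of the CodeBuild build ID. A small illustration with a made-up build ID:

    # Hypothetical build ID; real IDs have the form "<project-name>:<uuid>"
    CODEBUILD_BUILD_ID='sagemaker-tf-release:0f1a2b3c-4d5e-6789-abcd-ef0123456789'
    echo "${CODEBUILD_BUILD_ID#*:}"
    # -> 0f1a2b3c-4d5e-6789-abcd-ef0123456789
    echo "1.15.0-cpu-py2-${CODEBUILD_BUILD_ID#*:}"
    # -> 1.15.0-cpu-py2-0f1a2b3c-4d5e-6789-abcd-ef0123456789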
107 changes: 59 additions & 48 deletions buildspec.yml
@@ -2,11 +2,7 @@ version: 0.2

env:
variables:
FRAMEWORK_VERSION: '1.13.1'
CPU_FRAMEWORK_BINARY: 'https://s3-us-west-2.amazonaws.com/tensorflow-aws/1.13/AmazonLinux/cpu/latest-patch-latest-patch/tensorflow-1.13.1-cp36-cp36m-linux_x86_64.whl'
CPU_PY_VERSION: '3'
GPU_FRAMEWORK_BINARY: 'https://s3-us-west-2.amazonaws.com/tensorflow-aws/1.13/AmazonLinux/gpu/latest-patch-latest-patch/tensorflow-1.13.1-cp36-cp36m-linux_x86_64.whl'
GPU_PY_VERSION: '3'
FRAMEWORK_VERSION: '1.15.0'
ECR_REPO: 'sagemaker-test'
GITHUB_REPO: 'sagemaker-tensorflow-container'
SETUP_FILE: 'setup_cmds.sh'
@@ -34,80 +30,95 @@ phases:
- tox -e py36,py27 test/unit

# Create pip archive
- build_dir="docker/$FRAMEWORK_VERSION"
- root_dir=$(pwd)
- build_id="$(echo $CODEBUILD_BUILD_ID | sed -e 's/:/-/g')"
- python3 setup.py sdist
- tar_name=$(ls dist)
- cp dist/$tar_name $build_dir

# build cpu image
- cpu_dockerfile="Dockerfile.cpu"
# Find build artifacts
- build_artifacts=$root_dir/docker/artifacts

# Download framework binary
- cpu_fw_binary=$(basename $CPU_FRAMEWORK_BINARY)
- wget -O $build_dir/$cpu_fw_binary $CPU_FRAMEWORK_BINARY

- CPU_TAG="$FRAMEWORK_VERSION-cpu-py$CPU_PY_VERSION-$build_id"
# build py2 images

# prepare build context
- build_dir="$root_dir/docker/$FRAMEWORK_VERSION/py2"
- cp $root_dir/dist/$tar_name $build_dir
- cp $build_artifacts/* $build_dir/
- cd $build_dir
- docker build -f $cpu_dockerfile --build-arg framework_support_installable=$tar_name --build-arg py_version=$CPU_PY_VERSION --build-arg framework_installable=$cpu_fw_binary -t $PREPROD_IMAGE:$CPU_TAG .
- cd ../../

# build cpu image
- cpu_dockerfile="Dockerfile.cpu"
- CPU_TAG_PY2="$FRAMEWORK_VERSION-cpu-py2-$build_id"
- docker build -f $cpu_dockerfile -t $PREPROD_IMAGE:$CPU_TAG_PY2 .

# build gpu image
- gpu_dockerfile="Dockerfile.gpu"
- GPU_TAG_PY2="$FRAMEWORK_VERSION-gpu-py2-$build_id"
- docker build -f $gpu_dockerfile -t $PREPROD_IMAGE:$GPU_TAG_PY2 .

# Download framework binary
- gpu_fw_binary=$(basename $GPU_FRAMEWORK_BINARY)
- wget -O $build_dir/$gpu_fw_binary $GPU_FRAMEWORK_BINARY

- GPU_TAG="$FRAMEWORK_VERSION-gpu-py$GPU_PY_VERSION-$build_id"
# build py3 images

# prepare build context
- build_dir="$root_dir/docker/$FRAMEWORK_VERSION/py3"
- cp $root_dir/dist/$tar_name $build_dir
- cp $build_artifacts/* $build_dir/
- cd $build_dir
- docker build -f $gpu_dockerfile --build-arg framework_support_installable=$tar_name --build-arg py_version=$GPU_PY_VERSION --build-arg framework_installable=$gpu_fw_binary -t $PREPROD_IMAGE:$GPU_TAG .
- cd ../../

# build cpu image
- cpu_dockerfile="Dockerfile.cpu"
- CPU_TAG_PY3="$FRAMEWORK_VERSION-cpu-py3-$build_id"
- docker build -f $cpu_dockerfile -t $PREPROD_IMAGE:$CPU_TAG_PY3 .

# build gpu image
- gpu_dockerfile="Dockerfile.gpu"
- GPU_TAG_PY3="$FRAMEWORK_VERSION-gpu-py3-$build_id"
- docker build -f $gpu_dockerfile -t $PREPROD_IMAGE:$GPU_TAG_PY3 .

# push images to ecr
- $(aws ecr get-login --registry-ids $ACCOUNT --no-include-email --region $AWS_DEFAULT_REGION)
- docker push $PREPROD_IMAGE:$CPU_TAG
- docker push $PREPROD_IMAGE:$GPU_TAG
- docker push $PREPROD_IMAGE:$CPU_TAG_PY2
- docker push $PREPROD_IMAGE:$GPU_TAG_PY2
- docker push $PREPROD_IMAGE:$CPU_TAG_PY3
- docker push $PREPROD_IMAGE:$GPU_TAG_PY3

# launch remote gpu instance
- instance_type='p2.xlarge'
- create-key-pair
- launch-ec2-instance --instance-type $instance_type --ami-name dlami-ubuntu

# run cpu integration tests
- |
if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $CPU_TAG --framework-version $FRAMEWORK_VERSION --py-version $CPU_PY_VERSION --processor cpu
else
echo "skipping cpu integration tests"
fi
- py3_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $CPU_TAG_PY2 --framework-version $FRAMEWORK_VERSION --py-version 2 --processor cpu"
- py2_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $CPU_TAG_PY3 --framework-version $FRAMEWORK_VERSION --py-version 3 --processor cpu"
- execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
- execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"

# run gpu integration tests
- |
if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
printf "$SETUP_CMDS" > $SETUP_FILE
cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $GPU_TAG --framework-version $FRAMEWORK_VERSION --py-version $GPU_PY_VERSION --processor gpu"
remote-test --github-repo $GITHUB_REPO --test-cmd "$cmd" --setup-file $SETUP_FILE --pr-number "$PR_NUM"
else
echo "skipping gpu integration tests"
fi
- printf "$SETUP_CMDS" > $SETUP_FILE
- cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $GPU_TAG_PY2 --framework-version $FRAMEWORK_VERSION --py-version 2 --processor gpu"
- py3_cmd="remote-test --github-repo $GITHUB_REPO --test-cmd \"$cmd\" --setup-file $SETUP_FILE --pr-number \"$PR_NUM\""
- execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"

- cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --tag $GPU_TAG_PY3 --framework-version $FRAMEWORK_VERSION --py-version 3 --processor gpu"
- py2_cmd="remote-test --github-repo $GITHUB_REPO --test-cmd \"$cmd\" --setup-file $SETUP_FILE --pr-number \"$PR_NUM\""
- execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"

# run sagemaker tests
- |
if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $CPU_TAG --py-version $CPU_PY_VERSION --processor cpu
pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $GPU_TAG --py-version $GPU_PY_VERSION --processor gpu
else
echo "skipping sagemaker tests"
fi
- test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $CPU_TAG_PY2 --py-version 2 --processor cpu"
- execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
- test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $GPU_TAG_PY2 --py-version 2 --processor gpu"
- execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
- test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $CPU_TAG_PY3 --py-version 3 --processor cpu"
- execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"
- test_cmd="pytest test/integration/sagemaker -n 8 --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --account-id $ACCOUNT --tag $GPU_TAG_PY3 --py-version 3 --processor gpu"
- execute-command-if-has-matching-changes "$test_cmd" "test/" "src/*.py" "setup.py" "docker/*" "buildspec.yml"

finally:
# shut down remote gpu instance
- cleanup-gpu-instances
- cleanup-key-pairs

# remove ecr image
- aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$CPU_TAG
- aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$GPU_TAG
- aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$CPU_TAG_PY2
- aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$GPU_TAG_PY2
- aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$CPU_TAG_PY3
- aws ecr batch-delete-image --repository-name $ECR_REPO --region $AWS_DEFAULT_REGION --image-ids imageTag=$GPU_TAG_PY3
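Taken together, the build steps above correspond roughly to the following when reproduced by hand for a single image (PREPROD_IMAGE and build_id values are illustrative; the buildspec repeats the same steps for the py2/py3 and cpu/gpu variants):

    # Build the framework support package once
    FRAMEWORK_VERSION=1.15.0
    python3 setup.py sdist
    tar_name=$(ls dist)

    # Prepare the py3 build context: support package plus shared artifacts
    build_dir="docker/$FRAMEWORK_VERSION/py3"
    cp dist/$tar_name $build_dir
    cp docker/artifacts/* $build_dir/
    cd $build_dir

    # Build and tag the CPU py3 image
    PREPROD_IMAGE=sagemaker-test        # illustrative image name
    build_id=local-build
    CPU_TAG_PY3="$FRAMEWORK_VERSION-cpu-py3-$build_id"
    docker build -f Dockerfile.cpu -t $PREPROD_IMAGE:$CPU_TAG_PY3 .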
125 changes: 125 additions & 0 deletions docker/1.15.0/py2/Dockerfile.cpu
@@ -0,0 +1,125 @@
FROM ubuntu:18.04

LABEL maintainer="Amazon AI"

# Prevent the docker build from being interrupted by prompts for user interaction
ENV DEBIAN_FRONTEND=noninteractive
ENV DEBCONF_NONINTERACTIVE_SEEN=true
# Set environment variables for MKL
# https://www.tensorflow.org/performance/performance_guide#tensorflow_with_intel%C2%AE_mkl_dnn
ENV KMP_AFFINITY=granularity=fine,compact,1,0
ENV KMP_BLOCKTIME=1
ENV KMP_SETTINGS=0
# Python won’t try to write .pyc or .pyo files on the import of source modules
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# See http://bugs.python.org/issue19846
ENV PYTHONIOENCODING=UTF-8
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8
# Specify the location of the module that contains the training logic for SageMaker
# https://docs.aws.amazon.com/sagemaker/latest/dg/docker-container-environmental-variables-entrypoint.html
ENV SAGEMAKER_TRAINING_MODULE=sagemaker_tensorflow_container.training:main

# Define framework-related package sources
ARG FRAMEWORK_SUPPORT_INSTALLABLE=sagemaker_tensorflow_container*.tar.gz
ARG TF_URL=https://tensorflow-aws.s3-us-west-2.amazonaws.com/1.15/AmazonLinux/cpu/final/tensorflow-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl

RUN apt-get update \
&& apt-get install -y --no-install-recommends \
software-properties-common \
build-essential \
openssh-client \
openssh-server \
ca-certificates \
curl \
git \
wget \
vim \
zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*

# Install Open MPI
RUN mkdir /tmp/openmpi \
&& cd /tmp/openmpi \
&& curl -fSsL -O https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.1.tar.gz \
&& tar zxf openmpi-4.0.1.tar.gz \
&& cd openmpi-4.0.1 \
&& ./configure --enable-orterun-prefix-by-default \
&& make -j $(nproc) all \
&& make install \
&& ldconfig \
&& rm -rf /tmp/openmpi

# Create a wrapper for OpenMPI to allow running as root by default
RUN mv /usr/local/bin/mpirun /usr/local/bin/mpirun.real \
&& echo '#!/bin/bash' > /usr/local/bin/mpirun \
&& echo 'mpirun.real --allow-run-as-root "$@"' >> /usr/local/bin/mpirun \
&& chmod a+x /usr/local/bin/mpirun

RUN echo "hwloc_base_binding_policy = none" >> /usr/local/etc/openmpi-mca-params.conf \
&& echo "rmaps_base_mapping_policy = slot" >> /usr/local/etc/openmpi-mca-params.conf

ENV LD_LIBRARY_PATH=/usr/local/openmpi/lib:$LD_LIBRARY_PATH
ENV PATH=/usr/local/openmpi/bin/:$PATH

# SSH login fix. Otherwise user is kicked off after login
RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd

# Create SSH key.
RUN mkdir -p /root/.ssh/ \
&& mkdir -p /var/run/sshd \
&& ssh-keygen -q -t rsa -N '' -f /root/.ssh/id_rsa \
&& cp /root/.ssh/id_rsa.pub /root/.ssh/authorized_keys \
&& printf "Host *\n StrictHostKeyChecking no\n" >> /root/.ssh/config

WORKDIR /

RUN apt-get update \
&& apt-get install -y \
python \
python-pip

COPY $FRAMEWORK_SUPPORT_INSTALLABLE .

RUN pip --no-cache-dir install --upgrade \
pip \
setuptools

# Some TF tools expect a "python" binary
RUN ln -s $(which python) /usr/local/bin/python

RUN pip install --no-cache-dir -U \
numpy==1.16.5 \
scipy==1.2.2 \
scikit-learn==0.20.3 \
pandas==0.24.2 \
Pillow==6.2.1 \
h5py==2.9.0 \
keras_applications==1.0.8 \
keras_preprocessing==1.1.0 \
requests==2.22.0 \
keras==2.3.1 \
mpi4py==3.0.2 \
"cryptography>=2.3" \
"sagemaker-tensorflow>=1.15,<1.16" \
# Install TensorFlow separately at the end so that its version is not overwritten by the dependencies above
&& pip install --force-reinstall --no-cache-dir -U \
${TF_URL} \
&& pip install --no-cache-dir -U \
$FRAMEWORK_SUPPORT_INSTALLABLE \
awscli==1.17.7 \
&& rm -f $FRAMEWORK_SUPPORT_INSTALLABLE \
&& pip install --no-cache-dir -U \
horovod==0.18.2

COPY dockerd-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
&& chmod +x /usr/local/bin/deep_learning_container.py

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/tensorflow/license.txt -o /license.txt

ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["bin/bash"]