diff --git a/buildspec.yml b/buildspec.yml
index 1d4e9adf..d7139c89 100644
--- a/buildspec.yml
+++ b/buildspec.yml
@@ -2,7 +2,7 @@ version: 0.2
 
 env:
   variables:
-    FRAMEWORK_VERSION: '1.3.1'
+    FRAMEWORK_VERSION: '1.4.0'
     CPU_PY2_VERSION: '2'
     CPU_PY3_VERSION: '3'
     CPU_INSTANCE_TYPE: 'ml.c4.xlarge'
diff --git a/docker/1.4.0/py2/Dockerfile.cpu b/docker/1.4.0/py2/Dockerfile.cpu
new file mode 100644
index 00000000..9b04e0ff
--- /dev/null
+++ b/docker/1.4.0/py2/Dockerfile.cpu
@@ -0,0 +1,89 @@
+FROM ubuntu:16.04
+
+LABEL maintainer="Amazon AI"
+LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
+LABEL com.amazonaws.sagemaker.capabilities.multi-models=true
+
+ARG PYTHON_VERSION=2.7
+ARG PYTORCH_VERSION=1.4.0
+ARG TORCHVISION_VERSION=0.5.0
+ARG MMS_VERSION=1.0.8
+
+# See http://bugs.python.org/issue19846
+ENV LANG C.UTF-8
+ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
+ENV PATH /opt/conda/bin:$PATH
+ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
+ENV TEMP=/home/model-server/tmp
+
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+    build-essential \
+    ca-certificates \
+    cmake \
+    curl \
+    git \
+    jq \
+    libgl1-mesa-glx \
+    libglib2.0-0 \
+    libsm6 \
+    libxext6 \
+    libxrender-dev \
+    openjdk-8-jdk-headless \
+    vim \
+    wget \
+    zlib1g-dev
+
+
+RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
+ && chmod +x ~/miniconda.sh \
+ && ~/miniconda.sh -b -p /opt/conda \
+ && rm ~/miniconda.sh \
+ && /opt/conda/bin/conda update conda \
+ && /opt/conda/bin/conda install -y \
+    python=$PYTHON_VERSION \
+    cython==0.29.12 \
+    ipython==5.8.0 \
+    mkl-include==2019.4 \
+    mkl==2019.4 \
+    numpy==1.16.4 \
+    scipy==1.2.1 \
+    typing==3.7.4 \
+ && /opt/conda/bin/conda clean -ya
+
+RUN conda install -c \
+    conda-forge \
+    awscli==1.16.296 \
+    opencv==4.0.1 \
+ && conda install -y \
+    scikit-learn==0.20.3 \
+    pandas==0.24.2 \
+    pillow==6.2.1 \
+    h5py==2.9.0 \
+    requests==2.22.0 \
+ && conda install \
+    pytorch==$PYTORCH_VERSION \
+    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
+ && conda clean -ya \
+ && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
+ && pip install mxnet-model-server==$MMS_VERSION
+
+RUN useradd -m model-server \
+ && mkdir -p /home/model-server/tmp \
+ && chown -R model-server /home/model-server
+
+COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
+COPY config.properties /home/model-server
+
+RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
+
+COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir \
+    /sagemaker_pytorch_inference.tar.gz \
+ && rm /sagemaker_pytorch_inference.tar.gz
+
+RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt
+
+EXPOSE 8080 8081
+ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
+CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
\ No newline at end of file
diff --git a/docker/1.4.0/py2/Dockerfile.gpu b/docker/1.4.0/py2/Dockerfile.gpu
new file mode 100644
index 00000000..e37f2781
--- /dev/null
+++ b/docker/1.4.0/py2/Dockerfile.gpu
@@ -0,0 +1,113 @@
+FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu16.04
+# NCCL_VERSION=2.4.7, CUDNN_VERSION=7.6.2.24
+LABEL maintainer="Amazon AI"
+LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
+
+ARG PYTHON_VERSION=2.7
+ARG PYTORCH_VERSION=1.4.0
+ARG TORCHVISION_VERSION=0.5.0
+ARG MMS_VERSION=1.0.8
+
+# See http://bugs.python.org/issue19846
+ENV LANG C.UTF-8
+ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
+ENV PATH /opt/conda/bin:$PATH
+ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
+ENV TEMP=/home/model-server/tmp
+
+RUN apt-get update \
+ && apt-get install -y --allow-downgrades --allow-change-held-packages --no-install-recommends \
+    build-essential \
+    build-essential \
+    ca-certificates \
+    cmake \
+    curl \
+    git \
+    jq \
+    libgl1-mesa-glx \
+    libglib2.0-0 \
+    libgomp1 \
+    libibverbs-dev \
+    libsm6 \
+    libxext6 \
+    libxrender-dev \
+    openjdk-8-jdk-headless \
+    vim \
+    wget \
+    zlib1g-dev
+
+# Install OpenSSH, Allow OpenSSH to talk to containers without asking for confirmation
+RUN apt-get install -y --no-install-recommends \
+    openssh-client openssh-server \
+ && mkdir -p /var/run/sshd \
+ && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
+ && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
+ && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config
+
+RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
+ && chmod +x ~/miniconda.sh \
+ && ~/miniconda.sh -b -p /opt/conda \
+ && rm ~/miniconda.sh \
+ && /opt/conda/bin/conda update conda \
+ && /opt/conda/bin/conda install -y \
+    python=$PYTHON_VERSION \
+    cython==0.29.12 \
+    ipython==5.8.0 \
+    mkl-include==2019.4 \
+    mkl==2019.4 \
+    numpy==1.16.4 \
+    scipy==1.2.1 \
+    typing==3.7.4 \
+ && /opt/conda/bin/conda clean -ya
+
+
+RUN conda install -c \
+    pytorch magma-cuda101==2.5.1 \
+ && conda install -c \
+    conda-forge \
+    awscli==1.16.296 \
+    opencv==4.0.1 \
+ && conda install -y scikit-learn==0.20.3 \
+    h5py==2.9.0 \
+    pandas==0.24.2 \
+    pillow==6.2.1 \
+    requests==2.22.0 \
+ && conda install -c \
+    pytorch \
+    cudatoolkit=10.1 \
+    pytorch==$PYTORCH_VERSION \
+    torchvision==$TORCHVISION_VERSION \
+ && conda clean -ya \
+ && /opt/conda/bin/conda config --set ssl_verify False \
+ && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
+ && pip install mxnet-model-server==$MMS_VERSION
+
+RUN useradd -m model-server \
+ && mkdir -p /home/model-server/tmp \
+ && chown -R model-server /home/model-server
+
+COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
+COPY config.properties /home/model-server
+
+RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
+
+# Install OpenSSH for MPI to communicate between containers, Allow OpenSSH to talk to containers without asking for confirmation
+RUN apt-get install -y --no-install-recommends \
+    openssh-client openssh-server \
+ && mkdir -p /var/run/sshd \
+ && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
+ && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
+ && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config
+
+# RUN pip install --no-cache-dir 'opencv-python>=4.0,<4.1'
+
+COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir \
+    /sagemaker_pytorch_inference.tar.gz \
+ && rm /sagemaker_pytorch_inference.tar.gz
+
+RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt
+
+EXPOSE 8080 8081
+ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
+CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
\ No newline at end of file
diff --git a/docker/1.4.0/py3/Dockerfile.cpu b/docker/1.4.0/py3/Dockerfile.cpu
new file mode 100644
index 00000000..532681c9
--- /dev/null
+++ b/docker/1.4.0/py3/Dockerfile.cpu
@@ -0,0 +1,88 @@
+FROM ubuntu:16.04
+
+LABEL maintainer="Amazon AI"
+LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
+LABEL com.amazonaws.sagemaker.capabilities.multi-models=true
+
+ARG PYTHON_VERSION=3.6.6
+ARG PYTORCH_VERSION=1.4.0
+ARG TORCHVISION_VERSION=0.5.0
+ARG MMS_VERSION=1.0.8
+
+# See http://bugs.python.org/issue19846
+ENV LANG C.UTF-8
+ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
+ENV PATH /opt/conda/bin:$PATH
+ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
+ENV TEMP=/home/model-server/tmp
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    ca-certificates \
+    cmake \
+    curl \
+    git \
+    jq \
+    libgl1-mesa-glx \
+    libglib2.0-0 \
+    libsm6 \
+    libxext6 \
+    libxrender-dev \
+    openjdk-8-jdk-headless \
+    vim \
+    wget \
+    zlib1g-dev
+
+RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
+ && chmod +x ~/miniconda.sh \
+ && ~/miniconda.sh -b -p /opt/conda \
+ && rm ~/miniconda.sh \
+ && /opt/conda/bin/conda update conda \
+ && /opt/conda/bin/conda install -y \
+    python=$PYTHON_VERSION \
+    cython==0.29.12 \
+    ipython==7.7.0 \
+    mkl-include==2019.4 \
+    mkl==2019.4 \
+    numpy==1.16.4 \
+    scipy==1.3.0 \
+    typing==3.6.4 \
+ && /opt/conda/bin/conda clean -ya
+
+RUN conda install -c \
+    conda-forge \
+    awscli==1.16.296 \
+    opencv==4.0.1 \
+ && conda install -y \
+    scikit-learn==0.21.2 \
+    pandas==0.25.0 \
+    pillow==6.2.1 \
+    h5py==2.9.0 \
+    requests==2.22.0 \
+ && conda install \
+    pytorch==$PYTORCH_VERSION \
+    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
+ && conda clean -ya \
+ && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
+ && ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
+ && pip install mxnet-model-server==$MMS_VERSION
+
+RUN useradd -m model-server \
+ && mkdir -p /home/model-server/tmp \
+ && chown -R model-server /home/model-server
+
+COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
+COPY config.properties /home/model-server
+
+RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
+
+COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir \
+    /sagemaker_pytorch_inference.tar.gz \
+ && rm /sagemaker_pytorch_inference.tar.gz
+
+RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt
+
+EXPOSE 8080 8081
+ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
+CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
\ No newline at end of file
diff --git a/docker/1.4.0/py3/Dockerfile.gpu b/docker/1.4.0/py3/Dockerfile.gpu
new file mode 100644
index 00000000..d8dec111
--- /dev/null
+++ b/docker/1.4.0/py3/Dockerfile.gpu
@@ -0,0 +1,105 @@
+FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu16.04
+# NCCL_VERSION=2.4.7, CUDNN_VERSION=7.6.2.24
+LABEL maintainer="Amazon AI"
+LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
+
+# Add arguments for the Python, PyTorch, TorchVision and MMS versions
+ARG PYTHON_VERSION=3.6.6
+ARG PYTORCH_VERSION=1.4.0
+ARG TORCHVISION_VERSION=0.5.0
+ARG MMS_VERSION=1.0.8
+
+# See http://bugs.python.org/issue19846
+ENV LANG C.UTF-8
+ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
+ENV PATH /opt/conda/bin:$PATH
+ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
+ENV TEMP=/home/model-server/tmp
+
+RUN apt-get update \
+ && apt-get install -y --allow-downgrades --allow-change-held-packages --no-install-recommends \
+    build-essential \
+    ca-certificates \
+    cmake \
+    curl \
+    git \
+    jq \
+    libgl1-mesa-glx \
+    libglib2.0-0 \
+    libgomp1 \
+    libibverbs-dev \
+    libsm6 \
+    libxext6 \
+    libxrender-dev \
+    openjdk-8-jdk-headless \
+    vim \
+    wget \
+    zlib1g-dev
+
+# Install OpenSSH. Allow OpenSSH to talk to containers without asking for confirmation
+RUN apt-get install -y --no-install-recommends \
+    openssh-client \
+    openssh-server \
+ && mkdir -p /var/run/sshd \
+ && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
+ && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
+ && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config
+
+RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
+ && chmod +x ~/miniconda.sh \
+ && ~/miniconda.sh -b -p /opt/conda \
+ && rm ~/miniconda.sh \
+ && /opt/conda/bin/conda update conda \
+ && /opt/conda/bin/conda install -y \
+    python=$PYTHON_VERSION \
+    cython==0.29.12 \
+    ipython==7.7.0 \
+    mkl-include==2019.4 \
+    mkl==2019.4 \
+    numpy==1.16.4 \
+    scipy==1.3.0 \
+    typing==3.6.4 \
+ && /opt/conda/bin/conda clean -ya
+
+RUN conda install -c \
+    pytorch magma-cuda101==2.5.1 \
+ && conda install -c \
+    conda-forge \
+    awscli==1.16.296 \
+    opencv==4.0.1 \
+ && conda install -y \
+    scikit-learn==0.21.2 \
+    pandas==0.25.0 \
+    pillow==6.2.1 \
+    h5py==2.9.0 \
+    requests==2.22.0 \
+ && conda install -c \
+    pytorch \
+    pytorch==$PYTORCH_VERSION \
+    torchvision==$TORCHVISION_VERSION \
+    cudatoolkit=10.1 \
+ && conda clean -ya \
+ && /opt/conda/bin/conda config --set ssl_verify False \
+ && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
+ && ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
+ && pip install mxnet-model-server==$MMS_VERSION
+
+RUN useradd -m model-server \
+ && mkdir -p /home/model-server/tmp \
+ && chown -R model-server /home/model-server
+
+COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
+COPY config.properties /home/model-server
+
+RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
+
+COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir \
+    /sagemaker_pytorch_inference.tar.gz \
+ && rm /sagemaker_pytorch_inference.tar.gz
+
+RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt
+
+EXPOSE 8080 8081
+ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
+CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
\ No newline at end of file
diff --git a/setup.py b/setup.py
index b743ec29..48e7b7cf 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ def read(fname):
 
 setup(
     name='sagemaker_pytorch_inference',
-    version='1.3',
+    version='1.4',
     description='Open source library for creating PyTorch containers to run on Amazon SageMaker.',
 
     packages=find_packages(where='src', exclude=('test',)),
@@ -47,12 +47,12 @@ def read(fname):
         'Programming Language :: Python :: 3.6',
     ],
     install_requires=['numpy==1.16.4', 'Pillow==6.2.0', 'retrying==1.3.3', 'sagemaker-containers==2.5.4',
-                      'six==1.12.0', 'torch==1.3.1', 'requests_mock==1.6.0', 'sagemaker-inference==1.1.2',
+                      'six==1.12.0', 'torch==1.4.0', 'requests_mock==1.6.0', 'sagemaker-inference==1.1.2',
                       'retrying==1.3.3'],
     extras_require={
         'test': ['boto3==1.10.32', 'coverage==4.5.3', 'docker-compose==1.23.2', 'flake8==3.7.7', 'Flask==1.1.1',
                  'mock==2.0.0', 'pytest==4.4.0', 'pytest-cov==2.7.1', 'pytest-xdist==1.28.0', 'PyYAML==3.10',
-                 'sagemaker==1.48.0', 'requests==2.20.0', 'torchvision==0.4.2', 'tox==3.7.0', 'requests_mock==1.6.0']
+                 'sagemaker==1.48.0', 'requests==2.20.0', 'torchvision==0.5.0', 'tox==3.7.0', 'requests_mock==1.6.0']
     },
 
     entry_points={