
Commit e110e37

Adding changes for PyTorch 1.4.0 DLC (#37)
* adding 1.4 dockerfiles, update setup and buildspec
* fix version
* bumping cuda to 10.1
* bump cuda to 10.1 for py2 GPU
* removing deep_learning_container.py
* unpin awscli
* updating package versions
* update package name
* rerun build
* pin opencv version
* reverting package versions
1 parent 1004a20 commit e110e37

File tree

6 files changed: +399, -4 lines changed


buildspec.yml

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ version: 0.2

 env:
   variables:
-    FRAMEWORK_VERSION: '1.3.1'
+    FRAMEWORK_VERSION: '1.4.0'
     CPU_PY2_VERSION: '2'
     CPU_PY3_VERSION: '3'
     CPU_INSTANCE_TYPE: 'ml.c4.xlarge'
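
The build commands that consume these variables are outside this hunk, so the lines below are only a sketch of how the version bump might flow into an image build; the tag format and --build-arg wiring are assumptions, not part of the commit.

# Illustrative only: how FRAMEWORK_VERSION might feed a docker build in the build phase.
docker build \
    --build-arg PYTORCH_VERSION=${FRAMEWORK_VERSION} \
    -t pytorch-inference:${FRAMEWORK_VERSION}-cpu-py3 \
    -f docker/${FRAMEWORK_VERSION}/py3/Dockerfile.cpu .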

docker/1.4.0/py2/Dockerfile.cpu

Lines changed: 89 additions & 0 deletions
@@ -0,0 +1,89 @@
FROM ubuntu:16.04

LABEL maintainer="Amazon AI"
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

ARG PYTHON_VERSION=2.7
ARG PYTORCH_VERSION=1.4.0
ARG TORCHVISION_VERSION=0.5.0
ARG MMS_VERSION=1.0.8

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update \
 && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev

RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -y \
    python=$PYTHON_VERSION \
    cython==0.29.12 \
    ipython==5.8.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.2.1 \
    typing==3.7.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    conda-forge \
    awscli==1.16.296 \
    opencv==4.0.1 \
 && conda install -y \
    scikit-learn==0.20.3 \
    pandas==0.24.2 \
    pillow==6.2.1 \
    h5py==2.9.0 \
    requests==2.22.0 \
 && conda install \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
 && conda clean -ya \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && pip install mxnet-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
RUN pip install --no-cache-dir \
    /sagemaker_pytorch_inference.tar.gz \
 && rm /sagemaker_pytorch_inference.tar.gz

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
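
A minimal local smoke test for this image might look like the following. This is a sketch, not part of the commit: it assumes the build context contains the mms-entrypoint.py, config.properties, and sagemaker_pytorch_inference.tar.gz files referenced by the COPY steps, that the default CMD brings up mxnet-model-server, and that this MMS version answers GET /ping on the inference port; the image tag and container name are arbitrary.

# Hypothetical smoke test; the tag, name, and port mapping are illustrative.
docker build -t pytorch-inference:1.4.0-cpu-py2 -f docker/1.4.0/py2/Dockerfile.cpu .
docker run -d --name pt-cpu-py2 -p 8080:8080 -p 8081:8081 pytorch-inference:1.4.0-cpu-py2
# Expect a healthy response once the model server has started (assumes /ping is enabled).
curl http://localhost:8080/ping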

docker/1.4.0/py2/Dockerfile.gpu

Lines changed: 113 additions & 0 deletions
@@ -0,0 +1,113 @@
FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu16.04
# NCCL_VERSION=2.4.7, CUDNN_VERSION=7.6.2.24
LABEL maintainer="Amazon AI"
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true

ARG PYTHON_VERSION=2.7
ARG PYTORCH_VERSION=1.4.0
ARG TORCHVISION_VERSION=0.5.0
ARG MMS_VERSION=1.0.8

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update \
 && apt-get install -y --allow-downgrades --allow-change-held-packages --no-install-recommends \
    build-essential \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libgomp1 \
    libibverbs-dev \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev

# Install OpenSSH, Allow OpenSSH to talk to containers without asking for confirmation
RUN apt-get install -y --no-install-recommends \
    openssh-client openssh-server \
 && mkdir -p /var/run/sshd \
 && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
 && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
 && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config

RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -y \
    python=$PYTHON_VERSION \
    cython==0.29.12 \
    ipython==5.8.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.2.1 \
    typing==3.7.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    pytorch magma-cuda101==2.5.1 \
 && conda install -c \
    conda-forge \
    awscli==1.16.296 \
    opencv==4.0.1 \
 && conda install -y scikit-learn==0.20.3 \
    h5py==2.9.0 \
    pandas==0.24.2 \
    pillow==6.2.1 \
    requests==2.22.0 \
 && conda install -c \
    pytorch \
    cudatoolkit=10.1 \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION \
 && conda clean -ya \
 && /opt/conda/bin/conda config --set ssl_verify False \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && pip install mxnet-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

# Install OpenSSH for MPI to communicate between containers, Allow OpenSSH to talk to containers without asking for confirmation
RUN apt-get install -y --no-install-recommends \
    openssh-client openssh-server \
 && mkdir -p /var/run/sshd \
 && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
 && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
 && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config

# RUN pip install --no-cache-dir 'opencv-python>=4.0,<4.1'

COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
RUN pip install --no-cache-dir \
    /sagemaker_pytorch_inference.tar.gz \
 && rm /sagemaker_pytorch_inference.tar.gz

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
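
For the GPU image, a similarly hedged check that the CUDA 10.1 / PyTorch 1.4.0 stack is importable: it assumes a host with NVIDIA drivers and Docker 19.03+ for --gpus (older setups would use nvidia-docker), and it overrides the image entrypoint purely for the check; the tag is illustrative.

# Hypothetical CUDA sanity check; tag name and runtime flags are assumptions.
docker build -t pytorch-inference:1.4.0-gpu-py2 -f docker/1.4.0/py2/Dockerfile.gpu .
docker run --rm --gpus all --entrypoint python pytorch-inference:1.4.0-gpu-py2 \
    -c "import torch; print(torch.__version__, torch.cuda.is_available())"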

docker/1.4.0/py3/Dockerfile.cpu

Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
FROM ubuntu:16.04

LABEL maintainer="Amazon AI"
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

ARG PYTHON_VERSION=3.6.6
ARG PYTORCH_VERSION=1.4.0
ARG TORCHVISION_VERSION=0.5.0
ARG MMS_VERSION=1.0.8

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev

RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -y \
    python=$PYTHON_VERSION \
    cython==0.29.12 \
    ipython==7.7.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.3.0 \
    typing==3.6.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    conda-forge \
    awscli==1.16.296 \
    opencv==4.0.1 \
 && conda install -y \
    scikit-learn==0.21.2 \
    pandas==0.25.0 \
    pillow==6.2.1 \
    h5py==2.9.0 \
    requests==2.22.0 \
 && conda install \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
 && conda clean -ya \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
 && pip install mxnet-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
RUN pip install --no-cache-dir \
    /sagemaker_pytorch_inference.tar.gz \
 && rm /sagemaker_pytorch_inference.tar.gz

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
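
As with the py2 variants, a short, non-authoritative way to confirm the pinned py3 stack resolves inside the image; the serving entry point installed from the tarball is what the SAGEMAKER_SERVING_MODULE value above points at, and the tag and entrypoint override below are illustrative.

# Hypothetical import check against the framework pins in this Dockerfile.
docker build -t pytorch-inference:1.4.0-cpu-py3 -f docker/1.4.0/py3/Dockerfile.cpu .
docker run --rm --entrypoint python pytorch-inference:1.4.0-cpu-py3 \
    -c "import torch, torchvision; print(torch.__version__, torchvision.__version__)"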
