
Commit d0b02e2

PyTorch 1.3.1 (#13)
pytorch 1.3.1 changes
1 parent 1ba29db commit d0b02e2

9 files changed: +478 -1 lines changed

.gitignore

+2 -1
@@ -1 +1,2 @@
-.idea/
+.idea/
+.DS_Store

docker/1.3.1/py2/Dockerfile.cpu

+88
@@ -0,0 +1,88 @@
FROM ubuntu:16.04

LABEL maintainer="Amazon AI"

ARG PYTHON_VERSION=2.7
ARG PYTORCH_VERSION=1.3.1
ARG TORCHVISION_VERSION=0.4.2
ARG MMS_VERSION=1.0.8

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update \
 && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev

RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -y \
    python=$PYTHON_VERSION \
    cython==0.29.12 \
    ipython==5.8.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.2.1 \
    typing==3.7.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    conda-forge \
    awscli==1.16.210 \
    opencv==4.0.1 \
 && conda install -y \
    scikit-learn==0.20.3 \
    pandas==0.24.2 \
    pillow==6.1.0 \
    h5py==2.9.0 \
    requests==2.22.0 \
 && conda install \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
 && conda clean -ya \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && pip install mxnet-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY docker/$PYTORCH_VERSION/py2/mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY docker/$PYTORCH_VERSION/py2/config.properties /home/model-server
COPY src/sagemaker_pytorch_serving_container/deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
 && chmod +x /usr/local/bin/deep_learning_container.py

COPY dist/sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl
RUN pip install --no-cache-dir \
    /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl \
 && rm /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
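A minimal usage sketch, not taken from this commit: the image above would be built from the repository root so that the COPY paths for the entrypoint, config.properties, and the wheel under dist/ resolve (the wheel is assumed to have been built already, and the tag below is only an example):

    docker build -f docker/1.3.1/py2/Dockerfile.cpu -t sagemaker-pytorch-serving:1.3.1-cpu-py2 .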

docker/1.3.1/py2/Dockerfile.gpu

+112
@@ -0,0 +1,112 @@
FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu16.04
# NCCL_VERSION=2.4.7, CUDNN_VERSION=7.6.2.24
LABEL maintainer="Amazon AI"

ARG PYTHON_VERSION=2.7
ARG PYTORCH_VERSION=1.3.1
ARG TORCHVISION_VERSION=0.4.2
ARG MMS_VERSION=1.0.8

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update \
 && apt-get install -y --allow-downgrades --allow-change-held-packages --no-install-recommends \
    build-essential \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libgomp1 \
    libibverbs-dev \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev

# Install OpenSSH, Allow OpenSSH to talk to containers without asking for confirmation
RUN apt-get install -y --no-install-recommends \
    openssh-client openssh-server \
 && mkdir -p /var/run/sshd \
 && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
 && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
 && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config

RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -y \
    python=$PYTHON_VERSION \
    cython==0.29.12 \
    ipython==5.8.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.2.1 \
    typing==3.7.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    pytorch magma-cuda100 \
 && conda install -c \
    conda-forge \
    awscli==1.16.210 \
    opencv==4.0.1 \
 && conda install -y scikit-learn==0.20.3 \
    h5py==2.9.0 \
    pandas==0.24.2 \
    pillow==6.1.0 \
    requests==2.22.0 \
 && conda install -c \
    pytorch \
    cudatoolkit=10.0 \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION \
 && conda clean -ya \
 && /opt/conda/bin/conda config --set ssl_verify False \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && pip install mxnet-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY docker/$PYTORCH_VERSION/py2/mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY docker/$PYTORCH_VERSION/py2/config.properties /home/model-server
COPY src/sagemaker_pytorch_serving_container/deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
 && chmod +x /usr/local/bin/deep_learning_container.py

# Install OpenSSH for MPI to communicate between containers, Allow OpenSSH to talk to containers without asking for confirmation
RUN apt-get install -y --no-install-recommends \
    openssh-client openssh-server \
 && mkdir -p /var/run/sshd \
 && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
 && echo " StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
 && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config

# RUN pip install --no-cache-dir 'opencv-python>=4.0,<4.1'

COPY dist/sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl
RUN pip install --no-cache-dir \
    /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl \
 && rm /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]

docker/1.3.1/py2/config.properties

+26
@@ -0,0 +1,26 @@
vmargs=-Xmx128m -XX:-UseLargePages -XX:+UseG1GC -XX:MaxMetaspaceSize=32M -XX:MaxDirectMemorySize=10m -XX:+ExitOnOutOfMemoryError
model_store=/opt/ml/model
load_models=ALL
inference_address=http://0.0.0.0:8080
management_address=http://0.0.0.0:8081
# management_address=unix:/tmp/management.sock
# number_of_netty_threads=0
# netty_client_threads=0
# default_response_timeout=120
# default_workers_per_model=0
# job_queue_size=100
# async_logging=false
# number_of_gpu=1
# cors_allowed_origin
# cors_allowed_methods
# cors_allowed_headers
# keystore=src/test/resources/keystore.p12
# keystore_pass=changeit
# keystore_type=PKCS12
# private_key_file=src/test/resources/key.pem
# certificate_file=src/test/resources/certs.pem
# max_response_size=6553500
# max_request_size=6553500
# blacklist_env_vars=
# decode_input_request=false
# enable_envvars_config=false
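The inference_address and management_address above line up with the ports the Dockerfiles expose (EXPOSE 8080 8081). As an illustrative check, not part of this commit, mxnet-model-server answers a health-check endpoint on the inference port, so a locally running container can be smoke-tested with something like:

    curl http://localhost:8080/ping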

docker/1.3.1/py2/mms-entrypoint.py

+16
@@ -0,0 +1,16 @@
import shlex
import subprocess
import sys
import os.path

if not os.path.exists("/opt/ml/input/config"):
    subprocess.call(['python', '/usr/local/bin/deep_learning_container.py', '&>/dev/null', '&'])

if sys.argv[1] == 'serve':
    from sagemaker_pytorch_serving_container import serving
    serving.main()
else:
    subprocess.check_call(shlex.split(' '.join(sys.argv[1:])))

# prevent docker exit
subprocess.call(['tail', '-f', '/dev/null'])
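Read together with the ENTRYPOINT and CMD lines in the Dockerfiles, this entrypoint dispatches on its first argument: with no explicit command, the default CMD is forwarded through shlex/check_call and starts mxnet-model-server, while the serve argument imports and runs serving.main() from sagemaker_pytorch_serving_container, which is the path SageMaker hosting uses. A rough local invocation, with an assumed image tag and model directory, might be:

    docker run -p 8080:8080 -v $(pwd)/model:/opt/ml/model sagemaker-pytorch-serving:1.3.1-cpu-py2 serve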

docker/1.3.1/py3/Dockerfile.cpu

+87
@@ -0,0 +1,87 @@
FROM ubuntu:16.04

LABEL maintainer="Amazon AI"

ARG PYTHON_VERSION=3.6.6
ARG PYTORCH_VERSION=1.3.1
ARG TORCHVISION_VERSION=0.4.2
ARG MMS_VERSION=1.0.8

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev

RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -y \
    python=$PYTHON_VERSION \
    cython==0.29.12 \
    ipython==7.7.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.3.0 \
    typing==3.6.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    conda-forge \
    awscli==1.16.210 \
    opencv==4.0.1 \
 && conda install -y \
    scikit-learn==0.21.2 \
    pandas==0.25.0 \
    pillow==5.4.1 \
    h5py==2.9.0 \
    requests==2.22.0 \
 && conda install \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
 && conda clean -ya \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
 && pip install mxnet-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY docker/$PYTORCH_VERSION/py3/mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY docker/$PYTORCH_VERSION/py3/config.properties /home/model-server
COPY src/sagemaker_pytorch_serving_container/deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
 && chmod +x /usr/local/bin/deep_learning_container.py

COPY dist/sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl
RUN pip install --no-cache-dir \
    /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl \
 && rm /sagemaker_pytorch_serving_container-1.2-py2.py3-none-any.whl

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
