
Commit 5dada25

Update framework version to 1.6.0
1 parent 22567d0 commit 5dada25

6 files changed: +60 -3 lines changed

buildspec.yml

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ version: 0.2
 
 env:
   variables:
-    FRAMEWORK_VERSION: '1.5.0'
+    FRAMEWORK_VERSION: '1.6.0'
     EIA_FRAMEWORK_VERSION: '1.3.1'
     CPU_INSTANCE_TYPE: 'ml.c4.xlarge'
     GPU_INSTANCE_TYPE: 'ml.p2.8xlarge'

setup.py

Lines changed: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ def read(fname):
         'test': ['boto3==1.10.32', 'coverage==4.5.3', 'docker-compose==1.23.2', 'flake8==3.7.7', 'Flask==1.1.1',
                  'mock==2.0.0', 'pytest==4.4.0', 'pytest-cov==2.7.1', 'pytest-xdist==1.28.0', 'PyYAML==3.10',
                  'sagemaker==1.56.3', 'sagemaker-containers>=2.5.4', 'six==1.12.0', 'requests==2.20.0',
-                 'requests_mock==1.6.0', 'torch==1.5.0', 'torchvision==0.6.0', 'tox==3.7.0']
+                 'requests_mock==1.6.0', 'torch==1.6.0', 'torchvision==0.7.0', 'tox==3.7.0']
     },
 
     entry_points={
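
Aside: torch 1.6.0 and torchvision 0.7.0 are the matching release pair, which is why both test pins move together here. A quick, illustrative check (not part of this commit) that an environment built from these pins resolves to that pair:

# Illustrative only: confirm the installed torch/torchvision pair
# matches the 1.6.0 release line pinned in the 'test' extras above.
import torch
import torchvision

assert torch.__version__.startswith('1.6.0'), torch.__version__
assert torchvision.__version__.startswith('0.7.0'), torchvision.__version__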

test/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@ def pytest_addoption(parser):
     parser.addoption('--accelerator-type')
     parser.addoption('--docker-base-name', default='sagemaker-pytorch-inference')
     parser.addoption('--region', default='us-west-2')
-    parser.addoption('--framework-version', default="1.5.0")
+    parser.addoption('--framework-version', default="1.6.0")
     parser.addoption('--py-version', choices=['2', '3'], default='3')
     # Processor is still "cpu" for EIA tests
     parser.addoption('--processor', choices=['gpu', 'cpu'], default='cpu')
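
The new default only applies when --framework-version is not passed explicitly on the pytest command line. As a rough sketch of how such an option is typically consumed (the fixture name and scope below are assumptions, not necessarily what this test suite defines):

# Hypothetical fixture sketch: expose the --framework-version option
# (now defaulting to '1.6.0') to tests that need it.
import pytest

@pytest.fixture(scope='session')
def framework_version(request):
    # Falls back to the default registered in pytest_addoption above.
    return request.config.getoption('--framework-version')
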
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+ARG region
+FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3
+
+COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
+    rm /sagemaker_pytorch_inference.tar.gz
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+ARG region
+FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-gpu-py3
+
+COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
+    rm /sagemaker_pytorch_inference.tar.gz
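
The two new Dockerfiles above install the packaged toolkit on top of the published 1.5.0 deep-learning-container inference images and parametrize the base image by region via the region build arg. Purely as an illustration (the helper below is hypothetical, not part of this commit), the FROM line resolves as follows for the test suite's default region, us-west-2:

# Hypothetical helper: resolve the parametrized base-image URI used in
# the FROM lines above for a given region and device type.
def dlc_image_uri(region, device='cpu', version='1.5.0'):
    return ('763104351884.dkr.ecr.{region}.amazonaws.com/'
            'pytorch-inference:{version}-{device}-py3').format(
                region=region, version=version, device=device)

print(dlc_image_uri('us-west-2'))
# -> 763104351884.dkr.ecr.us-west-2.amazonaws.com/pytorch-inference:1.5.0-cpu-py3
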
Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+FROM pytorch/pytorch:1.6.0-cuda10.1-cudnn7-runtime
+
+LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
+LABEL com.amazonaws.sagemaker.capabilities.multi-models=true
+
+ARG TS_VERSION=0.1.1
+
+ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
+ENV TEMP=/home/model-server/tmp
+
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends software-properties-common \
+ && add-apt-repository ppa:openjdk-r/ppa \
+ && apt-get update \
+ && apt-get install -y --no-install-recommends \
+    libgl1-mesa-glx \
+    libglib2.0-0 \
+    libsm6 \
+    libxext6 \
+    libxrender-dev \
+    openjdk-11-jdk-headless \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN conda install -c conda-forge opencv==4.0.1 \
+ && ln -s /opt/conda/bin/pip /usr/local/bin/pip3
+
+RUN pip install torchserve==$TS_VERSION \
+ && pip install torch-model-archiver==$TS_VERSION
+
+COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
+    rm /sagemaker_pytorch_inference.tar.gz
+
+RUN useradd -m model-server \
+ && mkdir -p /home/model-server/tmp \
+ && chown -R model-server /home/model-server
+
+COPY artifacts/ts-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
+COPY artifacts/config.properties /home/model-server
+
+RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
+
+EXPOSE 8080 8081
+ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
+CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
