
Commit 932fbbd

feature: install inference toolkit from PyPI. (#57)
1 parent e049d31 commit 932fbbd

File tree

7 files changed (+33, -52 lines):

- README.rst
- buildspec.yml
- docker/1.3.1/py3/Dockerfile.eia
- docker/1.4.0/py2/Dockerfile.cpu
- docker/1.4.0/py2/Dockerfile.gpu
- docker/1.4.0/py3/Dockerfile.cpu
- docker/1.4.0/py3/Dockerfile.gpu

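In short: the images no longer install the SageMaker PyTorch inference toolkit from a locally built sdist tarball that had to be staged into every Docker build context. Each Dockerfile now installs the published package from PyPI, pinned below version 2, and the buildspec drops the sdist/copy steps while also gating its docker build, docker push, and test commands behind the change-matching helper. A rough before/after sketch assembled from the removed and added lines (the "<2" pin keeps the images on the toolkit's 1.x releases):

    # before: build an sdist locally and copy it into each Docker build context
    $ python3 setup.py sdist
    $ cp dist/sagemaker_pytorch_inference-*.tar.gz sagemaker_pytorch_inference.tar.gz

    # after: no staging step; every Dockerfile installs the published package instead
    RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"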

README.rst

Lines changed: 0 additions & 1 deletion
@@ -168,7 +168,6 @@ Example:
 ::
 
     # PyTorch 1.3.1, Python 3, EI
-    $ cp dist/sagemaker_pytorch_inference-*.tar.gz dist/sagemaker_pytorch_inference.tar.gz
     $ docker build -t preprod-pytorch-serving-eia:1.3.1-cpu-py3 -f docker/1.3.1/py3/Dockerfile.eia .
 
 

buildspec.yml

Lines changed: 28 additions & 31 deletions
@@ -31,55 +31,52 @@ phases:
 
   build:
     commands:
-      - tox -e flake8,twine
+      - tox -e flake8
 
       # install
-      - pip3 install -U -e .
       - pip3 install -U -e .[test]
 
-      # run unit tests
-      - pytest test/unit
-
-      # create tar in dist/
-      - python3 setup.py sdist
-      - tar_name=$(ls dist/sagemaker_pytorch_inference-*.tar.gz)
-      - cp $tar_name sagemaker_pytorch_inference.tar.gz
-
       - cpu_dockerfile="Dockerfile.cpu"
       - gpu_dockerfile="Dockerfile.gpu"
       - eia_dockerfile="Dockerfile.eia"
 
       # build py2 images
       - build_dir="docker/$FRAMEWORK_VERSION/py$CPU_PY2_VERSION"
-      - cp sagemaker_pytorch_inference.tar.gz $build_dir/
       - cp -r docker/build_artifacts/* $build_dir/
       - CPU_PY2_TAG="$FRAMEWORK_VERSION-cpu-py2-$BUILD_ID"
       - GPU_PY2_TAG="$FRAMEWORK_VERSION-gpu-py2-$BUILD_ID"
-      - docker build -f "$build_dir/$cpu_dockerfile" -t $PREPROD_IMAGE:$CPU_PY2_TAG $build_dir
-      - docker build -f "$build_dir/$gpu_dockerfile" -t $PREPROD_IMAGE:$GPU_PY2_TAG $build_dir
+      - build_cmd="docker build -f "$build_dir/$cpu_dockerfile" -t $PREPROD_IMAGE:$CPU_PY2_TAG $build_dir"
+      - execute-command-if-has-matching-changes "$build_cmd" "test/" "docker/*" "buildspec.yml"
+      - build_cmd="docker build -f "$build_dir/$gpu_dockerfile" -t $PREPROD_IMAGE:$GPU_PY2_TAG $build_dir"
+      - execute-command-if-has-matching-changes "$build_cmd" "test/" "docker/*" "buildspec.yml"
 
       # build py3 image
       - build_dir="docker/$FRAMEWORK_VERSION/py$GPU_PY3_VERSION"
-      - cp sagemaker_pytorch_inference.tar.gz $build_dir/
       - cp -r docker/build_artifacts/* $build_dir/
       - CPU_PY3_TAG="$FRAMEWORK_VERSION-cpu-py3-$BUILD_ID"
       - GPU_PY3_TAG="$FRAMEWORK_VERSION-gpu-py3-$BUILD_ID"
       - EIA_PY3_TAG="$EIA_FRAMEWORK_VERSION-eia-py3-$BUILD_ID"
-      - docker build -f "$build_dir/$cpu_dockerfile" -t $PREPROD_IMAGE:$CPU_PY3_TAG $build_dir
-      - docker build -f "$build_dir/$gpu_dockerfile" -t $PREPROD_IMAGE:$GPU_PY3_TAG $build_dir
+      - build_cmd="docker build -f "$build_dir/$cpu_dockerfile" -t $PREPROD_IMAGE:$CPU_PY3_TAG $build_dir"
+      - execute-command-if-has-matching-changes "$build_cmd" "test/" "docker/*" "buildspec.yml"
+      - build_cmd="docker build -f "$build_dir/$gpu_dockerfile" -t $PREPROD_IMAGE:$GPU_PY3_TAG $build_dir"
+      - execute-command-if-has-matching-changes "$build_cmd" "test/" "docker/*" "buildspec.yml"
       # PY2 not offered for EIA PyTorch
       - eia_build_dir="docker/$EIA_FRAMEWORK_VERSION/py$EIA_PY3_VERSION"
-      - cp sagemaker_pytorch_inference.tar.gz $eia_build_dir/
       - cp -r docker/build_artifacts/* $eia_build_dir/
       - docker build -f "$eia_build_dir/$eia_dockerfile" -t $PREPROD_IMAGE:$EIA_PY3_TAG $eia_build_dir
 
       # push images to ecr
       - $(aws ecr get-login --registry-ids $ACCOUNT --no-include-email --region $AWS_DEFAULT_REGION)
-      - docker push $PREPROD_IMAGE:$CPU_PY2_TAG
-      - docker push $PREPROD_IMAGE:$CPU_PY3_TAG
-      - docker push $PREPROD_IMAGE:$GPU_PY2_TAG
-      - docker push $PREPROD_IMAGE:$GPU_PY3_TAG
-      - docker push $PREPROD_IMAGE:$EIA_PY3_TAG
+      - push_cmd="docker push $PREPROD_IMAGE:$CPU_PY2_TAG"
+      - execute-command-if-has-matching-changes "$push_cmd" "test/" "docker/*" "buildspec.yml"
+      - push_cmd="docker push $PREPROD_IMAGE:$CPU_PY3_TAG"
+      - execute-command-if-has-matching-changes "$push_cmd" "test/" "docker/*" "buildspec.yml"
+      - push_cmd="docker push $PREPROD_IMAGE:$GPU_PY2_TAG"
+      - execute-command-if-has-matching-changes "$push_cmd" "test/" "docker/*" "buildspec.yml"
+      - push_cmd="docker push $PREPROD_IMAGE:$GPU_PY3_TAG"
+      - execute-command-if-has-matching-changes "$push_cmd" "test/" "docker/*" "buildspec.yml"
+      - push_cmd="docker push $PREPROD_IMAGE:$EIA_PY3_TAG"
+      - execute-command-if-has-matching-changes "$push_cmd" "test/" "docker/*" "buildspec.yml"
 
       # launch remote gpu instance
       - prefix='ml.'

@@ -90,34 +87,34 @@ phases:
       # run cpu integration tests
       - py3_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --framework-version $FRAMEWORK_VERSION --py-version $CPU_PY3_VERSION --processor cpu --tag $CPU_PY3_TAG"
       - py2_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --framework-version $FRAMEWORK_VERSION --py-version $CPU_PY2_VERSION --processor cpu --tag $CPU_PY2_TAG"
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
-      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "docker/*" "buildspec.yml"
 
       # run gpu integration tests
       - printf "$SETUP_CMDS" > $SETUP_FILE
       - py3_pytest_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --framework-version $FRAMEWORK_VERSION --py-version $GPU_PY3_VERSION --processor gpu --tag $GPU_PY3_TAG"
       - py3_cmd="remote-test --github-repo $GITHUB_REPO --test-cmd \"$py3_pytest_cmd\" --setup-file $SETUP_FILE --pr-number \"$PR_NUM\""
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "docker/*" "buildspec.yml"
 
       - py2_pytest_cmd="pytest test/integration/local --region $AWS_DEFAULT_REGION --docker-base-name $PREPROD_IMAGE --framework-version $FRAMEWORK_VERSION --py-version $GPU_PY2_VERSION --processor gpu --tag $GPU_PY2_TAG"
       - py2_cmd="remote-test --github-repo $GITHUB_REPO --test-cmd \"$py2_pytest_cmd\" --setup-file $SETUP_FILE --pr-number \"$PR_NUM\" --skip-setup"
-      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "docker/*" "buildspec.yml"
 
       # run cpu sagemaker tests
       - py3_cmd="pytest test/integration/sagemaker --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_VERSION --py-version $CPU_PY3_VERSION --processor cpu --instance-type $CPU_INSTANCE_TYPE --tag $CPU_PY3_TAG"
       - py2_cmd="pytest test/integration/sagemaker --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_VERSION --py-version $CPU_PY2_VERSION --processor cpu --instance-type $CPU_INSTANCE_TYPE --tag $CPU_PY2_TAG"
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
-      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "docker/*" "buildspec.yml"
 
       # run gpu sagemaker tests
       - py3_cmd="pytest test/integration/sagemaker --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_VERSION --py-version $GPU_PY3_VERSION --processor gpu --instance-type $GPU_INSTANCE_TYPE --tag $GPU_PY3_TAG"
       - py2_cmd="pytest test/integration/sagemaker --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $FRAMEWORK_VERSION --py-version $GPU_PY2_VERSION --processor gpu --instance-type $GPU_INSTANCE_TYPE --tag $GPU_PY2_TAG"
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
-      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py2_cmd" "test/" "docker/*" "buildspec.yml"
 
       # run eia sagemaker tests
       - py3_cmd="pytest test/integration/sagemaker --region $AWS_DEFAULT_REGION --docker-base-name $ECR_REPO --aws-id $ACCOUNT --framework-version $EIA_FRAMEWORK_VERSION --py-version $EIA_PY3_VERSION --processor cpu --instance-type $CPU_INSTANCE_TYPE --accelerator-type $EIA_ACCELERATOR_TYPE --tag $EIA_PY3_TAG"
-      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "src/*.py" "setup.py" "setup.cfg" "docker/*" "buildspec.yml"
+      - execute-command-if-has-matching-changes "$py3_cmd" "test/" "docker/*" "buildspec.yml"
 
     finally:
       # shut down remote gpu instance
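The execute-command-if-has-matching-changes calls above take a command string followed by path patterns and run the command only when the change set touches a matching path; note that src/*.py, setup.py, and setup.cfg are dropped from the pattern lists here, which is consistent with the images no longer being built from local toolkit source. The helper itself is not part of this diff; a minimal shell sketch of that behavior, assuming the changed files can be read from git (hypothetical, not the actual CI helper), might look like:

    #!/bin/bash
    # Hypothetical sketch of execute-command-if-has-matching-changes -- not the real helper.
    # Usage: execute-command-if-has-matching-changes "<command>" <pattern>...
    cmd="$1"; shift
    changed_files=$(git diff --name-only HEAD~1)   # assumption: compare against the previous commit
    for pattern in "$@"; do
      # treat a trailing '*' or '/' as a prefix match on the changed paths
      if echo "$changed_files" | grep -q "^${pattern%\*}"; then
        eval "$cmd"
        exit $?
      fi
    done
    echo "Skipping (no matching changes): $cmd"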

docker/1.3.1/py3/Dockerfile.eia

Lines changed: 1 addition & 4 deletions
@@ -89,10 +89,7 @@ COPY config.properties /home/model-server
 
 RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
 
-COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
-RUN pip install --no-cache-dir \
-    /sagemaker_pytorch_inference.tar.gz \
-    && rm /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"
 
 RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch/license.txt -o /license.txt
 
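The same replacement is made in the four 1.4.0 Dockerfiles below: the COPY of the tarball plus the multi-line pip install/cleanup collapses into a single pip install of the PyPI package, pinned to "<2" so the images stay on the toolkit's 1.x line. To see what that pin resolves to on a build host, an illustrative check with standard pip commands (not part of the diff) is:

    $ pip download --no-deps --dest /tmp/toolkit "sagemaker-pytorch-inference<2"
    $ ls /tmp/toolkit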

docker/1.4.0/py2/Dockerfile.cpu

Lines changed: 1 addition & 4 deletions
@@ -77,10 +77,7 @@ COPY config.properties /home/model-server
 
 RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
 
-COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
-RUN pip install --no-cache-dir \
-    /sagemaker_pytorch_inference.tar.gz \
-    && rm /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"
 
 RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch-1.4.0/license.txt -o /license.txt
 

docker/1.4.0/py2/Dockerfile.gpu

Lines changed: 1 addition & 4 deletions
@@ -101,10 +101,7 @@ RUN apt-get install -y --no-install-recommends \
 
 # RUN pip install --no-cache-dir 'opencv-python>=4.0,<4.1'
 
-COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
-RUN pip install --no-cache-dir \
-    /sagemaker_pytorch_inference.tar.gz \
-    && rm /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"
 
 RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch-1.4.0/license.txt -o /license.txt
 

docker/1.4.0/py3/Dockerfile.cpu

Lines changed: 1 addition & 4 deletions
@@ -76,10 +76,7 @@ COPY config.properties /home/model-server
 
 RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
 
-COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
-RUN pip install --no-cache-dir \
-    /sagemaker_pytorch_inference.tar.gz \
-    && rm /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"
 
 RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch-1.4.0/license.txt -o /license.txt
 

docker/1.4.0/py3/Dockerfile.gpu

Lines changed: 1 addition & 4 deletions
@@ -93,10 +93,7 @@ COPY config.properties /home/model-server
 
 RUN chmod +x /usr/local/bin/dockerd-entrypoint.py
 
-COPY sagemaker_pytorch_inference.tar.gz /sagemaker_pytorch_inference.tar.gz
-RUN pip install --no-cache-dir \
-    /sagemaker_pytorch_inference.tar.gz \
-    && rm /sagemaker_pytorch_inference.tar.gz
+RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"
 
 RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch-1.4.0/license.txt -o /license.txt
 
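After building one of these images locally (for example with the docker build command shown in the README example above), a quick check that the toolkit was installed from PyPI is to query pip inside the container; the tag below is just the example tag from the README, substitute your own:

    $ docker run --rm preprod-pytorch-serving-eia:1.3.1-cpu-py3 pip show sagemaker-pytorch-inference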
