Skip to content

fix: construct correct code path and correctly tar up contents #84

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 10 commits on
Sep 2, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion buildspec.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ version: 0.2

env:
variables:
FRAMEWORK_VERSION: '1.5.0'
FRAMEWORK_VERSION: '1.6.0'
EIA_FRAMEWORK_VERSION: '1.3.1'
CPU_INSTANCE_TYPE: 'ml.c4.xlarge'
GPU_INSTANCE_TYPE: 'ml.p2.8xlarge'
Expand Down
3 changes: 2 additions & 1 deletion src/sagemaker_pytorch_serving_container/torchserve.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
DEFAULT_TS_MODEL_DIRECTORY = os.path.join(os.getcwd(), ".sagemaker", "ts", "models")
DEFAULT_TS_MODEL_NAME = "model"
DEFAULT_TS_MODEL_SERIALIZED_FILE = "model.pth"
DEFAULT_TS_CODE_DIR = "code"
DEFAULT_HANDLER_SERVICE = "sagemaker_pytorch_serving_container.handler_service"

ENABLE_MULTI_MODEL = os.getenv("SAGEMAKER_MULTI_MODEL", "false") == "true"
Expand Down Expand Up @@ -121,7 +122,7 @@ def _adapt_to_ts_format(handler_service):
"--export-path",
DEFAULT_TS_MODEL_DIRECTORY,
"--extra-files",
os.path.join(environment.model_dir, environment.Environment().module_name + ".py"),
os.path.join(environment.model_dir, DEFAULT_TS_CODE_DIR, environment.Environment().module_name + ".py"),

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

use string interpolation here:

Suggested change
os.path.join(environment.model_dir, DEFAULT_TS_CODE_DIR, environment.Environment().module_name + ".py"),
os.path.join(environment.model_dir, DEFAULT_TS_CODE_DIR, "{}.py".format(environment.Environment().module_name)),

"--version",
"1",
]
Expand Down
19 changes: 1 addition & 18 deletions test/container/1.6.0/Dockerfile.dlc.cpu
Original file line number Diff line number Diff line change
@@ -1,23 +1,6 @@
ARG region
FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3

ARG TS_VERSION=0.1.1
RUN apt-get update \
&& apt-get install -y --no-install-recommends software-properties-common \
&& add-apt-repository ppa:openjdk-r/ppa \
&& apt-get update \
&& apt-get install -y --no-install-recommends openjdk-11-jdk

RUN pip install torchserve==$TS_VERSION \
&& pip install torch-model-archiver==$TS_VERSION

RUN pip uninstall torch \
&& pip uninstall torchvision \
&& pip install torch=1.6.0 \
&& pip install torchvision=0.7.0
FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.6.0-cpu-py3

COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
rm /sagemaker_pytorch_inference.tar.gz

CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
19 changes: 1 addition & 18 deletions test/container/1.6.0/Dockerfile.dlc.gpu
Original file line number Diff line number Diff line change
@@ -1,23 +1,6 @@
ARG region
FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3

ARG TS_VERSION=0.1.1
RUN apt-get update \
&& apt-get install -y --no-install-recommends software-properties-common \
&& add-apt-repository ppa:openjdk-r/ppa \
&& apt-get update \
&& apt-get install -y --no-install-recommends openjdk-11-jdk

RUN pip install torchserve==$TS_VERSION \
&& pip install torch-model-archiver==$TS_VERSION

RUN pip uninstall torch \
&& pip uninstall torchvision \
&& pip install torch=1.6.0 \
&& pip install torchvision=0.7.0
FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.6.0-gpu-py3

COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
rm /sagemaker_pytorch_inference.tar.gz

CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
21 changes: 13 additions & 8 deletions test/integration/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,36 +23,41 @@
cpu_sub_dir = 'model_cpu'
gpu_sub_dir = 'model_gpu'
eia_sub_dir = 'model_eia'
code_sub_dir = 'code'

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

apply black formatter and favor double-quotes here and throughout


model_cpu_dir = os.path.join(mnist_path, cpu_sub_dir)
mnist_cpu_script = os.path.join(model_cpu_dir, 'mnist.py')
mnist_cpu_script = os.path.join(model_cpu_dir, code_sub_dir, 'mnist.py')

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

standardize the quotes used.

Suggested change
mnist_cpu_script = os.path.join(model_cpu_dir, code_sub_dir, 'mnist.py')
mnist_cpu_script = os.path.join(model_cpu_dir, code_sub_dir, "mnist.py")

applying black formatter will standardize code and increase readability. we'll introduce black to this package to auto-format, increase code quality, and eliminate these nits

model_cpu_tar = file_utils.make_tarfile(mnist_cpu_script,
os.path.join(model_cpu_dir, "model.pth"),
model_cpu_dir)
model_cpu_dir,
script_path="code")

model_cpu_1d_dir = os.path.join(model_cpu_dir, '1d')
mnist_1d_script = os.path.join(model_cpu_1d_dir, 'mnist_1d.py')
mnist_1d_script = os.path.join(model_cpu_1d_dir, code_sub_dir, 'mnist_1d.py')
model_cpu_1d_tar = file_utils.make_tarfile(mnist_1d_script,
os.path.join(model_cpu_1d_dir, "model.pth"),
model_cpu_1d_dir)
model_cpu_1d_dir,
script_path="code")

model_gpu_dir = os.path.join(mnist_path, gpu_sub_dir)
mnist_gpu_script = os.path.join(model_gpu_dir, 'mnist.py')
mnist_gpu_script = os.path.join(model_gpu_dir, code_sub_dir, 'mnist.py')
model_gpu_tar = file_utils.make_tarfile(mnist_gpu_script,
os.path.join(model_gpu_dir, "model.pth"),
model_gpu_dir)
model_gpu_dir,
script_path="code")

model_eia_dir = os.path.join(mnist_path, eia_sub_dir)
mnist_eia_script = os.path.join(model_eia_dir, 'mnist.py')
model_eia_tar = file_utils.make_tarfile(mnist_eia_script,
os.path.join(model_eia_dir, "model.pth"),
model_eia_dir)

call_model_fn_once_script = os.path.join(model_cpu_dir, 'call_model_fn_once.py')
call_model_fn_once_script = os.path.join(model_cpu_dir, code_sub_dir, 'call_model_fn_once.py')
call_model_fn_once_tar = file_utils.make_tarfile(call_model_fn_once_script,
os.path.join(model_cpu_dir, "model.pth"),
model_cpu_dir,
"model_call_model_fn_once.tar.gz")
"model_call_model_fn_once.tar.gz",
script_path="code")

ROLE = 'dummy/unused-role'
DEFAULT_TIMEOUT = 20
Expand Down
4 changes: 3 additions & 1 deletion test/unit/test_model_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,9 @@ def test_adapt_to_ts_format(path_exists, make_dir, subprocess_check_call, set_py
"--export-path",
torchserve.DEFAULT_TS_MODEL_DIRECTORY,
"--extra-files",
os.path.join(environment.model_dir, environment.Environment().module_name + ".py"),
os.path.join(environment.model_dir,
torchserve.DEFAULT_TS_CODE_DIR,
environment.Environment().module_name + ".py"),

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
environment.Environment().module_name + ".py"),
"{}.py".format(environment.Environment().module_name)),

"--version",
"1",
]
Expand Down
7 changes: 5 additions & 2 deletions test/utils/file_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,12 @@
import tarfile


def make_tarfile(script, model, output_path, filename="model.tar.gz"):
def make_tarfile(script, model, output_path, filename="model.tar.gz", script_path=None):

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

default to empty string and then later you can simplify code (plus it matches the type expected):

Suggested change
def make_tarfile(script, model, output_path, filename="model.tar.gz", script_path=None):
def make_tarfile(script, model, output_path, filename="model.tar.gz", script_path=""):

output_filename = os.path.join(output_path, filename)
with tarfile.open(output_filename, "w:gz") as tar:
tar.add(script, arcname=os.path.basename(script))
if(script_path):
tar.add(script, arcname=os.path.join(script_path, os.path.basename(script)))
else:
tar.add(script, arcname=os.path.basename(script))
Comment on lines +22 to +25

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

assuming script_path is defaulted to empty string:

Suggested change
if(script_path):
tar.add(script, arcname=os.path.join(script_path, os.path.basename(script)))
else:
tar.add(script, arcname=os.path.basename(script))
tar.add(script, arcname=os.path.join(script_path, os.path.basename(script)))

this is because os.path.join with empty string arg has no effect on the resulting path:

>>> os.path.join("", "/tmp/foo")
'/tmp/foo'
>>> os.path.join("", "/tmp", "foo")
'/tmp/foo'
>>> os.path.join("", "tmp", "foo")
'tmp/foo'

tar.add(model, arcname=os.path.basename(model))
return output_filename