# fix: construct correct code path and correctly tar up contents #84
Changes from all commits: 8e6d29d, bb0acc5, c050581, 5378d68, 358b4cc, 77faf02, 3e03702, 67f9a23, 7cd8cbb, 508a8a2
**Dockerfile (CPU image)** — replaces the PyTorch 1.5.0 base and its hand-rolled OpenJDK/TorchServe install layers (plus the torch/torchvision swap) with the PyTorch 1.6.0 inference base image, which already provides TorchServe (the `CMD` still invokes it):

```diff
@@ -1,23 +1,6 @@
 ARG region
-FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3
-
-ARG TS_VERSION=0.1.1
-RUN apt-get update \
- && apt-get install -y --no-install-recommends software-properties-common \
- && add-apt-repository ppa:openjdk-r/ppa \
- && apt-get update \
- && apt-get install -y --no-install-recommends openjdk-11-jdk
-
-RUN pip install torchserve==$TS_VERSION \
- && pip install torch-model-archiver==$TS_VERSION
-
-RUN pip uninstall torch \
- && pip uninstall torchvision \
- && pip install torch=1.6.0 \
- && pip install torchvision=0.7.0
+FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.6.0-cpu-py3
 
 COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
 RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
     rm /sagemaker_pytorch_inference.tar.gz
 
 CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
```
**Dockerfile (GPU image)** — the same cleanup, with the new base being the 1.6.0 GPU image:

```diff
@@ -1,23 +1,6 @@
 ARG region
-FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.5.0-cpu-py3
-
-ARG TS_VERSION=0.1.1
-RUN apt-get update \
- && apt-get install -y --no-install-recommends software-properties-common \
- && add-apt-repository ppa:openjdk-r/ppa \
- && apt-get update \
- && apt-get install -y --no-install-recommends openjdk-11-jdk
-
-RUN pip install torchserve==$TS_VERSION \
- && pip install torch-model-archiver==$TS_VERSION
-
-RUN pip uninstall torch \
- && pip uninstall torchvision \
- && pip install torch=1.6.0 \
- && pip install torchvision=0.7.0
+FROM 763104351884.dkr.ecr.$region.amazonaws.com/pytorch-inference:1.6.0-gpu-py3
 
 COPY dist/sagemaker_pytorch_inference-*.tar.gz /sagemaker_pytorch_inference.tar.gz
 RUN pip install --upgrade --no-cache-dir /sagemaker_pytorch_inference.tar.gz && \
     rm /sagemaker_pytorch_inference.tar.gz
 
 CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
```
**Integration test resources** — the entry scripts move under a `code/` subdirectory, and the tarballs are built with the new `script_path` argument so the scripts keep that prefix inside the archive (the EIA model is left unchanged):

```diff
@@ -23,36 +23,41 @@
 cpu_sub_dir = 'model_cpu'
 gpu_sub_dir = 'model_gpu'
 eia_sub_dir = 'model_eia'
+code_sub_dir = 'code'
 
 model_cpu_dir = os.path.join(mnist_path, cpu_sub_dir)
-mnist_cpu_script = os.path.join(model_cpu_dir, 'mnist.py')
+mnist_cpu_script = os.path.join(model_cpu_dir, code_sub_dir, 'mnist.py')
 model_cpu_tar = file_utils.make_tarfile(mnist_cpu_script,
                                         os.path.join(model_cpu_dir, "model.pth"),
-                                        model_cpu_dir)
+                                        model_cpu_dir,
+                                        script_path="code")
 
 model_cpu_1d_dir = os.path.join(model_cpu_dir, '1d')
-mnist_1d_script = os.path.join(model_cpu_1d_dir, 'mnist_1d.py')
+mnist_1d_script = os.path.join(model_cpu_1d_dir, code_sub_dir, 'mnist_1d.py')
 model_cpu_1d_tar = file_utils.make_tarfile(mnist_1d_script,
                                            os.path.join(model_cpu_1d_dir, "model.pth"),
-                                           model_cpu_1d_dir)
+                                           model_cpu_1d_dir,
+                                           script_path="code")
 
 model_gpu_dir = os.path.join(mnist_path, gpu_sub_dir)
-mnist_gpu_script = os.path.join(model_gpu_dir, 'mnist.py')
+mnist_gpu_script = os.path.join(model_gpu_dir, code_sub_dir, 'mnist.py')
 model_gpu_tar = file_utils.make_tarfile(mnist_gpu_script,
                                         os.path.join(model_gpu_dir, "model.pth"),
-                                        model_gpu_dir)
+                                        model_gpu_dir,
+                                        script_path="code")
 
 model_eia_dir = os.path.join(mnist_path, eia_sub_dir)
 mnist_eia_script = os.path.join(model_eia_dir, 'mnist.py')
 model_eia_tar = file_utils.make_tarfile(mnist_eia_script,
                                         os.path.join(model_eia_dir, "model.pth"),
                                         model_eia_dir)
 
-call_model_fn_once_script = os.path.join(model_cpu_dir, 'call_model_fn_once.py')
+call_model_fn_once_script = os.path.join(model_cpu_dir, code_sub_dir, 'call_model_fn_once.py')
 call_model_fn_once_tar = file_utils.make_tarfile(call_model_fn_once_script,
                                                  os.path.join(model_cpu_dir, "model.pth"),
                                                  model_cpu_dir,
-                                                 "model_call_model_fn_once.tar.gz")
+                                                 "model_call_model_fn_once.tar.gz",
+                                                 script_path="code")
 
 ROLE = 'dummy/unused-role'
 DEFAULT_TIMEOUT = 20
```

> **Review comment** on `code_sub_dir = 'code'`: apply
>
> **Review comment** on the new `mnist_cpu_script` line: standardize the quotes used. Author reply: applying
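To make the fix concrete, here is a small check (mine, not from the PR) of the archive layout one of these calls should now produce: the entry script lands under the `code/` prefix while the model weights stay at the archive root.

```python
import tarfile

# Illustrative only: assumes model_cpu_tar was produced as in the diff above.
with tarfile.open(model_cpu_tar, "r:gz") as tar:
    names = tar.getnames()

assert "code/mnist.py" in names  # script archived under the code/ prefix
assert "model.pth" in names      # model stays at the archive root
```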
**Unit test (`test_adapt_to_ts_format`)** — the expected `--extra-files` argument now points at the entry module under `torchserve.DEFAULT_TS_CODE_DIR`:

```diff
@@ -150,7 +150,9 @@ def test_adapt_to_ts_format(path_exists, make_dir, subprocess_check_call, set_py
         "--export-path",
         torchserve.DEFAULT_TS_MODEL_DIRECTORY,
         "--extra-files",
-        os.path.join(environment.model_dir, environment.Environment().module_name + ".py"),
+        os.path.join(environment.model_dir,
+                     torchserve.DEFAULT_TS_CODE_DIR,
+                     environment.Environment().module_name + ".py"),
         "--version",
         "1",
     ]
```

> A reviewer attached a suggested change to the new lines here.
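For intuition (not part of the diff), assuming `DEFAULT_TS_CODE_DIR` is a `"code"` subdirectory name, the expected path changes roughly like this; the model dir and module name below are hypothetical stand-ins:

```python
import os

model_dir = "/opt/ml/model"  # typical SageMaker model dir (assumed)
module_name = "inference"    # hypothetical entry module
code_dir = "code"            # assumed value of torchserve.DEFAULT_TS_CODE_DIR

old_path = os.path.join(model_dir, module_name + ".py")
new_path = os.path.join(model_dir, code_dir, module_name + ".py")
print(old_path)  # /opt/ml/model/inference.py
print(new_path)  # /opt/ml/model/code/inference.py
```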
**`file_utils.make_tarfile`** — the test helper gains an optional `script_path` so callers can place the entry script under a subdirectory (e.g. `code/`) inside the archive:

```diff
@@ -16,9 +16,12 @@
 import tarfile
 
 
-def make_tarfile(script, model, output_path, filename="model.tar.gz"):
+def make_tarfile(script, model, output_path, filename="model.tar.gz", script_path=None):
     output_filename = os.path.join(output_path, filename)
     with tarfile.open(output_filename, "w:gz") as tar:
-        tar.add(script, arcname=os.path.basename(script))
+        if script_path:
+            tar.add(script, arcname=os.path.join(script_path, os.path.basename(script)))
+        else:
+            tar.add(script, arcname=os.path.basename(script))
         tar.add(model, arcname=os.path.basename(model))
     return output_filename
```

> **Review comment** on the new signature: default to empty string and then later you can simplify code (plus it matches the type expected) — i.e. `script_path=""`.
>
> **Review comment** on lines +22 to +25: assuming `script_path` defaults to the empty string, the branch can collapse into a single `tar.add` call; this is because joining an empty prefix leaves the name unchanged.
> **Review comment**: use string interpolation here.
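Putting the two suggestions together, here is a sketch of the simplified helper — my reading of the review thread, not code from the PR:

```python
import os
import tarfile


def make_tarfile(script, model, output_path, filename="model.tar.gz", script_path=""):
    """Tar up the script and model; script_path prefixes the script inside the archive."""
    output_filename = os.path.join(output_path, filename)
    with tarfile.open(output_filename, "w:gz") as tar:
        # os.path.join("", name) == name, so the empty-string default
        # reproduces the old root-level behavior without a branch.
        tar.add(script, arcname=os.path.join(script_path, os.path.basename(script)))
        tar.add(model, arcname=os.path.basename(model))
    return output_filename
```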