Skip to content

Commit 8efb167

Browse files
aykulkarni, akulk314, ajaykarpur
authored
change: upgrade MMS version and update command (#71)
Co-authored-by: akulk314 <[email protected]> Co-authored-by: Ajay Karpur <[email protected]>
1 parent c469854 commit 8efb167

File tree

7 files changed

+16
-16
lines changed

7 files changed

+16
-16
lines changed

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ A container provides an effectively isolated environment, ensuring a consistent
1818
Containerizing your model and code enables fast and reliable deployment of your model.
1919

2020
The **SageMaker Inference Toolkit** implements a model serving stack and can be easily added to any Docker container, making it [deployable to SageMaker](https://aws.amazon.com/sagemaker/deploy/).
21-
This library's serving stack is built on [Multi Model Server](https://github.com/awslabs/mxnet-model-server), and it can serve your own models or those you trained on SageMaker using [machine learning frameworks with native SageMaker support](https://docs.aws.amazon.com/sagemaker/latest/dg/frameworks.html).
21+
This library's serving stack is built on [Multi Model Server](https://github.com/awslabs/multi-model-server), and it can serve your own models or those you trained on SageMaker using [machine learning frameworks with native SageMaker support](https://docs.aws.amazon.com/sagemaker/latest/dg/frameworks.html).
2222
If you use a [prebuilt SageMaker Docker image for inference](https://docs.aws.amazon.com/sagemaker/latest/dg/pre-built-containers-frameworks-deep-learning.html), this library may already be included.
2323

2424
For more information, see the Amazon SageMaker Developer Guide sections on [building your own container with Multi Model Server](https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html) and [using your own models](https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms.html).
@@ -98,7 +98,7 @@ To use the SageMaker Inference Toolkit, you need to do the following:
9898

9999
2. Implement a handler service that is executed by the model server.
100100
([Here is an example](https://github.com/aws/sagemaker-pytorch-serving-container/blob/master/src/sagemaker_pytorch_serving_container/handler_service.py) of a handler service.)
101-
For more information on how to define your `HANDLER_SERVICE` file, see [the MMS custom service documentation](https://github.com/awslabs/mxnet-model-server/blob/master/docs/custom_service.md).
101+
For more information on how to define your `HANDLER_SERVICE` file, see [the MMS custom service documentation](https://github.com/awslabs/multi-model-server/blob/master/docs/custom_service.md).
102102

103103
``` python
104104
from sagemaker_inference.default_handler_service import DefaultHandlerService
@@ -112,7 +112,7 @@ To use the SageMaker Inference Toolkit, you need to do the following:
112112
This class extends ``DefaultHandlerService``, which defines the following:
113113
- The ``handle`` method is invoked for all incoming inference requests to the model server.
114114
- The ``initialize`` method is invoked at model server start up.
115-
Based on: https://github.com/awslabs/mxnet-model-server/blob/master/docs/custom_service.md
115+
Based on: https://github.com/awslabs/multi-model-server/blob/master/docs/custom_service.md
116116
"""
117117
def __init__(self):
118118
transformer = Transformer(default_inference_handler=DefaultPytorchInferenceHandler())

src/sagemaker_inference/default_handler_service.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ class DefaultHandlerService(object):
2727
- The ``handle`` method is invoked for all incoming inference requests to the model server.
2828
- The ``initialize`` method is invoked at model server start up.
2929
30-
Implementation of: https://github.com/awslabs/mxnet-model-server/blob/master/docs/custom_service.md
30+
Implementation of: https://github.com/awslabs/multi-model-server/blob/master/docs/custom_service.md
3131
"""
3232

3333
def __init__(self, transformer=None):
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Based on: https://github.com/awslabs/mxnet-model-server/blob/master/docs/configuration.md
1+
# Based on: https://github.com/awslabs/multi-model-server/blob/master/docs/configuration.md
22
enable_envvars_config=true
33
decode_input_request=false
44
load_models=ALL

src/sagemaker_inference/model_server.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,8 +79,8 @@ def start_model_server(handler_service=DEFAULT_HANDLER_SERVICE):
7979
if os.path.exists(REQUIREMENTS_PATH):
8080
_install_requirements()
8181

82-
mxnet_model_server_cmd = [
83-
"mxnet-model-server",
82+
multi_model_server_cmd = [
83+
"multi-model-server",
8484
"--start",
8585
"--model-store",
8686
MODEL_STORE,
@@ -90,8 +90,8 @@ def start_model_server(handler_service=DEFAULT_HANDLER_SERVICE):
9090
DEFAULT_MMS_LOG_FILE,
9191
]
9292

93-
logger.info(mxnet_model_server_cmd)
94-
subprocess.Popen(mxnet_model_server_cmd)
93+
logger.info(multi_model_server_cmd)
94+
subprocess.Popen(multi_model_server_cmd)
9595

9696
mms_process = _retrieve_mms_server_process()
9797

test/container/dummy/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ ENV SAGEMAKER_MULTI_MODEL=true
1111
ENV SAGEMAKER_BIND_TO_PORT=${SAGEMAKER_BIND_TO_PORT:-8080}
1212

1313
# Update MMS version
14-
RUN pip3 install mxnet-model-server==1.0.8
14+
RUN pip3 install multi-model-server
1515

1616
# Install Mxnet (for handler_service)
1717
RUN pip3 install mxnet

test/container/mxnet/Dockerfile

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
99
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
1010
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true
1111

12-
ARG MMS_VERSION=1.0.8
12+
ARG MMS_VERSION=1.1.2
1313
ARG MX_URL=https://aws-mxnet-pypi.s3-us-west-2.amazonaws.com/1.6.0/aws_mxnet_mkl-1.6.0-py2.py3-none-manylinux1_x86_64.whl
1414
ARG PYTHON=python3
1515
ARG PYTHON_PIP=python3-pip
@@ -73,7 +73,7 @@ COPY mxnet/sagemaker_inference.tar.gz /sagemaker_inference.tar.gz
7373
RUN ${PIP} install --no-cache-dir \
7474
${MX_URL} \
7575
git+git://github.com/dmlc/[email protected] \
76-
mxnet-model-server==$MMS_VERSION \
76+
multi-model-server==$MMS_VERSION \
7777
keras-mxnet==2.2.4.1 \
7878
numpy==1.17.4 \
7979
onnx==1.4.1 \
@@ -122,4 +122,4 @@ RUN curl https://aws-dlc-licenses.s3.amazonaws.com/aws-mxnet-1.6.0/license.txt -
122122

123123
EXPOSE 8080 8081
124124
ENTRYPOINT ["python", "/usr/local/bin/dockerd_entrypoint.py"]
125-
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
125+
CMD ["multi-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]

test/unit/test_model_server.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -50,8 +50,8 @@ def test_start_model_server_default_service_handler(
5050
exists.assert_called_once_with(REQUIREMENTS_PATH)
5151
install_requirements.assert_called_once_with()
5252

53-
mxnet_model_server_cmd = [
54-
"mxnet-model-server",
53+
multi_model_server_cmd = [
54+
"multi-model-server",
5555
"--start",
5656
"--model-store",
5757
model_server.DEFAULT_MMS_MODEL_DIRECTORY,
@@ -61,7 +61,7 @@ def test_start_model_server_default_service_handler(
6161
model_server.DEFAULT_MMS_LOG_FILE,
6262
]
6363

64-
subprocess_popen.assert_called_once_with(mxnet_model_server_cmd)
64+
subprocess_popen.assert_called_once_with(multi_model_server_cmd)
6565
sigterm.assert_called_once_with(retrieve.return_value)
6666

6767

0 commit comments

Comments
 (0)