breaking: rename sagemaker.tensorflow.serving to sagemaker.tensorflow.model #1541

Merged · 3 commits · Jun 3, 2020
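The substance of this change is a one-for-one rename of the public TensorFlow Serving classes. In user code the migration is a plain import swap, as in the sketch below; behavior is unchanged:

# Before this PR:
from sagemaker.tensorflow.serving import Model, Predictor

# After this PR:
from sagemaker.tensorflow.model import TensorFlowModel, TensorFlowPredictor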
4 changes: 2 additions & 2 deletions doc/sagemaker.tensorflow.rst
@@ -13,15 +13,15 @@ TensorFlow Estimator
TensorFlow Serving Model
------------------------

-.. autoclass:: sagemaker.tensorflow.serving.Model
+.. autoclass:: sagemaker.tensorflow.model.TensorFlowModel
:members:
:undoc-members:
:show-inheritance:

TensorFlow Serving Predictor
----------------------------

-.. autoclass:: sagemaker.tensorflow.serving.Predictor
+.. autoclass:: sagemaker.tensorflow.model.TensorFlowPredictor
:members:
:undoc-members:
:show-inheritance:
4 changes: 2 additions & 2 deletions src/sagemaker/cli/tensorflow.py
@@ -68,9 +68,9 @@ def create_model(self, model_url):
Args:
model_url:
"""
-from sagemaker.tensorflow.serving import Model
+from sagemaker.tensorflow.model import TensorFlowModel

-return Model(
+return TensorFlowModel(
model_data=model_url,
role=self.role_name,
entry_point=self.script,
19 changes: 9 additions & 10 deletions src/sagemaker/rl/estimator.py
@@ -21,6 +21,7 @@
import sagemaker.fw_utils as fw_utils
from sagemaker.model import FrameworkModel, SAGEMAKER_OUTPUT_LOCATION
from sagemaker.mxnet.model import MXNetModel
+from sagemaker.tensorflow.model import TensorFlowModel
from sagemaker.vpc_utils import VPC_CONFIG_DEFAULT

logger = logging.getLogger("sagemaker")
@@ -90,7 +91,7 @@ def __init__(
:meth:`~sagemaker.amazon.estimator.Framework.deploy` creates a hosted
SageMaker endpoint and based on the specified framework returns an
:class:`~sagemaker.amazon.mxnet.model.MXNetPredictor` or
-:class:`~sagemaker.amazon.tensorflow.serving.Predictor` instance that
+:class:`~sagemaker.tensorflow.model.TensorFlowPredictor` instance that
can be used to perform inference against the hosted model.

Technical documentation on preparing RLEstimator scripts for
@@ -205,15 +206,15 @@ def create_model(
sagemaker.model.FrameworkModel: Depending on input parameters returns
one of the following:

-* :class:`~sagemaker.model.FrameworkModel` - if ``image_name`` was specified
+* :class:`~sagemaker.model.FrameworkModel` - if ``image_name`` is specified
on the estimator;
-* :class:`~sagemaker.mxnet.MXNetModel` - if ``image_name`` wasn't specified and
-  MXNet was used as the RL backend;
-* :class:`~sagemaker.tensorflow.serving.Model` - if ``image_name`` wasn't specified
-  and TensorFlow was used as the RL backend.
+* :class:`~sagemaker.mxnet.MXNetModel` - if ``image_name`` isn't specified and
+  MXNet is used as the RL backend;
+* :class:`~sagemaker.tensorflow.model.TensorFlowModel` - if ``image_name`` isn't
+  specified and TensorFlow is used as the RL backend.

Raises:
-ValueError: If image_name was not specified and framework enum is not valid.
+ValueError: If image_name is not specified and framework enum is not valid.
"""
base_args = dict(
model_data=self.model_data,
@@ -252,9 +253,7 @@ def create_model(
)

if self.framework == RLFramework.TENSORFLOW.value:
-from sagemaker.tensorflow.serving import Model as tfsModel
-
-return tfsModel(framework_version=self.framework_version, **base_args)
+return TensorFlowModel(framework_version=self.framework_version, **base_args)
if self.framework == RLFramework.MXNET.value:
return MXNetModel(
framework_version=self.framework_version, py_version=PYTHON_VERSION, **extended_args
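With the module-level import above, create_model on an RL estimator backed by TensorFlow now returns a TensorFlowModel directly. A hedged sketch of that call path; the script name, toolkit version, role, and instance settings are illustrative, not taken from this PR:

from sagemaker.rl import RLEstimator, RLFramework, RLToolkit

estimator = RLEstimator(
    entry_point="train_coach.py",        # illustrative script
    toolkit=RLToolkit.COACH,
    toolkit_version="0.11.1",            # assumed version
    framework=RLFramework.TENSORFLOW,
    role="SageMakerRole",                # placeholder role
    train_instance_count=1,
    train_instance_type="ml.m4.xlarge",
)
# After estimator.fit(...), this now returns a TensorFlowModel
# (previously sagemaker.tensorflow.serving.Model):
model = estimator.create_model()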
1 change: 1 addition & 0 deletions src/sagemaker/tensorflow/__init__.py
@@ -14,3 +14,4 @@
from __future__ import absolute_import

from sagemaker.tensorflow.estimator import TensorFlow # noqa: F401 (imported but unused)
+from sagemaker.tensorflow.model import TensorFlowModel, TensorFlowPredictor  # noqa: F401
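Because of this re-export, both classes are also reachable from the shorter package path; a minimal sanity check, assuming the branch is installed:

from sagemaker.tensorflow import TensorFlowModel, TensorFlowPredictor

# The framework identifier carried over from the old module:
assert TensorFlowModel.__framework_name__ == "tensorflow-serving"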
25 changes: 13 additions & 12 deletions src/sagemaker/tensorflow/estimator.py
@@ -23,7 +23,7 @@
from sagemaker.estimator import Framework
import sagemaker.fw_utils as fw
from sagemaker.tensorflow import defaults
-from sagemaker.tensorflow.serving import Model
+from sagemaker.tensorflow.model import TensorFlowModel
from sagemaker.transformer import Transformer
from sagemaker.vpc_utils import VPC_CONFIG_DEFAULT

@@ -249,12 +249,13 @@ def create_model(
dependencies=None,
**kwargs
):
"""Create a ``Model`` object that can be used for creating SageMaker model entities,
deploying to a SageMaker endpoint, or starting SageMaker Batch Transform jobs.
"""Create a ``TensorFlowModel`` object that can be used for creating
SageMaker model entities, deploying to a SageMaker endpoint, or
starting SageMaker Batch Transform jobs.

Args:
-role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``Model``, which is also
-    used during transform jobs. If not specified, the role from the Estimator is used.
+role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``TensorFlowModel``, which
+    is also used during transform jobs. If not specified, the role from the Estimator
+    is used.
vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on the
model. Default: use subnets and security groups from this Estimator.

@@ -267,11 +268,12 @@
source code dependencies aside from the entry point file (default: None).
dependencies (list[str]): A list of paths to directories (absolute or relative) with
any additional libraries that will be exported to the container (default: None).
-**kwargs: Additional kwargs passed to :class:`~sagemaker.tensorflow.serving.Model`.
+**kwargs: Additional kwargs passed to
+    :class:`~sagemaker.tensorflow.model.TensorFlowModel`.

Returns:
-sagemaker.tensorflow.serving.Model: A ``Model`` object.
-See :class:`~sagemaker.tensorflow.serving.Model` for full details.
+sagemaker.tensorflow.model.TensorFlowModel: A ``TensorFlowModel`` object.
+See :class:`~sagemaker.tensorflow.model.TensorFlowModel` for full details.
"""
if "image" not in kwargs:
kwargs["image"] = self.image_name
@@ -282,7 +284,7 @@
if "enable_network_isolation" not in kwargs:
kwargs["enable_network_isolation"] = self.enable_network_isolation()

-return Model(
+return TensorFlowModel(
model_data=self.model_data,
role=role or self.role,
container_log_level=self.container_log_level,
@@ -418,9 +420,8 @@ def transformer(
container in MB.
tags (list[dict]): List of tags for labeling a transform job. If none specified, then
the tags used for the training job are used for the transform job.
-role (str): The ``ExecutionRoleArn`` IAM Role ARN for the ``Model``, which is also
-    used during transform jobs. If not specified, the role from the Estimator will be
-    used.
+role (str): The IAM Role ARN for the ``TensorFlowModel``, which is also used
+    during transform jobs. If not specified, the role from the Estimator is used.
volume_kms_key (str): Optional. KMS key ID for encrypting the volume attached to the ML
compute instance (default: None).
entry_point (str): Path (absolute or relative) to the local Python source file which
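Taken together, the estimator-side flow after this change looks like the sketch below, assuming a completed training job; deploy() returns an instance of the model's predictor_cls, which defaults to TensorFlowPredictor. The instance type is illustrative:

# Hypothetical continuation of an existing TensorFlow estimator run:
model = estimator.create_model(role="SageMakerRole")   # a TensorFlowModel
predictor = model.deploy(
    initial_instance_count=1,
    instance_type="ml.c5.xlarge",                      # illustrative instance type
)
# predictor is a TensorFlowPredictor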
src/sagemaker/tensorflow/serving.py → src/sagemaker/tensorflow/model.py (renamed)
@@ -10,7 +10,7 @@
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
"""Classes for using TensorFlow on Amazon SageMaker for inference."""
from __future__ import absolute_import

import logging
@@ -22,7 +22,7 @@
from sagemaker.tensorflow.defaults import TF_VERSION


-class Predictor(sagemaker.RealTimePredictor):
+class TensorFlowPredictor(sagemaker.RealTimePredictor):
"""A ``RealTimePredictor`` implementation for inference against TensorFlow
Serving endpoints.
"""
@@ -37,7 +37,7 @@ def __init__(
model_name=None,
model_version=None,
):
"""Initialize a ``TFSPredictor``. See ``sagemaker.RealTimePredictor``
"""Initialize a ``TensorFlowPredictor``. See :class:`~sagemaker.predictor.RealTimePredictor`
for more info about parameters.

Args:
@@ -61,7 +61,7 @@ def __init__(
that should handle the request. If not specified, the latest
version of the model will be used.
"""
-super(Predictor, self).__init__(
+super(TensorFlowPredictor, self).__init__(
endpoint_name, sagemaker_session, serializer, deserializer, content_type
)

@@ -115,13 +115,13 @@ def predict(self, data, initial_args=None):
else:
args["CustomAttributes"] = self._model_attributes

-return super(Predictor, self).predict(data, args)
+return super(TensorFlowPredictor, self).predict(data, args)


-class Model(sagemaker.model.FrameworkModel):
-    """Placeholder docstring"""
+class TensorFlowModel(sagemaker.model.FrameworkModel):
+    """A ``FrameworkModel`` implementation for inference with TensorFlow Serving."""

-FRAMEWORK_NAME = "tensorflow-serving"
+__framework_name__ = "tensorflow-serving"
LOG_LEVEL_PARAM_NAME = "SAGEMAKER_TFS_NGINX_LOGLEVEL"
LOG_LEVEL_MAP = {
logging.DEBUG: "debug",
@@ -140,7 +140,7 @@ def __init__(
image=None,
framework_version=TF_VERSION,
container_log_level=None,
-predictor_cls=Predictor,
+predictor_cls=TensorFlowPredictor,
**kwargs
):
"""Initialize a Model.
@@ -171,15 +171,15 @@ def __init__(
:class:`~sagemaker.model.FrameworkModel` and
:class:`~sagemaker.model.Model`.
"""
-super(Model, self).__init__(
+super(TensorFlowModel, self).__init__(
model_data=model_data,
role=role,
image=image,
predictor_cls=predictor_cls,
entry_point=entry_point,
**kwargs
)
-self._framework_version = framework_version
+self.framework_version = framework_version
self._container_log_level = container_log_level

def deploy(
@@ -196,10 +196,10 @@
):

if accelerator_type and not self._eia_supported():
msg = "The TensorFlow version %s doesn't support EIA." % self._framework_version

msg = "The TensorFlow version %s doesn't support EIA." % self.framework_version
raise AttributeError(msg)
return super(Model, self).deploy(

return super(TensorFlowModel, self).deploy(
initial_instance_count=initial_instance_count,
instance_type=instance_type,
accelerator_type=accelerator_type,
Expand All @@ -213,7 +213,7 @@ def deploy(

def _eia_supported(self):
"""Return true if TF version is EIA enabled"""
-return [int(s) for s in self._framework_version.split(".")][:2] <= self.LATEST_EIA_VERSION
+return [int(s) for s in self.framework_version.split(".")][:2] <= self.LATEST_EIA_VERSION
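The EIA gate compares only the major and minor components of framework_version, using Python's lexicographic list comparison. A standalone illustration; the LATEST_EIA_VERSION value here is assumed for the example, the real constant lives on the class:

LATEST_EIA_VERSION = [2, 0]  # assumed value for illustration
print([int(s) for s in "1.15.2".split(".")][:2])                        # [1, 15]
print([int(s) for s in "1.15.2".split(".")][:2] <= LATEST_EIA_VERSION)  # True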

def prepare_container_def(self, instance_type, accelerator_type=None):
"""
@@ -249,12 +249,12 @@ def _get_container_env(self):
if not self._container_log_level:
return self.env

-if self._container_log_level not in Model.LOG_LEVEL_MAP:
+if self._container_log_level not in self.LOG_LEVEL_MAP:
logging.warning("ignoring invalid container log level: %s", self._container_log_level)
return self.env

env = dict(self.env)
-env[Model.LOG_LEVEL_PARAM_NAME] = Model.LOG_LEVEL_MAP[self._container_log_level]
+env[self.LOG_LEVEL_PARAM_NAME] = self.LOG_LEVEL_MAP[self._container_log_level]
return env
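Switching the lookups from Model.* to self.* keeps subclasses free to override the table. The translation itself is a plain dict lookup; a minimal sketch using the one mapping visible in this diff (the full LOG_LEVEL_MAP is elided above):

import logging

LOG_LEVEL_MAP = {logging.DEBUG: "debug"}  # abbreviated for the sketch
env = {"SAGEMAKER_TFS_NGINX_LOGLEVEL": LOG_LEVEL_MAP[logging.DEBUG]}
assert env["SAGEMAKER_TFS_NGINX_LOGLEVEL"] == "debug"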

def _get_image_uri(self, instance_type, accelerator_type=None):
@@ -269,9 +269,9 @@ def _get_image_uri(self, instance_type, accelerator_type=None):
region_name = self.sagemaker_session.boto_region_name
return create_image_uri(
region_name,
-Model.FRAMEWORK_NAME,
+self.__framework_name__,
instance_type,
-self._framework_version,
+self.framework_version,
accelerator_type=accelerator_type,
)

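End to end, the renamed model class is used as before; a hedged sketch with placeholder S3 URI, role, and version. Note that framework_version is now a public attribute (it was _framework_version):

from sagemaker.tensorflow.model import TensorFlowModel

model = TensorFlowModel(
    model_data="s3://my-bucket/model.tar.gz",  # placeholder artifact location
    role="SageMakerRole",                      # placeholder role
    framework_version="1.15.2",                # assumed version
)
print(model.framework_version)                 # public attribute after this PR
predictor = model.deploy(initial_instance_count=1, instance_type="ml.c5.xlarge")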
8 changes: 4 additions & 4 deletions tests/integ/test_data_capture_config.py
@@ -18,7 +18,7 @@
import tests.integ
import tests.integ.timeout
from sagemaker.model_monitor import DataCaptureConfig, NetworkConfig
-from sagemaker.tensorflow.serving import Model
+from sagemaker.tensorflow.model import TensorFlowModel
from sagemaker.utils import unique_name_from_base
from tests.integ.retry import retries

@@ -49,7 +49,7 @@ def test_enabling_data_capture_on_endpoint_shows_correct_data_capture_status(
key_prefix="tensorflow-serving/models",
)
with tests.integ.timeout.timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
-model = Model(
+model = TensorFlowModel(
model_data=model_data,
role=ROLE,
framework_version=tf_full_version,
@@ -106,7 +106,7 @@ def test_disabling_data_capture_on_endpoint_shows_correct_data_capture_status(
key_prefix="tensorflow-serving/models",
)
with tests.integ.timeout.timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
-model = Model(
+model = TensorFlowModel(
model_data=model_data,
role=ROLE,
framework_version=tf_full_version,
@@ -192,7 +192,7 @@ def test_updating_data_capture_on_endpoint_shows_correct_data_capture_status(
key_prefix="tensorflow-serving/models",
)
with tests.integ.timeout.timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
-model = Model(
+model = TensorFlowModel(
model_data=model_data,
role=ROLE,
framework_version=tf_full_version,
4 changes: 2 additions & 2 deletions tests/integ/test_model_monitor.py
@@ -36,7 +36,7 @@
from sagemaker.model_monitor import CronExpressionGenerator
from sagemaker.processing import ProcessingInput
from sagemaker.processing import ProcessingOutput
-from sagemaker.tensorflow.serving import Model
+from sagemaker.tensorflow.model import TensorFlowModel
from sagemaker.utils import unique_name_from_base

from tests.integ.kms_utils import get_or_create_kms_key
@@ -97,7 +97,7 @@ def predictor(sagemaker_session, tf_full_version):
with tests.integ.timeout.timeout_and_delete_endpoint_by_name(
endpoint_name=endpoint_name, sagemaker_session=sagemaker_session, hours=2
):
-model = Model(
+model = TensorFlowModel(
model_data=model_data,
role=ROLE,
framework_version=tf_full_version,
12 changes: 6 additions & 6 deletions tests/integ/test_tfs.py
@@ -23,7 +23,7 @@
import sagemaker.utils
import tests.integ
import tests.integ.timeout
-from sagemaker.tensorflow.serving import Model, Predictor
+from sagemaker.tensorflow.model import TensorFlowModel, TensorFlowPredictor


@pytest.fixture(scope="module")
@@ -34,7 +34,7 @@ def tfs_predictor(sagemaker_session, tf_full_version):
key_prefix="tensorflow-serving/models",
)
with tests.integ.timeout.timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
-model = Model(
+model = TensorFlowModel(
model_data=model_data,
role="SageMakerRole",
framework_version=tf_full_version,
@@ -62,7 +62,7 @@ def tfs_predictor_with_model_and_entry_point_same_tar(
os.path.join(tests.integ.DATA_DIR, "tfs/tfs-test-model-with-inference"), tmpdir
)

-model = Model(
+model = TensorFlowModel(
model_data="file://" + model_tar,
role="SageMakerRole",
framework_version=tf_full_version,
@@ -93,7 +93,7 @@ def tfs_predictor_with_model_and_entry_point_and_dependencies(
tests.integ.DATA_DIR, "tensorflow-serving-test-model.tar.gz"
)

-model = Model(
+model = TensorFlowModel(
entry_point=entry_point,
model_data=model_data,
role="SageMakerRole",
@@ -118,7 +118,7 @@ def tfs_predictor_with_accelerator(sagemaker_session, ei_tf_full_version, cpu_in
key_prefix="tensorflow-serving/models",
)
with tests.integ.timeout.timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
-model = Model(
+model = TensorFlowModel(
model_data=model_data,
role="SageMakerRole",
framework_version=ei_tf_full_version,
@@ -235,7 +235,7 @@ def test_predict_csv(tfs_predictor):
input_data = "1.0,2.0,5.0\n1.0,2.0,5.0"
expected_result = {"predictions": [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}

-predictor = Predictor(
+predictor = TensorFlowPredictor(
tfs_predictor.endpoint,
tfs_predictor.sagemaker_session,
serializer=sagemaker.predictor.csv_serializer,
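As test_predict_csv shows, a TensorFlowPredictor can also be pointed at an already-running endpoint. A minimal sketch; the endpoint name is a placeholder and the session defaults to a freshly created one:

from sagemaker.predictor import csv_serializer
from sagemaker.tensorflow.model import TensorFlowPredictor

predictor = TensorFlowPredictor(
    "my-tfs-endpoint",          # assumed existing endpoint name
    serializer=csv_serializer,  # CSV request body; JSON response by default
)
result = predictor.predict("1.0,2.0,5.0\n1.0,2.0,5.0")
# -> {"predictions": [[...], [...]]}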