diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 3f9f6e1c67..93492770d2 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,14 +2,26 @@ CHANGELOG ========= -1.18.3.dev +1.18.4.dev ========== +* doc-fix: Remove incorrect parameter for EI TFS Python README +* feature: ``Predictor``: delete SageMaker model +* feature: ``Pipeline``: delete SageMaker model + +1.18.3.post1 +============ + +* doc-fix: fix README for PyPI + +1.18.3 +====== + * doc-fix: update information about saving models in the MXNet README * doc-fix: change ReadTheDocs links from latest to stable * doc-fix: add ``transform_fn`` information and fix ``input_fn`` signature in the MXNet README -* feature: Support for ``Predictor`` to delete endpoint configuration by default when calling ``delete_endpoint()`` -* feature: Support for ``model`` to delete SageMaker model +* feature: Support for ``Predictor`` to delete model, and delete endpoint configuration by default when calling ``delete_endpoint()`` +* feature: Support for ``Model`` to delete SageMaker model * feature: Support for ``Transformer`` to delete SageMaker model * bug-fix: Default account for SKLearnModel fixed diff --git a/README.rst b/README.rst index b468c0c912..aa509ea301 100644 --- a/README.rst +++ b/README.rst @@ -192,10 +192,13 @@ Here is an end to end example of how to use a SageMaker Estimator: # Tears down the SageMaker endpoint and endpoint configuration mxnet_predictor.delete_endpoint() + # Deletes SageMaker model + mxnet_predictor.delete_model() The example above will eventually delete both the SageMaker endpoint and endpoint configuration through ``delete_endpoint()``. If you want to keep your SageMaker endpoint configuration, use the value ``False`` for the ``delete_endpoint_config`` parameter, as shown below. .. code:: python + # Only delete the SageMaker endpoint, while keeping the corresponding endpoint configuration. 
mxnet_predictor.delete_endpoint(delete_endpoint_config=False) @@ -229,6 +232,9 @@ For more `information `__ , and use e # Tears down the endpoint container and deletes the corresponding endpoint configuration mxnet_predictor.delete_endpoint() + # Deletes the model + mxnet_predictor.delete_model() + If you have an existing model and want to deploy it locally, don't specify a sagemaker_session argument to the ``MXNetModel`` constructor. The correct session is generated when you call ``model.deploy()``. @@ -306,6 +315,9 @@ Here is an end-to-end example: # Tear down the endpoint container and delete the corresponding endpoint configuration predictor.delete_endpoint() + # Deletes the model + predictor.delete_model() + If you don't want to deploy your model locally, you can also choose to perform a Local Batch Transform Job. This is useful if you want to test your container before creating a Sagemaker Batch Transform Job. Note that the performance diff --git a/setup.py b/setup.py index 3372a45467..ce6ca0f16d 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ def read(fname): required_packages.append('enum34>=1.1.6') setup(name="sagemaker", - version='1.18.2', + version='1.18.3.post1', description="Open source library for training and deploying models on Amazon SageMaker.", packages=find_packages('src'), package_dir={'': 'src'}, diff --git a/src/sagemaker/pipeline.py b/src/sagemaker/pipeline.py index af379a6b3d..4d9d3cd19e 100644 --- a/src/sagemaker/pipeline.py +++ b/src/sagemaker/pipeline.py @@ -103,3 +103,14 @@ def deploy(self, initial_instance_count, instance_type, endpoint_name=None, tags self.sagemaker_session.endpoint_from_production_variants(self.endpoint_name, [production_variant], tags) if self.predictor_cls: return self.predictor_cls(self.endpoint_name, self.sagemaker_session) + + def delete_model(self): + """Delete the SageMaker model backing this pipeline model. 
This does not delete the list of SageMaker models used + in multiple containers to build the inference pipeline. + + """ + + if self.name is None: + raise ValueError('The SageMaker model must be created before attempting to delete.') + + self.sagemaker_session.delete_model(self.name) diff --git a/src/sagemaker/predictor.py b/src/sagemaker/predictor.py index 958b21afe6..dd47b27e17 100644 --- a/src/sagemaker/predictor.py +++ b/src/sagemaker/predictor.py @@ -56,6 +56,7 @@ def __init__(self, endpoint, sagemaker_session=None, serializer=None, deserializ self.deserializer = deserializer self.content_type = content_type or getattr(serializer, 'content_type', None) self.accept = accept or getattr(deserializer, 'accept', None) + self._model_names = self._get_model_names() def predict(self, data, initial_args=None): """Return the inference from the specified endpoint. @@ -109,16 +110,16 @@ def _delete_endpoint_config(self): """Delete the Amazon SageMaker endpoint configuration """ - endpoint_description = self.sagemaker_session.sagemaker_client.describe_endpoint(EndpointName=self.endpoint) - endpoint_config_name = endpoint_description['EndpointConfigName'] - self.sagemaker_session.delete_endpoint_config(endpoint_config_name) + self.sagemaker_session.delete_endpoint_config(self._endpoint_config_name) def delete_endpoint(self, delete_endpoint_config=True): - """Delete the Amazon SageMaker endpoint and endpoint configuration backing this predictor. + """Delete the Amazon SageMaker endpoint backing this predictor. Also delete the endpoint configuration attached + to it if delete_endpoint_config is True. Args: - delete_endpoint_config (bool): Flag to indicate whether to delete the corresponding SageMaker endpoint - configuration tied to the endpoint. If False, only the endpoint will be deleted. (default: True) + delete_endpoint_config (bool, optional): Flag to indicate whether to delete endpoint configuration together + with endpoint. Defaults to True. 
If True, both endpoint and endpoint configuration will be deleted. If + False, only endpoint will be deleted. """ if delete_endpoint_config: @@ -126,6 +127,25 @@ def delete_endpoint(self, delete_endpoint_config=True): self.sagemaker_session.delete_endpoint(self.endpoint) + def delete_model(self): + """Delete the Amazon SageMaker models backing this predictor. + + """ + for model_name in self._model_names: + self.sagemaker_session.delete_model(model_name) + + def _get_endpoint_config_desc(self): + endpoint_desc = self.sagemaker_session.sagemaker_client.describe_endpoint(EndpointName=self.endpoint) + self._endpoint_config_name = endpoint_desc['EndpointConfigName'] + endpoint_config = self.sagemaker_session.sagemaker_client.describe_endpoint_config( + EndpointConfigName=self._endpoint_config_name) + return endpoint_config + + def _get_model_names(self): + endpoint_config = self._get_endpoint_config_desc() + production_variants = endpoint_config['ProductionVariants'] + return [d['ModelName'] for d in production_variants] + class _CsvSerializer(object): def __init__(self): diff --git a/src/sagemaker/tensorflow/deploying_python.rst b/src/sagemaker/tensorflow/deploying_python.rst index 6a7fc6c78b..fdacbdd92b 100644 --- a/src/sagemaker/tensorflow/deploying_python.rst +++ b/src/sagemaker/tensorflow/deploying_python.rst @@ -31,8 +31,7 @@ TensorFlow serving on SageMaker has support for `Elastic Inference