Skip to content

Commit d41602f

Browse files
deprecation: deprecate Serverless Lambda model-predictor
1 parent 9c14fb3 commit d41602f

File tree

14 files changed

+28
-1425
lines changed

14 files changed

+28
-1425
lines changed

doc/api/inference/model.rst

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,3 @@ Model
1616
:undoc-members:
1717
:show-inheritance:
1818

19-
.. autoclass:: sagemaker.serverless.model.LambdaModel
20-
:members:
21-
:undoc-members:
22-
:show-inheritance:

doc/api/inference/predictors.rst

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,3 @@ Make real-time predictions against SageMaker endpoints with Python objects
77
:members:
88
:undoc-members:
99
:show-inheritance:
10-
11-
.. autoclass:: sagemaker.serverless.predictor.LambdaPredictor
12-
:members:
13-
:undoc-members:
14-
:show-inheritance:

doc/overview.rst

Lines changed: 0 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1063,50 +1063,6 @@ You can also find these notebooks in the **Advanced Functionality** section of t
10631063
For information about using sample notebooks in a SageMaker notebook instance, see `Use Example Notebooks <https://docs.aws.amazon.com/sagemaker/latest/dg/howitworks-nbexamples.html>`__
10641064
in the AWS documentation.
10651065
1066-
********************
1067-
Serverless Inference
1068-
********************
1069-
1070-
You can use the SageMaker Python SDK to perform serverless inference on Lambda.
1071-
1072-
To deploy models to Lambda, you must complete the following prerequisites:
1073-
1074-
- `Package your model and inference code as a container image. <https://docs.aws.amazon.com/lambda/latest/dg/images-create.html>`_
1075-
- `Create a role that lists Lambda as a trusted entity. <https://docs.aws.amazon.com/lambda/latest/dg/lambda-intro-execution-role.html#permissions-executionrole-console>`_
1076-
1077-
After completing the prerequisites, you can deploy your model to Lambda using
1078-
the `LambdaModel`_ class.
1079-
1080-
.. code:: python
1081-
1082-
from sagemaker.serverless import LambdaModel
1083-
1084-
image_uri = "123456789012.dkr.ecr.us-west-2.amazonaws.com/my-lambda-repository:latest"
1085-
role = "arn:aws:iam::123456789012:role/MyLambdaExecutionRole"
1086-
1087-
model = LambdaModel(image_uri=image_uri, role=role)
1088-
predictor = model.deploy("my-lambda-function", timeout=20, memory_size=4092)
1089-
1090-
The ``deploy`` method returns a `LambdaPredictor`_ instance. Use the
1091-
`LambdaPredictor`_ ``predict`` method to perform inference on Lambda.
1092-
1093-
.. code:: python
1094-
1095-
url = "https://example.com/cat.jpeg"
1096-
predictor.predict({"url": url}) # {'class': 'tabby'}
1097-
1098-
Once you are done performing inference on Lambda, free the `LambdaModel`_ and
1099-
`LambdaPredictor`_ resources using the ``delete_model`` and ``delete_predictor``
1100-
methods.
1101-
1102-
.. code:: python
1103-
1104-
model.delete_model()
1105-
predictor.delete_predictor()
1106-
1107-
.. _LambdaModel : https://sagemaker.readthedocs.io/en/stable/api/inference/model.html#sagemaker.serverless.model.LambdaModel
1108-
.. _LambdaPredictor : https://sagemaker.readthedocs.io/en/stable/api/inference/predictors.html#sagemaker.serverless.predictor.LambdaPredictor
1109-
11101066
******************
11111067
SageMaker Workflow
11121068
******************

src/sagemaker/serverless/model.py

Lines changed: 6 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -12,84 +12,12 @@
1212
# language governing permissions and limitations under the License.
1313
"""Models that can be deployed to serverless compute."""
1414
from __future__ import absolute_import
15+
from sagemaker.deprecations import deprecated
1516

16-
import time
17-
from typing import Optional
1817

19-
import boto3
20-
import botocore
18+
@deprecated
19+
class LambdaModel:
20+
"""A model that can be deployed to Lambda.
2121
22-
from sagemaker.model import ModelBase
23-
from sagemaker.deprecations import deprecation_warning
24-
from .predictor import LambdaPredictor
25-
26-
27-
@deprecation_warning(
28-
msg="Based on customer experience and feedback an"
29-
" alternative support will be added in near future",
30-
date="10/27/2021",
31-
)
32-
class LambdaModel(ModelBase):
33-
"""A model that can be deployed to Lambda."""
34-
35-
def __init__(
36-
self, image_uri: str, role: str, client: Optional[botocore.client.BaseClient] = None
37-
) -> None:
38-
"""Initialize instance attributes.
39-
40-
Arguments:
41-
image_uri: URI of a container image in the Amazon ECR registry. The image
42-
should contain a handler that performs inference.
43-
role: The Amazon Resource Name (ARN) of the IAM role that Lambda will assume
44-
when it performs inference
45-
client: The Lambda client used to interact with Lambda.
46-
"""
47-
self._client = client or boto3.client("lambda")
48-
self._image_uri = image_uri
49-
self._role = role
50-
51-
def deploy(
52-
self, function_name: str, timeout: int, memory_size: int, wait: bool = True
53-
) -> LambdaPredictor:
54-
"""Create a Lambda function using the image specified in the constructor.
55-
56-
Arguments:
57-
function_name: The name of the function.
58-
timeout: The number of seconds that the function can run for before being terminated.
59-
memory_size: The amount of memory in MB that the function has access to.
60-
wait: If true, wait until the deployment completes (default: True).
61-
62-
Returns:
63-
A LambdaPredictor instance that performs inference using the specified image.
64-
"""
65-
response = self._client.create_function(
66-
FunctionName=function_name,
67-
PackageType="Image",
68-
Role=self._role,
69-
Code={
70-
"ImageUri": self._image_uri,
71-
},
72-
Timeout=timeout,
73-
MemorySize=memory_size,
74-
)
75-
76-
if not wait:
77-
return LambdaPredictor(function_name, client=self._client)
78-
79-
# Poll function state.
80-
polling_interval = 5
81-
while response["State"] == "Pending":
82-
time.sleep(polling_interval)
83-
response = self._client.get_function_configuration(FunctionName=function_name)
84-
85-
if response["State"] != "Active":
86-
raise RuntimeError("Failed to deploy model to Lambda: %s" % response["StateReason"])
87-
88-
return LambdaPredictor(function_name, client=self._client)
89-
90-
def delete_model(self) -> None:
91-
"""Destroy resources associated with this model.
92-
93-
This method does not delete the image specified in the constructor. As
94-
a result, this method is a no-op.
95-
"""
22+
note:: Deprecated in versions > v2.66.0. Alternative support will be added in the near future.
23+
"""

src/sagemaker/serverless/predictor.py

Lines changed: 6 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -12,76 +12,12 @@
1212
# language governing permissions and limitations under the License.
1313
"""Predictors that are hosted on serverless compute."""
1414
from __future__ import absolute_import
15+
from sagemaker.deprecations import deprecated
1516

16-
from typing import Optional, Tuple
1717

18-
import boto3
19-
import botocore
18+
@deprecated
19+
class LambdaPredictor:
20+
"""A deployed model hosted on Lambda.
2021
21-
from sagemaker import deserializers, serializers
22-
from sagemaker.predictor import PredictorBase
23-
from sagemaker.deprecations import deprecation_warning
24-
25-
26-
@deprecation_warning(
27-
msg="Based on customer experience and feedback an"
28-
" alternative support will be added in near future",
29-
date="10/27/2021",
30-
)
31-
class LambdaPredictor(PredictorBase):
32-
"""A deployed model hosted on Lambda."""
33-
34-
def __init__(
35-
self, function_name: str, client: Optional[botocore.client.BaseClient] = None
36-
) -> None:
37-
"""Initialize instance attributes.
38-
39-
Arguments:
40-
function_name: The name of the function.
41-
client: The Lambda client used to interact with Lambda.
42-
"""
43-
self._client = client or boto3.client("lambda")
44-
self._function_name = function_name
45-
self._serializer = serializers.JSONSerializer()
46-
self._deserializer = deserializers.JSONDeserializer()
47-
48-
def predict(self, data: dict) -> dict:
49-
"""Invoke the Lambda function specified in the constructor.
50-
51-
This function is synchronous. It will only return after the function
52-
has produced a prediction.
53-
54-
Arguments:
55-
data: The data sent to the Lambda function as input.
56-
57-
Returns:
58-
The data returned by the Lambda function.
59-
"""
60-
response = self._client.invoke(
61-
FunctionName=self._function_name,
62-
InvocationType="RequestResponse",
63-
Payload=self._serializer.serialize(data),
64-
)
65-
return self._deserializer.deserialize(
66-
response["Payload"],
67-
response["ResponseMetadata"]["HTTPHeaders"]["content-type"],
68-
)
69-
70-
def delete_predictor(self) -> None:
71-
"""Destroy the Lambda function specified in the constructor."""
72-
self._client.delete_function(FunctionName=self._function_name)
73-
74-
@property
75-
def content_type(self) -> str:
76-
"""The MIME type of the data sent to the Lambda function."""
77-
return self._serializer.CONTENT_TYPE
78-
79-
@property
80-
def accept(self) -> Tuple[str]:
81-
"""The content type(s) that are expected from the Lambda function."""
82-
return self._deserializer.ACCEPT
83-
84-
@property
85-
def function_name(self) -> str:
86-
"""The name of the Lambda function this predictor invokes."""
87-
return self._function_name
22+
note:: Deprecated in versions > v2.66.0. Alternative support will be added in the near future.
23+
"""

tests/data/serverless/Dockerfile

Lines changed: 0 additions & 12 deletions
This file was deleted.

tests/data/serverless/README.md

Lines changed: 0 additions & 6 deletions
This file was deleted.

tests/data/serverless/app.py

Lines changed: 0 additions & 34 deletions
This file was deleted.

0 commit comments

Comments
 (0)