Skip to content

feature: Add support for SparkML v3.3 #3420

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Dec 2, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ In order to host a SparkML model in SageMaker, it should be serialized with ``ML

For more information on MLeap, see https://github.com/combust/mleap.

Supported major version of Spark: 2.4 (MLeap version - 0.9.6)
Supported major version of Spark: 3.3 (MLeap version - 0.20.0)

Here is an example on how to create an instance of ``SparkMLModel`` class and use ``deploy()`` method to create an
endpoint which can be used to perform prediction against your trained SparkML Model.
Expand Down
31 changes: 31 additions & 0 deletions src/sagemaker/image_uri_config/sparkml-serving.json
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,37 @@
"us-west-2": "246618743249"
},
"repository": "sagemaker-sparkml-serving"
},
"3.3": {
"registries": {
"af-south-1": "510948584623",
"ap-east-1": "651117190479",
"ap-northeast-1": "354813040037",
"ap-northeast-2": "366743142698",
"ap-northeast-3": "867004704886",
"ap-south-1": "720646828776",
"ap-southeast-1": "121021644041",
"ap-southeast-2": "783357654285",
"ap-southeast-3": "951798379941",
"ca-central-1": "341280168497",
"cn-north-1": "450853457545",
"cn-northwest-1": "451049120500",
"eu-central-1": "492215442770",
"eu-north-1": "662702820516",
"eu-west-1": "141502667606",
"eu-west-2": "764974769150",
"eu-west-3": "659782779980",
"eu-south-1": "978288397137",
"me-south-1": "801668240914",
"sa-east-1": "737474898029",
"us-east-1": "683313688378",
"us-east-2": "257758044811",
"us-gov-west-1": "414596584902",
"us-iso-east-1": "833128469047",
"us-west-1": "746614075791",
"us-west-2": "246618743249"
},
"repository": "sagemaker-sparkml-serving"
}
}
}
4 changes: 2 additions & 2 deletions src/sagemaker/sparkml/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def __init__(
self,
model_data: Union[str, PipelineVariable],
role: Optional[str] = None,
spark_version: str = "2.4",
spark_version: str = "3.3",
sagemaker_session: Optional[Session] = None,
**kwargs,
):
Expand All @@ -95,7 +95,7 @@ def __init__(
artifacts. After the endpoint is created, the inference code
might use the IAM role, if it needs to access an AWS resource.
spark_version (str): Spark version you want to use for executing the
inference (default: '2.4').
inference (default: '3.3').
sagemaker_session (sagemaker.session.Session): Session object which
manages interactions with Amazon SageMaker APIs and any other
AWS services needed. If not specified, the estimator creates one
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/sagemaker/image_uris/test_sparkml.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
"us-west-1": "746614075791",
"us-west-2": "246618743249",
}
VERSIONS = ["2.2", "2.4"]
VERSIONS = ["2.2", "2.4", "3.3"]


@pytest.mark.parametrize("version", VERSIONS)
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/sagemaker/workflow/test_steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -918,7 +918,7 @@ def test_create_model_step_with_model_pipeline(tfo, time, sagemaker_session):
},
{
"Environment": {"SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT": "text/csv"},
"Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-sparkml-serving:2.4",
"Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-sparkml-serving:3.3",
"ModelDataUrl": "s3://bucket/model_2.tar.gz",
},
],
Expand Down
4 changes: 2 additions & 2 deletions tests/unit/test_pipeline_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def test_prepare_container_def(tfo, time, sagemaker_session):
{
"Environment": {"SAGEMAKER_DEFAULT_INVOCATIONS_ACCEPT": "text/csv"},
"Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com"
+ "/sagemaker-sparkml-serving:2.4",
+ "/sagemaker-sparkml-serving:3.3",
"ModelDataUrl": "s3://bucket/model_2.tar.gz",
},
]
Expand Down Expand Up @@ -338,7 +338,7 @@ def test_network_isolation(tfo, time, sagemaker_session):
"ModelDataUrl": "s3://bucket/model_1.tar.gz",
},
{
"Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-sparkml-serving:2.4",
"Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-sparkml-serving:3.3",
"Environment": {},
"ModelDataUrl": "s3://bucket/model_2.tar.gz",
},
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/test_sparkml_serving.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def sagemaker_session():

def test_sparkml_model(sagemaker_session):
sparkml = SparkMLModel(sagemaker_session=sagemaker_session, model_data=MODEL_DATA, role=ROLE)
assert sparkml.image_uri == image_uris.retrieve("sparkml-serving", REGION, version="2.4")
assert sparkml.image_uri == image_uris.retrieve("sparkml-serving", REGION, version="3.3")


def test_predictor_type(sagemaker_session):
Expand Down