Skip to content

feature: add support for TF 1.14 serving with Elastic Inference accelerator. #1045

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 7 commits into from
Sep 17, 2019
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ By using TensorFlow SageMaker Estimators, you can train and host TensorFlow mode

Supported versions of TensorFlow: ``1.4.1``, ``1.5.0``, ``1.6.0``, ``1.7.0``, ``1.8.0``, ``1.9.0``, ``1.10.0``, ``1.11.0``, ``1.12.0``, ``1.13.1``, ``1.14``.

Supported versions of TensorFlow for Elastic Inference: ``1.11.0``, ``1.12.0``, ``1.13.1``
Supported versions of TensorFlow for Elastic Inference: ``1.11.0``, ``1.12.0``, ``1.13.1``, ``1.14``.

We recommend that you use the latest supported version, because that's where we focus most of our development efforts.

Expand Down
50 changes: 38 additions & 12 deletions src/sagemaker/fw_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,14 +63,18 @@
"tensorflow-scriptmode": "tensorflow-training",
"mxnet": "mxnet-training",
"tensorflow-serving": "tensorflow-inference",
"tensorflow-serving-eia": "tensorflow-inference-eia",
"mxnet-serving": "mxnet-inference",
"mxnet-serving-eia": "mxnet-inference-eia",
}

MERGED_FRAMEWORKS_LOWEST_VERSIONS = {
"tensorflow-scriptmode": [1, 13, 1],
"mxnet": [1, 4, 1],
"tensorflow-serving": [1, 13, 0],
"tensorflow-serving-eia": [1, 14, 0],
"mxnet-serving": [1, 4, 1],
"mxnet-serving-eia": [1, 4, 1],
}


Expand Down Expand Up @@ -101,7 +105,7 @@ def _is_merged_versions(framework, framework_version):
return False


def _using_merged_images(region, framework, py_version, accelerator_type, framework_version):
def _using_merged_images(region, framework, py_version, framework_version):
"""
Args:
region:
Expand All @@ -116,8 +120,11 @@ def _using_merged_images(region, framework, py_version, accelerator_type, framew
return (
(not is_gov_region)
and is_merged_versions
and (is_py3 or _is_tf_14_or_later(framework, framework_version))
and accelerator_type is None
and (
is_py3
or _is_tf_14_or_later(framework, framework_version)
or _is_mxnet_serving_141_or_later(framework, framework_version)
)
)


Expand All @@ -135,7 +142,25 @@ def _is_tf_14_or_later(framework, framework_version):
)


def _registry_id(region, framework, py_version, account, accelerator_type, framework_version):
def _is_mxnet_serving_141_or_later(framework, framework_version):
"""
Args:
framework:
framework_version:
"""
asimov_lowest_mxnet = [1, 4, 1]

version = [int(s) for s in framework_version.split(".")]

if len(version) == 2:
version.append(0)

return (
framework.startswith("mxnet-serving") and version >= asimov_lowest_mxnet[0 : len(version)]
)


def _registry_id(region, framework, py_version, account, framework_version):
"""
Args:
region:
Expand All @@ -145,7 +170,7 @@ def _registry_id(region, framework, py_version, account, accelerator_type, frame
accelerator_type:
framework_version:
"""
if _using_merged_images(region, framework, py_version, accelerator_type, framework_version):
if _using_merged_images(region, framework, py_version, framework_version):
if region in ASIMOV_OPT_IN_ACCOUNTS_BY_REGION:
return ASIMOV_OPT_IN_ACCOUNTS_BY_REGION.get(region)
return "763104351884"
Expand Down Expand Up @@ -193,7 +218,6 @@ def create_image_uri(
framework=framework,
py_version=py_version,
account=account,
accelerator_type=accelerator_type,
framework_version=framework_version,
)

Expand All @@ -218,19 +242,21 @@ def create_image_uri(
else:
device_type = "cpu"

if py_version:
tag = "{}-{}-{}".format(framework_version, device_type, py_version)
else:
tag = "{}-{}".format(framework_version, device_type)

if _accelerator_type_valid_for_framework(
framework=framework,
accelerator_type=accelerator_type,
optimized_families=optimized_families,
):
framework += "-eia"

if _using_merged_images(region, framework, py_version, accelerator_type, framework_version):
using_merged_images = _using_merged_images(region, framework, py_version, framework_version)

if not py_version or (using_merged_images and framework == "tensorflow-serving-eia"):
tag = "{}-{}".format(framework_version, device_type)
else:
tag = "{}-{}-{}".format(framework_version, device_type, py_version)

if using_merged_images:
return "{}/{}:{}".format(
get_ecr_image_uri_prefix(account, region), MERGED_FRAMEWORKS_REPO_MAP[framework], tag
)
Expand Down
2 changes: 1 addition & 1 deletion tests/integ/test_tfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ def tfs_predictor_with_accelerator(sagemaker_session, tf_full_version, cpu_insta
model = Model(
model_data=model_data,
role="SageMakerRole",
framework_version="1.13",
framework_version="1.14",
sagemaker_session=sagemaker_session,
)
predictor = model.deploy(
Expand Down
47 changes: 45 additions & 2 deletions tests/unit/test_fw_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,49 @@ def test_create_image_uri_hkg():
}


def test_tf_eia_images():
    # TF serving 1.14 with an EI accelerator resolves to the merged
    # "tensorflow-inference-eia" DLC repository; the tag carries no
    # py-version suffix.
    expected = (
        "763104351884.dkr.ecr.us-west-2.amazonaws.com/tensorflow-inference-eia:1.14.0-gpu"
    )

    actual = fw_utils.create_image_uri(
        "us-west-2",
        "tensorflow-serving",
        "ml.p3.2xlarge",
        "1.14.0",
        "py3",
        accelerator_type="ml.eia1.medium",
    )

    assert actual == expected


def test_mxnet_eia_images():
    # MXNet serving 1.4.1 with an EI accelerator resolves to the merged
    # "mxnet-inference-eia" DLC repository; unlike TF EIA, the tag keeps
    # the py-version suffix.
    cases = [
        (
            ("us-west-2", "mxnet-serving", "ml.p3.2xlarge", "1.4.1", "py2"),
            "763104351884.dkr.ecr.us-west-2.amazonaws.com/mxnet-inference-eia:1.4.1-gpu-py2",
        ),
        (
            ("us-east-1", "mxnet-serving", "ml.c4.2xlarge", "1.4.1", "py3"),
            "763104351884.dkr.ecr.us-east-1.amazonaws.com/mxnet-inference-eia:1.4.1-cpu-py3",
        ),
    ]

    for args, expected in cases:
        image_uri = fw_utils.create_image_uri(*args, accelerator_type="ml.eia1.medium")
        assert image_uri == expected


def test_create_image_uri_merged():
image_uri = fw_utils.create_image_uri(
"us-west-2", "tensorflow-scriptmode", "ml.p3.2xlarge", "1.14", "py3"
Expand Down Expand Up @@ -198,11 +241,11 @@ def test_create_image_uri_merged_py2():
assert image_uri == "520713654638.dkr.ecr.us-west-2.amazonaws.com/sagemaker-mxnet:1.4.1-gpu-py2"

image_uri = fw_utils.create_image_uri(
"us-west-2", "mxnet-serving", "ml.c4.2xlarge", "1.4.1", "py2"
"us-west-2", "mxnet-serving", "ml.c4.2xlarge", "1.3.1", "py2"
)
assert (
image_uri
== "520713654638.dkr.ecr.us-west-2.amazonaws.com/sagemaker-mxnet-serving:1.4.1-cpu-py2"
== "520713654638.dkr.ecr.us-west-2.amazonaws.com/sagemaker-mxnet-serving:1.3.1-cpu-py2"
)


Expand Down
8 changes: 7 additions & 1 deletion tests/unit/test_mxnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,13 @@ def test_mxnet_mms_version(

model = mx.create_model()

expected_image_base = _get_full_image_uri(mxnet_version, IMAGE_REPO_SERVING_NAME, "gpu")
if mxnet_version == "1.4.1":
expected_image_base = (
"763104351884.dkr.ecr.us-west-2.amazonaws.com/mxnet-inference:1.4.1-gpu-py2"

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Has Asimov team provided the MXNet 1.4.1 py2 DLC? I do not think they have done that.

)
else:
expected_image_base = _get_full_image_uri(mxnet_version, IMAGE_REPO_SERVING_NAME, "gpu")

environment = {
"Environment": {
"SAGEMAKER_SUBMIT_DIRECTORY": "s3://mybucket/sagemaker-mxnet-2017-11-06-14:14:15.672/model.tar.gz",
Expand Down