|
| 1 | +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. |
| 2 | +# |
| 3 | +# Licensed under the Apache License, Version 2.0 (the "License"). You |
| 4 | +# may not use this file except in compliance with the License. A copy of |
| 5 | +# the License is located at |
| 6 | +# |
| 7 | +# http://aws.amazon.com/apache2.0/ |
| 8 | +# |
| 9 | +# or in the "license" file accompanying this file. This file is |
| 10 | +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF |
| 11 | +# ANY KIND, either express or implied. See the License for the specific |
| 12 | +# language governing permissions and limitations under the License. |
| 13 | +"""Holds mixin logic to support deployment of Model ID""" |
| 14 | +from __future__ import absolute_import |
| 15 | +import logging |
| 16 | +from typing import Type |
| 17 | +from abc import ABC, abstractmethod |
| 18 | + |
| 19 | +from sagemaker import image_uris |
| 20 | +from sagemaker.model import Model |
| 21 | +from sagemaker.djl_inference.model import _get_model_config_properties_from_hf |
| 22 | + |
| 23 | +from sagemaker.huggingface import HuggingFaceModel |
| 24 | +from sagemaker.serve.utils.local_hardware import ( |
| 25 | + _get_nb_instance, |
| 26 | +) |
| 27 | +from sagemaker.serve.model_server.tgi.prepare import _create_dir_structure |
| 28 | +from sagemaker.serve.utils.predictors import TgiLocalModePredictor |
| 29 | +from sagemaker.serve.utils.types import ModelServer |
| 30 | +from sagemaker.serve.mode.function_pointers import Mode |
| 31 | +from sagemaker.serve.utils.telemetry_logger import _capture_telemetry |
| 32 | +from sagemaker.base_predictor import PredictorBase |
| 33 | + |
| 34 | +logger = logging.getLogger(__name__) |
| 35 | + |
| 36 | +_CODE_FOLDER = "code" |
| 37 | + |
| 38 | + |
class TEI(ABC):
    """TEI build logic for ModelBuilder().

    Mixin that prepares a HuggingFace Model ID for deployment behind the
    Text Embeddings Inference (TEI) container, in either local-container or
    SageMaker-endpoint mode. Concrete state (session, model path, env vars,
    schema builder, ...) is populated by the ModelBuilder that mixes this in.
    """

    def __init__(self):
        # All attributes are placeholders filled in by the enclosing
        # ModelBuilder before the _build_for_tei() flow is invoked.
        self.model = None
        self.serve_settings = None
        self.sagemaker_session = None
        self.model_path = None
        self.dependencies = None
        self.modes = None
        self.mode = None
        self.model_server = None
        self.image_uri = None
        self._is_custom_image_uri = False
        self.image_config = None
        self.vpc_config = None
        self._original_deploy = None
        self.hf_model_config = None
        self._default_tensor_parallel_degree = None
        self._default_data_type = None
        self._default_max_tokens = None
        self.pysdk_model = None
        self.schema_builder = None
        self.env_vars = None
        self.nb_instance_type = None
        self.ram_usage_model_load = None
        self.secret_key = None
        self.jumpstart = None
        self.role_arn = None

    @abstractmethod
    def _prepare_for_mode(self):
        """Prepare artifacts/config for the current deployment mode.

        Implemented by the concrete ModelBuilder. In SageMaker-endpoint mode
        it returns a ``(model_data, env_vars)`` tuple (see the deploy wrapper).
        """

    @abstractmethod
    def _get_client_translators(self):
        """Return the client-side serializer/deserializer translators.

        Implemented by the concrete ModelBuilder.
        """

    def _set_to_tgi(self):
        """Force the model server to TGI, warning if a different one was set."""
        if self.model_server != ModelServer.TGI:
            messaging = (
                "HuggingFace Model ID support on model server: "
                f"{self.model_server} is not currently supported. "
                f"Defaulting to {ModelServer.TGI}"
            )
            logger.warning(messaging)
            self.model_server = ModelServer.TGI

    def _create_tei_model(self, **kwargs) -> Type[Model]:
        """Build a ``HuggingFaceModel`` wired for the TEI container.

        Resolves the TEI inference image URI when none was supplied, then
        monkey-patches the model's ``deploy`` with
        :meth:`_tei_model_builder_deploy_wrapper` so mode handling and
        serializer/deserializer wiring happen at deploy time.

        Returns:
            The configured ``HuggingFaceModel`` (pysdk model).
        """
        # Prefer the detected notebook instance type unless the caller
        # explicitly passed one.
        if self.nb_instance_type and "instance_type" not in kwargs:
            kwargs.update({"instance_type": self.nb_instance_type})

        if not self.image_uri:
            self.image_uri = image_uris.retrieve(
                "huggingface-tei",
                image_scope="inference",
                instance_type=kwargs.get("instance_type"),
                region=self.sagemaker_session.boto_region_name,
            )

        pysdk_model = HuggingFaceModel(
            image_uri=self.image_uri,
            image_config=self.image_config,
            vpc_config=self.vpc_config,
            env=self.env_vars,
            role=self.role_arn,
            sagemaker_session=self.sagemaker_session,
        )

        # Fixed duplicated word ("the the") in the original log message.
        logger.info("Detected %s. Proceeding with the deployment.", self.image_uri)

        # Keep a handle on the real deploy so the wrapper can delegate to it.
        self._original_deploy = pysdk_model.deploy
        pysdk_model.deploy = self._tei_model_builder_deploy_wrapper
        return pysdk_model

    @_capture_telemetry("tei.deploy")
    def _tei_model_builder_deploy_wrapper(self, *args, **kwargs) -> Type[PredictorBase]:
        """Replacement for ``pysdk_model.deploy`` with mode-aware behavior.

        Honors a ``mode`` keyword override, spins up a local TGI container in
        LOCAL_CONTAINER mode, or prepares env/model-data and delegates to the
        original ``deploy`` for SAGEMAKER_ENDPOINT mode.

        Raises:
            ValueError: if an unsupported ``mode`` is passed, or no instance
                type is available for endpoint deployment.
        """
        timeout = kwargs.get("model_data_download_timeout")
        if timeout:
            self.pysdk_model.env.update({"MODEL_LOADING_TIMEOUT": str(timeout)})

        if "mode" in kwargs and kwargs.get("mode") != self.mode:
            overwrite_mode = kwargs.get("mode")
            # mode overwritten by customer during model.deploy()
            logger.warning(
                "Deploying in %s Mode, overriding existing configurations set for %s mode",
                overwrite_mode,
                self.mode,
            )

            if overwrite_mode == Mode.SAGEMAKER_ENDPOINT:
                self.mode = self.pysdk_model.mode = Mode.SAGEMAKER_ENDPOINT
            elif overwrite_mode == Mode.LOCAL_CONTAINER:
                self._prepare_for_mode()
                self.mode = self.pysdk_model.mode = Mode.LOCAL_CONTAINER
            else:
                raise ValueError("Mode %s is not supported!" % overwrite_mode)

        serializer = self.schema_builder.input_serializer
        deserializer = self.schema_builder._output_deserializer
        if self.mode == Mode.LOCAL_CONTAINER:
            # ``timeout`` was already read above; default to 30 minutes.
            predictor = TgiLocalModePredictor(
                self.modes[str(Mode.LOCAL_CONTAINER)], serializer, deserializer
            )

            self.modes[str(Mode.LOCAL_CONTAINER)].create_server(
                self.image_uri,
                timeout if timeout else 1800,
                None,
                predictor,
                self.pysdk_model.env,
                jumpstart=False,
            )

            return predictor

        # SAGEMAKER_ENDPOINT path: scrub kwargs the real deploy won't accept.
        if "mode" in kwargs:
            del kwargs["mode"]
        if "role" in kwargs:
            self.pysdk_model.role = kwargs.get("role")
            del kwargs["role"]

        # set model_data to uncompressed s3 dict
        self.pysdk_model.model_data, env_vars = self._prepare_for_mode()
        self.env_vars.update(env_vars)
        self.pysdk_model.env.update(self.env_vars)

        # if the weights have been cached via local container mode -> set to offline
        if str(Mode.LOCAL_CONTAINER) in self.modes:
            self.pysdk_model.env.update({"TRANSFORMERS_OFFLINE": "1"})
        else:
            # if has not been built for local container we must use cache
            # that hosting has write access to.
            self.pysdk_model.env["TRANSFORMERS_CACHE"] = "/tmp"
            self.pysdk_model.env["HUGGINGFACE_HUB_CACHE"] = "/tmp"

        if "endpoint_logging" not in kwargs:
            kwargs["endpoint_logging"] = True

        if not self.nb_instance_type and "instance_type" not in kwargs:
            raise ValueError(
                "Instance type must be provided when deploying " "to SageMaker Endpoint mode."
            )

        if "initial_instance_count" not in kwargs:
            kwargs.update({"initial_instance_count": 1})

        predictor = self._original_deploy(*args, **kwargs)

        predictor.serializer = serializer
        predictor.deserializer = deserializer
        return predictor

    def _build_for_hf_tei(self):
        """Build the pysdk model for a HuggingFace Model ID served by TEI.

        Returns:
            The prepared pysdk model with the deploy wrapper installed.
        """
        self.nb_instance_type = _get_nb_instance()

        _create_dir_structure(self.model_path)
        # NOTE(review): when __init__ has run, ``pysdk_model`` exists (set to
        # None), so this hasattr check is always True and the branch below is
        # skipped — presumably the mixin is used without calling TEI.__init__;
        # confirm against ModelBuilder's construction path.
        if not hasattr(self, "pysdk_model"):
            self.env_vars.update({"HF_MODEL_ID": self.model})
            self.hf_model_config = _get_model_config_properties_from_hf(
                self.model, self.env_vars.get("HUGGING_FACE_HUB_TOKEN")
            )

        self.pysdk_model = self._create_tei_model()

        if self.mode == Mode.LOCAL_CONTAINER:
            self._prepare_for_mode()

        return self.pysdk_model

    def _build_for_tei(self):
        """Entry point of the TEI build flow; returns the prepared pysdk model."""
        self.secret_key = None

        self._set_to_tgi()

        self.pysdk_model = self._build_for_hf_tei()
        return self.pysdk_model
0 commit comments