|
12 | 12 | # language governing permissions and limitations under the License.
|
13 | 13 | from __future__ import absolute_import
|
14 | 14 |
|
# Hand-curated spec fixtures for models that need special-case handling in
# tests (here: the FP16 FLAN-T5-XXL text2text model, which ships a prepacked
# hosting artifact and pinned inference dependencies).
SPECIAL_MODEL_SPECS_DICT = {
    "huggingface-text2text-flan-t5-xxl-fp16": {
        "model_id": "huggingface-text2text-flan-t5-xxl-fp16",
        "url": "https://huggingface.co/google/flan-t5-xxl",
        "version": "1.0.0",
        "min_sdk_version": "2.130.0",
        # Inference-only model: no training or incremental training support.
        "training_supported": False,
        "incremental_training_supported": False,
        "hosting_ecr_specs": {
            "framework": "pytorch",
            "framework_version": "1.12.0",
            "py_version": "py38",
            "huggingface_transformers_version": "4.17.0",
        },
        "hosting_artifact_key": "huggingface-infer/infer-huggingface-text2text-flan-t5-xxl-fp16.tar.gz",
        "hosting_script_key": "source-directory-tarballs/huggingface/inference/text2text/v1.0.2/sourcedir.tar.gz",
        # Prepacked artifact bundles model weights with inference code.
        "hosting_prepacked_artifact_key": (
            "huggingface-infer/prepack/v1.0.0/infer-prepack-huggingface-text2text-flan-t5-xxl-fp16.tar.gz"
        ),
        "hosting_prepacked_artifact_version": "1.0.0",
        "inference_vulnerable": False,
        # Pinned pip requirements installed into the inference container.
        "inference_dependencies": [
            "accelerate==0.16.0",
            "bitsandbytes==0.37.0",
            "filelock==3.9.0",
            "huggingface-hub==0.12.0",
            "regex==2022.7.9",
            "tokenizers==0.13.2",
            "transformers==4.26.0",
        ],
        "inference_vulnerabilities": [],
        "training_vulnerable": False,
        "training_dependencies": [],
        "training_vulnerabilities": [],
        "deprecated": False,
        # Environment variables baked into the serving container.
        "inference_environment_variables": [
            {"name": "SAGEMAKER_PROGRAM", "type": "text", "default": "inference.py", "scope": "container"},
            {"name": "SAGEMAKER_SUBMIT_DIRECTORY", "type": "text", "default": "/opt/ml/model/code", "scope": "container"},
            {"name": "SAGEMAKER_CONTAINER_LOG_LEVEL", "type": "text", "default": "20", "scope": "container"},
            {"name": "MODEL_CACHE_ROOT", "type": "text", "default": "/opt/ml/model", "scope": "container"},
            {"name": "SAGEMAKER_ENV", "type": "text", "default": "1", "scope": "container"},
            {"name": "SAGEMAKER_MODEL_SERVER_WORKERS", "type": "text", "default": "1", "scope": "container"},
            {"name": "SAGEMAKER_MODEL_SERVER_TIMEOUT", "type": "text", "default": "3600", "scope": "container"},
        ],
        "metrics": [],
        "default_inference_instance_type": "ml.g5.12xlarge",
        "supported_inference_instance_types": [
            "ml.g5.12xlarge",
            "ml.g5.24xlarge",
            "ml.p3.8xlarge",
            "ml.p3.16xlarge",
            "ml.g4dn.12xlarge",
        ],
    }
}
| 99 | + |
15 | 100 | PROTOTYPICAL_MODEL_SPECS_DICT = {
|
16 | 101 | "pytorch-eqa-bert-base-cased": {
|
17 | 102 | "model_id": "pytorch-eqa-bert-base-cased",
|
|
1093 | 1178 | "training_artifact_key": "pytorch-training/train-pytorch-ic-mobilenet-v2.tar.gz",
|
1094 | 1179 | "hosting_script_key": "source-directory-tarballs/pytorch/inference/ic/v1.0.0/sourcedir.tar.gz",
|
1095 | 1180 | "training_script_key": "source-directory-tarballs/pytorch/transfer_learning/ic/v1.0.0/sourcedir.tar.gz",
|
| 1181 | + "hosting_prepacked_artifact_key": None, |
1096 | 1182 | "hyperparameters": [
|
1097 | 1183 | {
|
1098 | 1184 | "name": "epochs",
|
|
0 commit comments