@@ -3236,6 +3236,7 @@ async def put_trained_model(
         model_type: t.Optional[
             t.Union["t.Literal['lang_ident', 'pytorch', 'tree_ensemble']", str]
         ] = None,
+        platform_architecture: t.Optional[str] = None,
         pretty: t.Optional[bool] = None,
         tags: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
     ) -> ObjectApiResponse[t.Any]:
@@ -3264,6 +3265,14 @@ async def put_trained_model(
             model in memory. This property is supported only if defer_definition_decompression
             is true or the model definition is not supplied.
         :param model_type: The model type.
+        :param platform_architecture: The platform architecture (if applicable) of the
+            trained model. If the model only works on one platform, because it is heavily
+            optimized for a particular processor architecture and OS combination, then
+            this field specifies which. The format of the string must match the platform
+            identifiers used by Elasticsearch, so one of `linux-x86_64`, `linux-aarch64`,
+            `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models
+            (those that work independently of processor architecture or OS features),
+            leave this field unset.
         :param tags: An array of tags to organize the model.
         """
         if model_id in SKIP_IN_PATH:
@@ -3295,6 +3304,8 @@ async def put_trained_model(
             __body["model_size_bytes"] = model_size_bytes
         if model_type is not None:
             __body["model_type"] = model_type
+        if platform_architecture is not None:
+            __body["platform_architecture"] = platform_architecture
         if pretty is not None:
             __query["pretty"] = pretty
         if tags is not None:
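A minimal usage sketch of the new keyword, assuming the change lands in the async ML namespace client (client.ml.put_trained_model), a local cluster URL, and a hypothetical model ID; a real request for a PyTorch model would typically also supply fields such as inference_config and input, omitted here for brevity:

import asyncio

from elasticsearch import AsyncElasticsearch


async def main() -> None:
    # Assumed local cluster; swap in your own endpoint and credentials.
    client = AsyncElasticsearch("http://localhost:9200")
    try:
        resp = await client.ml.put_trained_model(
            model_id="my-pytorch-model",  # hypothetical model ID
            model_type="pytorch",
            # Set only for models tied to one processor/OS combination;
            # leave unset for portable models.
            platform_architecture="linux-x86_64",
        )
        print(resp)
    finally:
        await client.close()


asyncio.run(main())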