# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
+
from __future__ import absolute_import

import importlib
@@ -641,8 +642,11 @@ def __init__(
                extract the metric from the logs. This should be defined only
                for hyperparameter tuning jobs that don't use an Amazon
                algorithm.
-            strategy (str or PipelineVariable): Strategy to be used for hyperparameter estimations
-                (default: 'Bayesian').
+            strategy (str or PipelineVariable): Strategy to be used for hyperparameter estimations.
+                More information about different strategies:
+                https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-how-it-works.html.
+                Available options are: 'Bayesian', 'Random', 'Hyperband',
+                'Grid' (default: 'Bayesian')
            objective_type (str or PipelineVariable): The type of the objective metric for
                evaluating training jobs. This value can be either 'Minimize' or
                'Maximize' (default: 'Maximize').
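For reference, a minimal sketch of how the expanded `strategy` options are passed at construction time. This snippet is not part of the commit; the estimator settings, metric name, regex, and image/role placeholders are hypothetical:

```python
from sagemaker.estimator import Estimator
from sagemaker.tuner import ContinuousParameter, HyperparameterTuner

# Hypothetical estimator; any SageMaker estimator works here.
estimator = Estimator(
    image_uri="<training-image-uri>",
    role="<execution-role-arn>",
    instance_count=1,
    instance_type="ml.m5.xlarge",
)

# 'Hyperband' is one of the four documented options:
# 'Bayesian' (default), 'Random', 'Hyperband', 'Grid'.
tuner = HyperparameterTuner(
    estimator=estimator,
    objective_metric_name="validation:accuracy",
    hyperparameter_ranges={"learning_rate": ContinuousParameter(0.001, 0.1)},
    metric_definitions=[
        {"Name": "validation:accuracy", "Regex": "accuracy=([0-9\\.]+)"}
    ],
    strategy="Hyperband",
    objective_type="Maximize",
    max_jobs=10,
    max_parallel_jobs=2,
)
```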
@@ -759,7 +763,8 @@ def __init__(
        self.autotune = autotune

    def override_resource_config(
-        self, instance_configs: Union[List[InstanceConfig], Dict[str, List[InstanceConfig]]]
+        self,
+        instance_configs: Union[List[InstanceConfig], Dict[str, List[InstanceConfig]]],
    ):
        """Override the instance configuration of the estimators used by the tuner.

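A hypothetical usage sketch of the reformatted signature, assuming the `tuner` built above; the instance types, volume size, and the `estimator-1` key are placeholders, and the `InstanceConfig` fields shown are assumed from its constructor. Matching the `Union` type, the argument is either a flat list (single-estimator tuner) or a dict keyed by estimator name (multi-estimator tuner):

```python
from sagemaker.tuner import InstanceConfig

# Single-estimator tuner: pass a plain list of InstanceConfig objects.
tuner.override_resource_config(
    instance_configs=[
        InstanceConfig(instance_count=1, instance_type="ml.m5.xlarge", volume_size=30),
        InstanceConfig(instance_count=1, instance_type="ml.m5.2xlarge", volume_size=30),
    ],
)

# Multi-estimator tuner: map each estimator name to its own list.
tuner.override_resource_config(
    instance_configs={
        "estimator-1": [InstanceConfig(instance_count=1, instance_type="ml.m5.xlarge")],
    },
)
```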
@@ -966,7 +971,7 @@ def fit(
        include_cls_metadata: Union[bool, Dict[str, bool]] = False,
        estimator_kwargs: Optional[Dict[str, dict]] = None,
        wait: bool = True,
-        **kwargs
+        **kwargs,
    ):
        """Start a hyperparameter tuning job.

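A brief, hypothetical call sketch for `fit` (bucket and prefix are placeholders). For a single-estimator tuner, `inputs` is keyed by channel name; multi-estimator tuners instead key `inputs` and `estimator_kwargs` by estimator name:

```python
# Single-estimator tuner: channel name -> S3 input (placeholder URI).
tuner.fit({"train": "s3://<bucket>/<train-prefix>/"}, wait=True)
```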
@@ -1055,7 +1060,7 @@ def _fit_with_estimator_dict(self, inputs, job_name, include_cls_metadata, estim
            allowed_keys=estimator_names,
        )

-        for (estimator_name, estimator) in self.estimator_dict.items():
+        for estimator_name, estimator in self.estimator_dict.items():
            ins = inputs.get(estimator_name, None) if inputs is not None else None
            args = estimator_kwargs.get(estimator_name, {}) if estimator_kwargs is not None else {}
            self._prepare_estimator_for_tuning(estimator, ins, job_name, **args)
@@ -1282,7 +1287,7 @@ def _attach_with_training_details_list(cls, sagemaker_session, estimator_cls, jo
            objective_metric_name_dict=objective_metric_name_dict,
            hyperparameter_ranges_dict=hyperparameter_ranges_dict,
            metric_definitions_dict=metric_definitions_dict,
-            **init_params
+            **init_params,
        )

    def deploy(
def deploy (
@@ -1297,7 +1302,7 @@ def deploy(
1297
1302
model_name = None ,
1298
1303
kms_key = None ,
1299
1304
data_capture_config = None ,
1300
- ** kwargs
1305
+ ** kwargs ,
1301
1306
):
1302
1307
"""Deploy the best trained or user specified model to an Amazon SageMaker endpoint.
1303
1308
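A hypothetical sketch of deploying the tuner's best model using the parameters visible in this hunk; the endpoint name and instance type are placeholders:

```python
from sagemaker.serializers import JSONSerializer
from sagemaker.deserializers import JSONDeserializer

# By default, deploy() serves the model from the best training job;
# model_name, kms_key, and data_capture_config keep their defaults here.
predictor = tuner.deploy(
    initial_instance_count=1,
    instance_type="ml.m5.xlarge",  # placeholder instance type
    endpoint_name="<endpoint-name>",  # hypothetical endpoint name
    serializer=JSONSerializer(),
    deserializer=JSONDeserializer(),
)
```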
@@ -1363,7 +1368,7 @@ def deploy(
            model_name=model_name,
            kms_key=kms_key,
            data_capture_config=data_capture_config,
-            **kwargs
+            **kwargs,
        )

    def stop_tuning_job(self):