
Commit b3f2375

Improvement of the tuner documentation
1 parent 8d22789 commit b3f2375

File tree

1 file changed: +13 -8 lines

src/sagemaker/tuner.py (+13 -8)
@@ -11,6 +11,7 @@
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
 """Placeholder docstring"""
+
 from __future__ import absolute_import
 
 import importlib
@@ -641,8 +642,11 @@ def __init__(
                 extract the metric from the logs. This should be defined only
                 for hyperparameter tuning jobs that don't use an Amazon
                 algorithm.
-            strategy (str or PipelineVariable): Strategy to be used for hyperparameter estimations
-                (default: 'Bayesian').
+            strategy (str or PipelineVariable): Strategy to be used for hyperparameter estimations.
+                More information about different strategies:
+                https://docs.aws.amazon.com/sagemaker/latest/dg/automatic-model-tuning-how-it-works.html.
+                Available options are: 'Bayesian', 'Random', 'Hyperband',
+                'Grid' (default: 'Bayesian')
             objective_type (str or PipelineVariable): The type of the objective metric for
                 evaluating training jobs. This value can be either 'Minimize' or
                 'Maximize' (default: 'Maximize').
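
For context, a minimal sketch of how the strategy options documented above are passed to HyperparameterTuner; the estimator, metric name, regex, and ranges below are illustrative assumptions, not part of this commit:

from sagemaker.tuner import ContinuousParameter, HyperparameterTuner

tuner = HyperparameterTuner(
    estimator=estimator,  # any previously configured SageMaker estimator
    objective_metric_name="validation:accuracy",
    hyperparameter_ranges={"learning_rate": ContinuousParameter(1e-5, 1e-1)},
    metric_definitions=[
        {"Name": "validation:accuracy", "Regex": "val_acc=([0-9\\.]+)"}
    ],
    strategy="Hyperband",  # 'Bayesian' (default), 'Random', 'Hyperband', or 'Grid'
    objective_type="Maximize",
    max_jobs=10,
    max_parallel_jobs=2,
)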
@@ -759,7 +763,8 @@ def __init__(
         self.autotune = autotune
 
     def override_resource_config(
-        self, instance_configs: Union[List[InstanceConfig], Dict[str, List[InstanceConfig]]]
+        self,
+        instance_configs: Union[List[InstanceConfig], Dict[str, List[InstanceConfig]]],
     ):
         """Override the instance configuration of the estimators used by the tuner.
 
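
As a usage note, a sketch of calling the reformatted method on the tuner from the sketch above; the InstanceConfig fields shown are assumptions inferred from the type hint, and the instance types are illustrative:

from sagemaker.tuner import InstanceConfig

tuner.override_resource_config(
    instance_configs=[
        InstanceConfig(instance_count=1, instance_type="ml.m5.xlarge"),
        InstanceConfig(instance_count=1, instance_type="ml.m5.2xlarge"),
    ]
)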
@@ -966,7 +971,7 @@ def fit(
         include_cls_metadata: Union[bool, Dict[str, bool]] = False,
         estimator_kwargs: Optional[Dict[str, dict]] = None,
         wait: bool = True,
-        **kwargs
+        **kwargs,
     ):
         """Start a hyperparameter tuning job.
 
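
For reference, a sketch of starting the tuning job through the signature reformatted above; the channel name and S3 URI are placeholders:

tuner.fit(
    inputs={"train": "s3://my-bucket/train/"},  # placeholder S3 location
    wait=True,
)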
@@ -1055,7 +1060,7 @@ def _fit_with_estimator_dict(self, inputs, job_name, include_cls_metadata, estim
             allowed_keys=estimator_names,
         )
 
-        for (estimator_name, estimator) in self.estimator_dict.items():
+        for estimator_name, estimator in self.estimator_dict.items():
             ins = inputs.get(estimator_name, None) if inputs is not None else None
             args = estimator_kwargs.get(estimator_name, {}) if estimator_kwargs is not None else {}
             self._prepare_estimator_for_tuning(estimator, ins, job_name, **args)
@@ -1282,7 +1287,7 @@ def _attach_with_training_details_list(cls, sagemaker_session, estimator_cls, jo
             objective_metric_name_dict=objective_metric_name_dict,
             hyperparameter_ranges_dict=hyperparameter_ranges_dict,
             metric_definitions_dict=metric_definitions_dict,
-            **init_params
+            **init_params,
         )
 
     def deploy(
@@ -1297,7 +1302,7 @@ def deploy(
         model_name=None,
         kms_key=None,
         data_capture_config=None,
-        **kwargs
+        **kwargs,
     ):
         """Deploy the best trained or user specified model to an Amazon SageMaker endpoint.
 
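
And a sketch of deploying the best model once the tuning job above finishes; the instance settings are illustrative:

predictor = tuner.deploy(
    initial_instance_count=1,
    instance_type="ml.m5.xlarge",
)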
@@ -1363,7 +1368,7 @@ def deploy(
             model_name=model_name,
             kms_key=kms_key,
             data_capture_config=data_capture_config,
-            **kwargs
+            **kwargs,
         )
 
     def stop_tuning_job(self):
