@@ -147,8 +147,9 @@ class CacheConfig:
147
147
If caching is enabled, the pipeline attempts to find a previous execution of a `Step`
148
148
that was called with the same arguments. `Step` caching only considers successful execution.
149
149
If a successful previous execution is found, the pipeline propagates the values
150
- from the previous execution rather than recomputing the `Step`. When multiple successful executions
151
- exist within the timeout period, it uses the result for the most recent successful execution.
150
+ from the previous execution rather than recomputing the `Step`.
151
+ When multiple successful executions exist within the timeout period,
152
+ it uses the result for the most recent successful execution.
152
153
153
154
154
155
Attributes:
@@ -237,8 +238,8 @@ def __init__(
237
238
):
238
239
"""Construct a `TrainingStep`, given an `EstimatorBase` instance.
239
240
240
- In addition to the `EstimatorBase` instance, the other arguments are those that are supplied to
241
- the `fit` method of the `sagemaker.estimator.Estimator`.
241
+ In addition to the `EstimatorBase` instance, the other arguments are those
242
+ that are supplied to the `fit` method of the `sagemaker.estimator.Estimator`.
242
243
243
244
Args:
244
245
name (str): The name of the `TrainingStep`.
@@ -408,8 +409,8 @@ def __init__(
408
409
):
409
410
"""Constructs a `TransformStep`, given a `Transformer` instance.
410
411
411
- In addition to the `Transformer` instance, the other arguments are those that are supplied to
412
- the `transform` method of the `sagemaker.transformer.Transformer`.
412
+ In addition to the `Transformer` instance, the other arguments are those
413
+ that are supplied to the `transform` method of the `sagemaker.transformer.Transformer`.
413
414
414
415
Args:
415
416
name (str): The name of the `TransformStep`.
@@ -614,8 +615,8 @@ def __init__(
614
615
):
615
616
"""Construct a `TuningStep`, given a `HyperparameterTuner` instance.
616
617
617
- In addition to the `HyperparameterTuner` instance, the other arguments are those that are supplied to
618
- the `fit` method of the `sagemaker.tuner.HyperparameterTuner`.
618
+ In addition to the `HyperparameterTuner` instance, the other arguments are those
619
+ that are supplied to the `fit` method of the `sagemaker.tuner.HyperparameterTuner`.
619
620
620
621
Args:
621
622
name (str): The name of the `TuningStep`.
@@ -652,7 +653,7 @@ def __init__(
652
653
job_arguments (List[str]): A list of strings to be passed into the processing job.
653
654
Defaults to `None`.
654
655
cache_config (CacheConfig): A `sagemaker.workflow.steps.CacheConfig` instance.
655
- depends_on (List[str] or List[Step]): A list of `Step` names or `Step` instances that
656
+ depends_on (List[str] or List[Step]): A list of `Step` names or `Step` instances that
656
657
this `sagemaker.workflow.steps.ProcessingStep` depends on.
657
658
retry_policies (List[RetryPolicy]): A list of retry policies.
658
659
"""
@@ -694,8 +695,9 @@ def arguments(self) -> RequestType:
694
695
695
696
@property
696
697
def properties(self):
697
- """A `Properties` object representing
698
- `DescribeHyperParameterTuningJobResponse` and
698
+ """A `Properties` object
699
+
700
+ A `Properties` object representing `DescribeHyperParameterTuningJobResponse` and
699
701
`ListTrainingJobsForHyperParameterTuningJobResponse` data model.
700
702
"""
701
703
return self._properties
@@ -713,7 +715,7 @@ def get_top_model_s3_uri(self, top_k: int, s3_bucket: str, prefix: str = "") ->
713
715
714
716
Args:
715
717
top_k (int): The index of the top performing training job
716
- tuning step stores up to 50 top performing training jobs.
718
+ tuning step stores up to 50 top performing training jobs.
717
719
A valid top_k value is from 0 to 49. The best training job
718
720
model is at index 0.
719
721
s3_bucket (str): The S3 bucket to store the training job output artifact.
0 commit comments