
Commit 03102f9

Change job names in hyperparameter tuning integ tests
1 parent: 53a43f6


tests/integ/test_tuner.py

Lines changed: 44 additions & 40 deletions
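In summary, the diff swaps the `name_from_base(...)` helper and estimator-level `base_job_name` arguments for explicit per-test job names built by a new `_job_name` helper, which appends a random 8-character alphanumeric suffix so repeated runs of the same test don't collide. A minimal standalone sketch of that naming scheme (the helper body matches the diff below; the printed name is only illustrative, since the suffix is random):

    import random
    import string

    def _job_name(base):
        # 8 random alphanumeric characters keep job names unique across test runs
        return base + ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))

    print(_job_name('tune-kmeans'))  # e.g. 'tune-kmeansq3ZxR7Kd'
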
@@ -16,6 +16,8 @@
 import json
 import os
 import pickle
+import random
+import string
 import sys
 import time
 
@@ -32,7 +34,6 @@
 from sagemaker.predictor import json_deserializer
 from sagemaker.pytorch import PyTorch
 from sagemaker.tensorflow import TensorFlow
-from sagemaker.utils import name_from_base
 from sagemaker.tuner import IntegerParameter, ContinuousParameter, CategoricalParameter, HyperparameterTuner, \
     WarmStartConfig, WarmStartTypes, create_transfer_learning_tuner, create_identical_dataset_and_algorithm_tuner
 from tests.integ import DATA_DIR, PYTHON_VERSION, TUNING_DEFAULT_TIMEOUT_MINUTES
@@ -59,7 +60,7 @@ def kmeans_estimator(sagemaker_session):
 
     kmeans = KMeans(role='SageMakerRole', train_instance_count=1,
                     train_instance_type='ml.c4.xlarge',
-                    k=10, sagemaker_session=sagemaker_session, base_job_name='tk',
+                    k=10, sagemaker_session=sagemaker_session,
                     output_path='s3://{}/'.format(sagemaker_session.default_bucket()))
     # set kmeans specific hp
     kmeans.init_method = 'random'
@@ -124,21 +125,24 @@ def _tune(kmeans_estimator, kmeans_train_set, tuner=None,
     return tuner
 
 
+def _job_name(base):
+    return base + ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
+
+
 @pytest.mark.continuous_testing
-def test_tuning_kmeans(sagemaker_session,
-                       kmeans_train_set,
-                       kmeans_estimator,
+def test_tuning_kmeans(sagemaker_session, kmeans_train_set, kmeans_estimator,
                        hyperparameter_ranges):
-    _tune_and_deploy(kmeans_estimator, kmeans_train_set, sagemaker_session, hyperparameter_ranges=hyperparameter_ranges)
+    _tune_and_deploy(kmeans_estimator, kmeans_train_set, sagemaker_session,
+                     hyperparameter_ranges=hyperparameter_ranges, job_name=_job_name('tune-kmeans'))
 
 
 @pytest.mark.continuous_testing
 def test_tuning_kmeans_identical_dataset_algorithm_tuner_raw(sagemaker_session,
                                                              kmeans_train_set,
                                                              kmeans_estimator,
                                                              hyperparameter_ranges):
-    parent_tuning_job_name = name_from_base("kmeans-identical", max_length=32, short=True)
-    child_tuning_job_name = name_from_base("c-kmeans-identical", max_length=32, short=True)
+    parent_tuning_job_name = _job_name('kmeans-identical')
+    child_tuning_job_name = _job_name('c-kmeans-identical')
     _tune(kmeans_estimator, kmeans_train_set, job_name=parent_tuning_job_name,
           hyperparameter_ranges=hyperparameter_ranges, max_parallel_jobs=1, max_jobs=1)
     child_tuner = _tune(kmeans_estimator, kmeans_train_set, job_name=child_tuning_job_name,
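A note on the trade-off in this hunk: the removed `name_from_base(..., max_length=32, short=True)` calls enforced the length cap explicitly, while `_job_name` leaves it implicit. SageMaker caps `HyperParameterTuningJobName` at 32 characters, so each base prefix must stay at 24 characters or fewer to leave room for the 8-character suffix; the longest base used in these tests, 'c-kmeans-identical' (18 characters), comes in at 26. An illustrative check under that assumed limit:

    # 18 + 8 = 26 <= 32 (assumed SageMaker tuning job name limit)
    assert len(_job_name('c-kmeans-identical')) <= 32
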
@@ -149,7 +153,7 @@ def test_tuning_kmeans_identical_dataset_algorithm_tuner_raw(sagemaker_session,
 
     child_warm_start_config_response = WarmStartConfig.from_job_desc(
         sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job(
-            HyperParameterTuningJobName=child_tuning_job_name)["WarmStartConfig"])
+            HyperParameterTuningJobName=child_tuning_job_name)['WarmStartConfig'])
 
     assert child_warm_start_config_response.type == child_tuner.warm_start_config.type
     assert child_warm_start_config_response.parents == child_tuner.warm_start_config.parents
@@ -160,10 +164,10 @@ def test_tuning_kmeans_identical_dataset_algorithm_tuner(sagemaker_session,
                                                          kmeans_estimator,
                                                          hyperparameter_ranges):
     """Tests Identical dataset and algorithm use case with one parent and child job launched with
-    .identical_dataset_and_algorithm_tuner() """
+    .identical_dataset_and_algorithm_tuner()"""
 
-    parent_tuning_job_name = name_from_base("km-iden1-parent", max_length=32, short=True)
-    child_tuning_job_name = name_from_base("km-iden1-child", max_length=32, short=True)
+    parent_tuning_job_name = _job_name('km-iden1-parent')
+    child_tuning_job_name = _job_name('km-iden1-child')
 
     parent_tuner = _tune(kmeans_estimator, kmeans_train_set, job_name=parent_tuning_job_name,
                          hyperparameter_ranges=hyperparameter_ranges)
@@ -174,7 +178,7 @@ def test_tuning_kmeans_identical_dataset_algorithm_tuner(sagemaker_session,
 
     child_warm_start_config_response = WarmStartConfig.from_job_desc(
         sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job(
-            HyperParameterTuningJobName=child_tuning_job_name)["WarmStartConfig"])
+            HyperParameterTuningJobName=child_tuning_job_name)['WarmStartConfig'])
 
     assert child_warm_start_config_response.type == child_tuner.warm_start_config.type
     assert child_warm_start_config_response.parents == child_tuner.warm_start_config.parents
@@ -186,10 +190,10 @@ def test_create_tuning_kmeans_identical_dataset_algorithm_tuner(sagemaker_sessio
                                                                 kmeans_estimator,
                                                                 hyperparameter_ranges):
     """Tests Identical dataset and algorithm use case with one parent and child job launched with
-    .create_identical_dataset_and_algorithm_tuner() """
+    .create_identical_dataset_and_algorithm_tuner()"""
 
-    parent_tuning_job_name = name_from_base("km-iden2-parent", max_length=32, short=True)
-    child_tuning_job_name = name_from_base("km-iden2-child", max_length=32, short=True)
+    parent_tuning_job_name = _job_name('km-iden2-parent')
+    child_tuning_job_name = _job_name('km-iden2-child')
 
     parent_tuner = _tune(kmeans_estimator, kmeans_train_set, job_name=parent_tuning_job_name,
                          hyperparameter_ranges=hyperparameter_ranges, max_parallel_jobs=1, max_jobs=1)
@@ -202,7 +206,7 @@ def test_create_tuning_kmeans_identical_dataset_algorithm_tuner(sagemaker_sessio
 
     child_warm_start_config_response = WarmStartConfig.from_job_desc(
         sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job(
-            HyperParameterTuningJobName=child_tuning_job_name)["WarmStartConfig"])
+            HyperParameterTuningJobName=child_tuning_job_name)['WarmStartConfig'])
 
     assert child_warm_start_config_response.type == child_tuner.warm_start_config.type
     assert child_warm_start_config_response.parents == child_tuner.warm_start_config.parents
@@ -213,10 +217,10 @@ def test_transfer_learning_tuner(sagemaker_session,
                                  kmeans_estimator,
                                  hyperparameter_ranges):
     """Tests Transfer learning use case with one parent and child job launched with
-    .transfer_learning_tuner() """
+    .transfer_learning_tuner()"""
 
-    parent_tuning_job_name = name_from_base("km-tran1-parent", max_length=32, short=True)
-    child_tuning_job_name = name_from_base("km-tran1-child", max_length=32, short=True)
+    parent_tuning_job_name = _job_name('km-tran1-parent')
+    child_tuning_job_name = _job_name('km-tran1-child')
 
     parent_tuner = _tune(kmeans_estimator, kmeans_train_set, job_name=parent_tuning_job_name,
                          hyperparameter_ranges=hyperparameter_ranges, max_jobs=1, max_parallel_jobs=1)
@@ -227,7 +231,7 @@ def test_transfer_learning_tuner(sagemaker_session,
 
     child_warm_start_config_response = WarmStartConfig.from_job_desc(
         sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job(
-            HyperParameterTuningJobName=child_tuning_job_name)["WarmStartConfig"])
+            HyperParameterTuningJobName=child_tuning_job_name)['WarmStartConfig'])
 
     assert child_warm_start_config_response.type == child_tuner.warm_start_config.type
     assert child_warm_start_config_response.parents == child_tuner.warm_start_config.parents
@@ -239,10 +243,10 @@ def test_create_transfer_learning_tuner(sagemaker_session,
                                         kmeans_estimator,
                                         hyperparameter_ranges):
     """Tests Transfer learning use case with two parents and child job launched with
-    create_transfer_learning_tuner() """
-    parent_tuning_job_name_1 = name_from_base("km-tran2-parent1", max_length=32, short=True)
-    parent_tuning_job_name_2 = name_from_base("km-tran2-parent2", max_length=32, short=True)
-    child_tuning_job_name = name_from_base("km-tran2-child", max_length=32, short=True)
+    create_transfer_learning_tuner()"""
+    parent_tuning_job_name_1 = _job_name('km-tran2-parent1')
+    parent_tuning_job_name_2 = _job_name('km-tran2-parent2')
+    child_tuning_job_name = _job_name('km-tran2-child')
 
     parent_tuner_1 = _tune(kmeans_estimator, kmeans_train_set, job_name=parent_tuning_job_name_1,
                            hyperparameter_ranges=hyperparameter_ranges, max_parallel_jobs=1, max_jobs=1)
@@ -258,7 +262,7 @@ def test_create_transfer_learning_tuner(sagemaker_session,
 
     child_warm_start_config_response = WarmStartConfig.from_job_desc(
         sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job(
-            HyperParameterTuningJobName=child_tuning_job_name)["WarmStartConfig"])
+            HyperParameterTuningJobName=child_tuning_job_name)['WarmStartConfig'])
 
     assert child_warm_start_config_response.type == child_tuner.warm_start_config.type
     assert child_warm_start_config_response.parents == child_tuner.warm_start_config.parents
@@ -270,9 +274,9 @@ def test_tuning_kmeans_identical_dataset_algorithm_tuner_from_non_terminal_paren
                                                                                   kmeans_estimator,
                                                                                   hyperparameter_ranges):
     """Tests Identical dataset and algorithm use case with one non terminal parent and child job launched with
-    .identical_dataset_and_algorithm_tuner() """
-    parent_tuning_job_name = name_from_base("km-non-term", max_length=32, short=True)
-    child_tuning_job_name = name_from_base("km-non-term-child", max_length=32, short=True)
+    .identical_dataset_and_algorithm_tuner()"""
+    parent_tuning_job_name = _job_name('km-non-term')
+    child_tuning_job_name = _job_name('km-non-term-child')
 
     parent_tuner = _tune(kmeans_estimator, kmeans_train_set, job_name=parent_tuning_job_name,
                          hyperparameter_ranges=hyperparameter_ranges, wait_till_terminal=False, max_parallel_jobs=1,
@@ -296,7 +300,7 @@ def test_tuning_lda(sagemaker_session):
     feature_num = int(all_records[0].features['values'].float32_tensor.shape[0])
 
     lda = LDA(role='SageMakerRole', train_instance_type='ml.c4.xlarge', num_topics=10,
-              sagemaker_session=sagemaker_session, base_job_name='test-lda')
+              sagemaker_session=sagemaker_session)
 
     record_set = prepare_record_set_from_local_files(data_path, lda.data_location,
                                                      len(all_records), feature_num, sagemaker_session)
@@ -313,7 +317,7 @@ def test_tuning_lda(sagemaker_session):
                                hyperparameter_ranges=hyperparameter_ranges, objective_type='Maximize', max_jobs=2,
                                max_parallel_jobs=2)
 
-    tuner.fit([record_set, test_record_set], mini_batch_size=1)
+    tuner.fit([record_set, test_record_set], mini_batch_size=1, job_name=_job_name('tune-lda'))
 
     print('Started hyperparameter tuning job with name:' + tuner.latest_tuning_job.name)
 
@@ -337,8 +341,7 @@ def test_stop_tuning_job(sagemaker_session):
     train_input = np.random.rand(1000, feature_num)
 
     rcf = RandomCutForest(role='SageMakerRole', train_instance_count=1, train_instance_type='ml.c4.xlarge',
-                          num_trees=50, num_samples_per_tree=20, sagemaker_session=sagemaker_session,
-                          base_job_name='test-randomcutforest')
+                          num_trees=50, num_samples_per_tree=20, sagemaker_session=sagemaker_session)
 
     records = rcf.record_set(train_input)
     records.distribution = 'FullyReplicated'
@@ -354,7 +357,7 @@ def test_stop_tuning_job(sagemaker_session):
                                hyperparameter_ranges=hyperparameter_ranges, objective_type='Maximize', max_jobs=2,
                                max_parallel_jobs=2)
 
-    tuner.fit([records, test_records])
+    tuner.fit([records, test_records], job_name=_job_name('tune-rcf'))
 
     time.sleep(15)
 
@@ -394,7 +397,7 @@ def test_tuning_mxnet(sagemaker_session):
                                                               key_prefix='integ-test-data/mxnet_mnist/train')
         test_input = estimator.sagemaker_session.upload_data(path=os.path.join(data_path, 'test'),
                                                              key_prefix='integ-test-data/mxnet_mnist/test')
-        tuner.fit({'train': train_input, 'test': test_input})
+        tuner.fit({'train': train_input, 'test': test_input}, job_name=_job_name('tune-mxnet'))
 
     print('Started hyperparameter tuning job with name:' + tuner.latest_tuning_job.name)
 
@@ -409,7 +412,7 @@ def test_tuning_mxnet(sagemaker_session):
 
 
 @pytest.mark.continuous_testing
-@pytest.mark.skipif(PYTHON_VERSION != 'py2', reason="TensorFlow image supports only python 2.")
+@pytest.mark.skipif(PYTHON_VERSION != 'py2', reason='TensorFlow image supports only Python 2.')
 def test_tuning_tf(sagemaker_session):
     with timeout(minutes=TUNING_DEFAULT_TIMEOUT_MINUTES):
         script_path = os.path.join(DATA_DIR, 'iris', 'iris-dnn-classifier.py')
@@ -433,7 +436,7 @@ def test_tuning_tf(sagemaker_session):
         tuner = HyperparameterTuner(estimator, objective_metric_name, hyperparameter_ranges, metric_definitions,
                                     objective_type='Minimize', max_jobs=2, max_parallel_jobs=2)
 
-        tuner.fit(inputs)
+        tuner.fit(inputs, job_name=_job_name('tune-tf'))
 
     print('Started hyperparameter tuning job with name:' + tuner.latest_tuning_job.name)
 
@@ -481,7 +484,7 @@ def test_tuning_chainer(sagemaker_session):
         tuner = HyperparameterTuner(estimator, objective_metric_name, hyperparameter_ranges, metric_definitions,
                                     max_jobs=2, max_parallel_jobs=2)
 
-        tuner.fit({'train': train_input, 'test': test_input})
+        tuner.fit({'train': train_input, 'test': test_input}, job_name='tune-chainer')
 
     print('Started hyperparameter tuning job with name:' + tuner.latest_tuning_job.name)
 
@@ -525,7 +528,7 @@ def test_attach_tuning_pytorch(sagemaker_session):
 
         training_data = estimator.sagemaker_session.upload_data(path=os.path.join(mnist_dir, 'training'),
                                                                 key_prefix='integ-test-data/pytorch_mnist/training')
-        tuner.fit({'training': training_data})
+        tuner.fit({'training': training_data}, job_name=_job_name('tune-pytorch'))
 
         tuning_job_name = tuner.latest_tuning_job.name
 
@@ -591,7 +594,8 @@ def test_tuning_byo_estimator(sagemaker_session):
                                hyperparameter_ranges=hyperparameter_ranges,
                                max_jobs=2, max_parallel_jobs=2)
 
-    tuner.fit({'train': s3_train_data, 'test': s3_train_data}, include_cls_metadata=False)
+    tuner.fit({'train': s3_train_data, 'test': s3_train_data}, include_cls_metadata=False,
+              job_name=_job_name('tune-byo'))
 
     print('Started hyperparameter tuning job with name:' + tuner.latest_tuning_job.name)
 
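Design note on the random suffix: 8 characters drawn from the 62 ASCII alphanumerics give 62**8 ≈ 2.2 × 10^14 possible values (about 47.6 bits), so accidental collisions between concurrent integration-test runs are vanishingly unlikely. `random.choice` is not cryptographically secure, but the goal here is uniqueness, not secrecy. The arithmetic:

    import math
    print(62 ** 8)             # 218340105584896 possible suffixes
    print(math.log2(62 ** 8))  # ~47.6 bits of entropy per suffix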