Added scaling_type to Integer and Continuous ranges #697


Merged
merged 5 commits on Mar 14, 2019

6 changes: 3 additions & 3 deletions CHANGELOG.rst
@@ -2,13 +2,13 @@
CHANGELOG
=========


1.18.5.dev
==========
1.18.5
======

* bug-fix: pass kms id as parameter for uploading code with Server side encryption
* feature: ``PipelineModel``: Create a Transformer from a PipelineModel
* bug-fix: ``AlgorithmEstimator``: Make SupportedHyperParameters optional
* feature: ``Hyperparameter``: Support scaling hyperparameters
* doc-fix: Remove duplicate content from main README.rst, /tensorflow/README.rst, and /sklearn/README.rst and add links to readthedocs content

1.18.4
2 changes: 1 addition & 1 deletion setup.py
@@ -33,7 +33,7 @@ def read(fname):
    required_packages.append('enum34>=1.1.6')

setup(name="sagemaker",
      version='1.18.4',
      version='1.18.5',
      description="Open source library for training and deploying models on Amazon SageMaker.",
      packages=find_packages('src'),
      package_dir={'': 'src'},
8 changes: 6 additions & 2 deletions src/sagemaker/parameter.py
@@ -25,15 +25,18 @@ class ParameterRange(object):

    __all_types__ = ('Continuous', 'Categorical', 'Integer')

    def __init__(self, min_value, max_value):
    def __init__(self, min_value, max_value, scaling_type='Auto'):
        """Initialize a parameter range.

        Args:
            min_value (float or int): The minimum value for the range.
            max_value (float or int): The maximum value for the range.
            scaling_type (str): The scale used for searching the range during tuning (default: 'Auto').
                Valid values: 'Auto', 'Linear', 'Logarithmic' and 'ReverseLogarithmic'.
        """
        self.min_value = min_value
        self.max_value = max_value
        self.scaling_type = scaling_type

    def is_valid(self, value):
        """Determine if a value is valid within this ParameterRange.
@@ -62,7 +65,8 @@ def as_tuning_range(self, name):
        """
        return {'Name': name,
                'MinValue': to_str(self.min_value),
                'MaxValue': to_str(self.max_value)}
                'MaxValue': to_str(self.max_value),
                'ScalingType': self.scaling_type}


class ContinuousParameter(ParameterRange):
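For orientation, here is a minimal usage sketch of the new argument, assuming only the ContinuousParameter and IntegerParameter classes defined in src/sagemaker/parameter.py above; the hyperparameter names are illustrative:

from sagemaker.parameter import ContinuousParameter, IntegerParameter

# scaling_type defaults to 'Auto'; 'Linear', 'Logarithmic' and
# 'ReverseLogarithmic' are the other accepted values.
learning_rate = ContinuousParameter(0.0001, 0.1, scaling_type='Logarithmic')
num_epoch = IntegerParameter(10, 50)  # keeps the 'Auto' default

# The chosen scale is carried through to the tuning request:
print(learning_rate.as_tuning_range('learning_rate'))
# {'Name': 'learning_rate', 'MinValue': '0.0001', 'MaxValue': '0.1',
#  'ScalingType': 'Logarithmic'}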
22 changes: 14 additions & 8 deletions src/sagemaker/tuner.py
@@ -512,17 +512,23 @@ def _validate_parameter_ranges(self):
                        parameter_range = self._hyperparameter_ranges[value.name]

                        if isinstance(parameter_range, ParameterRange):
                            for _, parameter_range_value in parameter_range.__dict__.items():
                                # Categorical ranges
                                if isinstance(parameter_range_value, list):
                                    for categorical_value in parameter_range_value:
                                        value.validate(categorical_value)
                                # Continuous, Integer ranges
                                else:
                                    value.validate(parameter_range_value)
                            self._validate_parameter_range(value, parameter_range)
                    except KeyError:
                        pass

    def _validate_parameter_range(self, value_hp, parameter_range):
        for parameter_range_key, parameter_range_value in parameter_range.__dict__.items():
            if parameter_range_key == 'scaling_type':
                continue

            # Categorical ranges
            if isinstance(parameter_range_value, list):
                for categorical_value in parameter_range_value:
                    value_hp.validate(categorical_value)
            # Continuous, Integer ranges
            else:
                value_hp.validate(parameter_range_value)

    def transfer_learning_tuner(self, additional_parents=None, estimator=None):
        """Creates a new ``HyperparameterTuner`` by copying the request fields from the provided parent to the new
        instance of ``HyperparameterTuner``. Followed by addition of warm start configuration with the type as
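To make the validation change concrete, a small self-contained sketch of what _validate_parameter_range now does with a range's attributes; the range names are illustrative and print() stands in for the hyperparameter validate() call used by the tuner:

from sagemaker.parameter import ContinuousParameter, IntegerParameter

ranges = {
    'learning_rate': ContinuousParameter(0.0001, 0.1, scaling_type='Logarithmic'),
    'mini_batch_size': IntegerParameter(32, 512),
}

# Mirror the loop above: walk each range's attributes, skip the new
# scaling_type field, and treat everything else (min/max, or the value list
# for categorical ranges) as a candidate value to check.
for name, parameter_range in ranges.items():
    for key, value in parameter_range.__dict__.items():
        if key == 'scaling_type':
            continue
        print(name, key, value)  # the tuner calls hyperparameter.validate(value) here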
6 changes: 4 additions & 2 deletions tests/unit/test_airflow.py
@@ -502,15 +502,17 @@ def test_framework_tuning_config(sagemaker_session):
                'ContinuousParameterRanges': [{
                    'Name': 'learning_rate',
                    'MinValue': '0.01',
                    'MaxValue': '0.2'}],
                    'MaxValue': '0.2',
                    'ScalingType': 'Auto'}],
                'CategoricalParameterRanges': [{
                    'Name': 'optimizer',
                    'Values': ['"sgd"', '"Adam"']
                }],
                'IntegerParameterRanges': [{
                    'Name': 'num_epoch',
                    'MinValue': '10',
                    'MaxValue': '50'
                    'MaxValue': '50',
                    'ScalingType': 'Auto'
                }]
            }},
        'TrainingJobDefinition': {
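The expected dict above comes from the Airflow workflow helper; roughly, a config of that shape is produced as sketched below, assuming sagemaker.workflow.airflow.tuning_config plus an already-configured tuner and its training inputs, neither of which is shown in this diff:

from sagemaker.workflow.airflow import tuning_config

# tuner: a HyperparameterTuner whose ranges now carry scaling_type (see above)
# inputs: the training data for the underlying estimator
config = tuning_config(tuner, inputs)

# Each continuous and integer range under
# config['HyperParameterTuningJobConfig']['ParameterRanges'] now includes a
# 'ScalingType' entry, 'Auto' unless the range was created with another scale.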
19 changes: 17 additions & 2 deletions tests/unit/test_tuner.py
@@ -71,6 +71,7 @@
                'MaxValue': '100',
                'Name': 'mini_batch_size',
                'MinValue': '10',
                'ScalingType': 'Auto'
            },
        ]
    },
@@ -631,10 +632,17 @@ def test_continuous_parameter():
def test_continuous_parameter_ranges():
    cont_param = ContinuousParameter(0.1, 1e-2)
    ranges = cont_param.as_tuning_range('some')
    assert len(ranges.keys()) == 3
    assert len(ranges.keys()) == 4
    assert ranges['Name'] == 'some'
    assert ranges['MinValue'] == '0.1'
    assert ranges['MaxValue'] == '0.01'
    assert ranges['ScalingType'] == 'Auto'


def test_continuous_parameter_scaling_type():
    cont_param = ContinuousParameter(0.1, 2, scaling_type='ReverseLogarithmic')
    cont_range = cont_param.as_tuning_range('range')
    assert cont_range['ScalingType'] == 'ReverseLogarithmic'


def test_integer_parameter():
@@ -646,10 +654,17 @@ def test_integer_parameter():
def test_integer_parameter_ranges():
    int_param = IntegerParameter(1, 2)
    ranges = int_param.as_tuning_range('some')
    assert len(ranges.keys()) == 3
    assert len(ranges.keys()) == 4
    assert ranges['Name'] == 'some'
    assert ranges['MinValue'] == '1'
    assert ranges['MaxValue'] == '2'
    assert ranges['ScalingType'] == 'Auto'


def test_integer_parameter_scaling_type():
    int_param = IntegerParameter(2, 3, scaling_type='Linear')
    int_range = int_param.as_tuning_range('range')
    assert int_range['ScalingType'] == 'Linear'


def test_categorical_parameter_list():