Skip to content

Add data_type to hyperparameters #54

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Jan 24, 2018
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 24 additions & 24 deletions src/sagemaker/amazon/factorization_machines.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,34 +23,34 @@ class FactorizationMachines(AmazonAlgorithmEstimatorBase):

repo = 'factorization-machines:1'

num_factors = hp('num_factors', (gt(0), isint), 'An integer greater than zero')
num_factors = hp('num_factors', (gt(0), isint), 'An integer greater than zero', int)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is good — we can probably also drop the type-check validation methods (e.g. `isint`, `isnumber`, `isbool`) as well, since the new `data_type` argument makes them redundant.

predictor_type = hp('predictor_type', isin('binary_classifier', 'regressor'),
'Value "binary_classifier" or "regressor"')
epochs = hp('epochs', (gt(0), isint), "An integer greater than 0")
clip_gradient = hp('clip_gradient', isnumber, "A float value")
eps = hp('eps', isnumber, "A float value")
rescale_grad = hp('rescale_grad', isnumber, "A float value")
bias_lr = hp('bias_lr', (ge(0), isnumber), "A non-negative float")
linear_lr = hp('linear_lr', (ge(0), isnumber), "A non-negative float")
factors_lr = hp('factors_lr', (ge(0), isnumber), "A non-negative float")
bias_wd = hp('bias_wd', (ge(0), isnumber), "A non-negative float")
linear_wd = hp('linear_wd', (ge(0), isnumber), "A non-negative float")
factors_wd = hp('factors_wd', (ge(0), isnumber), "A non-negative float")
'Value "binary_classifier" or "regressor"', str)
epochs = hp('epochs', (gt(0), isint), "An integer greater than 0", int)
clip_gradient = hp('clip_gradient', isnumber, "A float value", float)
eps = hp('eps', isnumber, "A float value", float)
rescale_grad = hp('rescale_grad', isnumber, "A float value", float)
bias_lr = hp('bias_lr', (ge(0), isnumber), "A non-negative float", float)
linear_lr = hp('linear_lr', (ge(0), isnumber), "A non-negative float", float)
factors_lr = hp('factors_lr', (ge(0), isnumber), "A non-negative float", float)
bias_wd = hp('bias_wd', (ge(0), isnumber), "A non-negative float", float)
linear_wd = hp('linear_wd', (ge(0), isnumber), "A non-negative float", float)
factors_wd = hp('factors_wd', (ge(0), isnumber), "A non-negative float", float)
bias_init_method = hp('bias_init_method', isin('normal', 'uniform', 'constant'),
'Value "normal", "uniform" or "constant"')
bias_init_scale = hp('bias_init_scale', (ge(0), isnumber), "A non-negative float")
bias_init_sigma = hp('bias_init_sigma', (ge(0), isnumber), "A non-negative float")
bias_init_value = hp('bias_init_value', isnumber, "A float value")
'Value "normal", "uniform" or "constant"', str)
bias_init_scale = hp('bias_init_scale', (ge(0), isnumber), "A non-negative float", float)
bias_init_sigma = hp('bias_init_sigma', (ge(0), isnumber), "A non-negative float", float)
bias_init_value = hp('bias_init_value', isnumber, "A float value", float)
linear_init_method = hp('linear_init_method', isin('normal', 'uniform', 'constant'),
'Value "normal", "uniform" or "constant"')
linear_init_scale = hp('linear_init_scale', (ge(0), isnumber), "A non-negative float")
linear_init_sigma = hp('linear_init_sigma', (ge(0), isnumber), "A non-negative float")
linear_init_value = hp('linear_init_value', isnumber, "A float value")
'Value "normal", "uniform" or "constant"', str)
linear_init_scale = hp('linear_init_scale', (ge(0), isnumber), "A non-negative float", float)
linear_init_sigma = hp('linear_init_sigma', (ge(0), isnumber), "A non-negative float", float)
linear_init_value = hp('linear_init_value', isnumber, "A float value", float)
factors_init_method = hp('factors_init_method', isin('normal', 'uniform', 'constant'),
'Value "normal", "uniform" or "constant"')
factors_init_scale = hp('factors_init_scale', (ge(0), isnumber), "A non-negative float")
factors_init_sigma = hp('factors_init_sigma', (ge(0), isnumber), "A non-negative float")
factors_init_value = hp('factors_init_value', isnumber, "A float value")
'Value "normal", "uniform" or "constant"', str)
factors_init_scale = hp('factors_init_scale', (ge(0), isnumber), "A non-negative float", float)
factors_init_sigma = hp('factors_init_sigma', (ge(0), isnumber), "A non-negative float", float)
factors_init_value = hp('factors_init_value', isnumber, "A float value", float)

def __init__(self, role, train_instance_count, train_instance_type,
num_factors, predictor_type,
Expand Down
3 changes: 2 additions & 1 deletion src/sagemaker/amazon/hyperparameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ class Hyperparameter(object):
"""An algorithm hyperparameter with optional validation. Implemented as a python
descriptor object."""

def __init__(self, name, validate=lambda _: True, validation_message=""):
def __init__(self, name, validate=lambda _: True, validation_message="", data_type=str):
"""Args:
name (str): The name of this hyperparameter
validate (callable[object]->[bool]): A validation function or list of validation functions.
Expand All @@ -27,6 +27,7 @@ def __init__(self, name, validate=lambda _: True, validation_message=""):
self.validation = validate
self.validation_message = validation_message
self.name = name
self.data_type = data_type
try:
iter(self.validation)
except TypeError:
Expand Down
18 changes: 9 additions & 9 deletions src/sagemaker/amazon/kmeans.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,15 @@ class KMeans(AmazonAlgorithmEstimatorBase):

repo = 'kmeans:1'

k = hp('k', (gt(1), isint), 'An integer greater-than 1')
init_method = hp('init_method', isin('random', 'kmeans++'), 'One of "random", "kmeans++"')
max_iterations = hp('local_lloyd_max_iterations', (gt(0), isint), 'An integer greater-than 0')
tol = hp('local_lloyd_tol', (gt(0), isint), 'An integer greater-than 0')
num_trials = hp('local_lloyd_num_trials', (gt(0), isint), 'An integer greater-than 0')
local_init_method = hp('local_lloyd_init_method', isin('random', 'kmeans++'), 'One of "random", "kmeans++"')
half_life_time_size = hp('half_life_time_size', (ge(0), isint), 'An integer greater-than-or-equal-to 0')
epochs = hp('epochs', (gt(0), isint), 'An integer greater-than 0')
center_factor = hp('extra_center_factor', (gt(0), isint), 'An integer greater-than 0')
k = hp('k', (gt(1), isint), 'An integer greater-than 1', int)
init_method = hp('init_method', isin('random', 'kmeans++'), 'One of "random", "kmeans++"', str)
max_iterations = hp('local_lloyd_max_iterations', (gt(0), isint), 'An integer greater-than 0', int)
tol = hp('local_lloyd_tol', (gt(0), isint), 'An integer greater-than 0', int)
num_trials = hp('local_lloyd_num_trials', (gt(0), isint), 'An integer greater-than 0', int)
local_init_method = hp('local_lloyd_init_method', isin('random', 'kmeans++'), 'One of "random", "kmeans++"', str)
half_life_time_size = hp('half_life_time_size', (ge(0), isint), 'An integer greater-than-or-equal-to 0', int)
epochs = hp('epochs', (gt(0), isint), 'An integer greater-than 0', int)
center_factor = hp('extra_center_factor', (gt(0), isint), 'An integer greater-than 0', int)

def __init__(self, role, train_instance_count, train_instance_type, k, init_method=None,
max_iterations=None, tol=None, num_trials=None, local_init_method=None,
Expand Down
65 changes: 33 additions & 32 deletions src/sagemaker/amazon/linear_learner.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,40 +27,41 @@ class LinearLearner(AmazonAlgorithmEstimatorBase):

binary_classifier_model_selection_criteria = hp('binary_classifier_model_selection_criteria',
isin('accuracy', 'f1', 'precision_at_target_recall',
'recall_at_target_precision', 'cross_entropy_loss'))
target_recall = hp('target_recall', (gt(0), lt(1)), "A float in (0,1)")
target_precision = hp('target_precision', (gt(0), lt(1)), "A float in (0,1)")
positive_example_weight_mult = hp('positive_example_weight_mult', gt(0), "A float greater than 0")
epochs = hp('epochs', (gt(0), isint), "An integer greater-than 0")
'recall_at_target_precision', 'cross_entropy_loss'),
data_type=str)
target_recall = hp('target_recall', (gt(0), lt(1)), "A float in (0,1)", float)
target_precision = hp('target_precision', (gt(0), lt(1)), "A float in (0,1)", float)
positive_example_weight_mult = hp('positive_example_weight_mult', gt(0), "A float greater than 0", float)
epochs = hp('epochs', (gt(0), isint), "An integer greater-than 0", int)
predictor_type = hp('predictor_type', isin('binary_classifier', 'regressor'),
'One of "binary_classifier" or "regressor"')
use_bias = hp('use_bias', isbool, "Either True or False")
num_models = hp('num_models', (gt(0), isint), "An integer greater-than 0")
num_calibration_samples = hp('num_calibration_samples', (gt(0), isint), "An integer greater-than 0")
init_method = hp('init_method', isin('uniform', 'normal'), 'One of "uniform" or "normal"')
init_scale = hp('init_scale', (gt(-1), lt(1)), 'A float in (-1, 1)')
init_sigma = hp('init_sigma', (gt(0), lt(1)), 'A float in (0, 1)')
init_bias = hp('init_bias', isnumber, 'A number')
optimizer = hp('optimizer', isin('sgd', 'adam', 'auto'), 'One of "sgd", "adam" or "auto')
'One of "binary_classifier" or "regressor"', str)
use_bias = hp('use_bias', isbool, "Either True or False", bool)
num_models = hp('num_models', (gt(0), isint), "An integer greater-than 0", int)
num_calibration_samples = hp('num_calibration_samples', (gt(0), isint), "An integer greater-than 0", int)
init_method = hp('init_method', isin('uniform', 'normal'), 'One of "uniform" or "normal"', str)
init_scale = hp('init_scale', (gt(-1), lt(1)), 'A float in (-1, 1)', float)
init_sigma = hp('init_sigma', (gt(0), lt(1)), 'A float in (0, 1)', float)
init_bias = hp('init_bias', isnumber, 'A number', float)
optimizer = hp('optimizer', isin('sgd', 'adam', 'auto'), 'One of "sgd", "adam" or "auto', str)
loss = hp('loss', isin('logistic', 'squared_loss', 'absolute_loss', 'auto'),
'"logistic", "squared_loss", "absolute_loss" or"auto"')
wd = hp('wd', (gt(0), lt(1)), 'A float in (0,1)')
l1 = hp('l1', (gt(0), lt(1)), 'A float in (0,1)')
momentum = hp('momentum', (gt(0), lt(1)), 'A float in (0,1)')
learning_rate = hp('learning_rate', (gt(0), lt(1)), 'A float in (0,1)')
beta_1 = hp('beta_1', (gt(0), lt(1)), 'A float in (0,1)')
beta_2 = hp('beta_1', (gt(0), lt(1)), 'A float in (0,1)')
bias_lr_mult = hp('bias_lr_mult', gt(0), 'A float greater-than 0')
bias_wd_mult = hp('bias_wd_mult', gt(0), 'A float greater-than 0')
use_lr_scheduler = hp('use_lr_scheduler', isbool, 'A boolean')
lr_scheduler_step = hp('lr_scheduler_step', (gt(0), isint), 'An integer greater-than 0')
lr_scheduler_factor = hp('lr_scheduler_factor', (gt(0), lt(1)), 'A float in (0,1)')
lr_scheduler_minimum_lr = hp('lr_scheduler_minimum_lr', gt(0), 'A float greater-than 0')
normalize_data = hp('normalize_data', isbool, 'A boolean')
normalize_label = hp('normalize_label', isbool, 'A boolean')
unbias_data = hp('unbias_data', isbool, 'A boolean')
unbias_label = hp('unbias_label', isbool, 'A boolean')
num_point_for_scalar = hp('num_point_for_scalar', (isint, gt(0)), 'An integer greater-than 0')
'"logistic", "squared_loss", "absolute_loss" or"auto"', str)
wd = hp('wd', (gt(0), lt(1)), 'A float in (0,1)', float)
l1 = hp('l1', (gt(0), lt(1)), 'A float in (0,1)', float)
momentum = hp('momentum', (gt(0), lt(1)), 'A float in (0,1)', float)
learning_rate = hp('learning_rate', (gt(0), lt(1)), 'A float in (0,1)', float)
beta_1 = hp('beta_1', (gt(0), lt(1)), 'A float in (0,1)', float)
beta_2 = hp('beta_1', (gt(0), lt(1)), 'A float in (0,1)', float)
bias_lr_mult = hp('bias_lr_mult', gt(0), 'A float greater-than 0', float)
bias_wd_mult = hp('bias_wd_mult', gt(0), 'A float greater-than 0', float)
use_lr_scheduler = hp('use_lr_scheduler', isbool, 'A boolean', bool)
lr_scheduler_step = hp('lr_scheduler_step', (gt(0), isint), 'An integer greater-than 0', int)
lr_scheduler_factor = hp('lr_scheduler_factor', (gt(0), lt(1)), 'A float in (0,1)', float)
lr_scheduler_minimum_lr = hp('lr_scheduler_minimum_lr', gt(0), 'A float greater-than 0', float)
normalize_data = hp('normalize_data', isbool, 'A boolean', bool)
normalize_label = hp('normalize_label', isbool, 'A boolean', bool)
unbias_data = hp('unbias_data', isbool, 'A boolean', bool)
unbias_label = hp('unbias_label', isbool, 'A boolean', bool)
num_point_for_scalar = hp('num_point_for_scalar', (isint, gt(0)), 'An integer greater-than 0', int)

def __init__(self, role, train_instance_count, train_instance_type, predictor_type='binary_classifier',
binary_classifier_model_selection_criteria=None, target_recall=None, target_precision=None,
Expand Down
8 changes: 4 additions & 4 deletions src/sagemaker/amazon/pca.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,13 @@ class PCA(AmazonAlgorithmEstimatorBase):
DEFAULT_MINI_BATCH_SIZE = 500

num_components = hp(name='num_components', validate=lambda x: x > 0 and isinstance(x, int),
validation_message='Value must be an integer greater than zero')
validation_message='Value must be an integer greater than zero', data_type=int)
algorithm_mode = hp(name='algorithm_mode', validate=lambda x: x in ['regular', 'stable', 'randomized'],
validation_message='Value must be one of "regular", "stable", "randomized"')
validation_message='Value must be one of "regular", "stable", "randomized"', data_type=str)
subtract_mean = hp(name='subtract_mean', validate=lambda x: isinstance(x, bool),
validation_message='Value must be a boolean')
validation_message='Value must be a boolean', data_type=bool)
extra_components = hp(name='extra_components', validate=lambda x: x >= 0 and isinstance(x, int),
validation_message="Value must be an integer greater than or equal to 0")
validation_message="Value must be an integer greater than or equal to 0", data_type=int)

def __init__(self, role, train_instance_count, train_instance_type, num_components,
algorithm_mode=None, subtract_mean=None, extra_components=None, **kwargs):
Expand Down
19 changes: 18 additions & 1 deletion tests/unit/test_hyperparameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ class Test(object):

blank = Hyperparameter(name="some-name")
elizabeth = Hyperparameter(name='elizabeth')
validated = Hyperparameter(name="validated", validate=lambda value: value > 55)
validated = Hyperparameter(name="validated", validate=lambda value: value > 55, data_type=int)


def test_blank_access():
Expand Down Expand Up @@ -55,3 +55,20 @@ def test_validated():
x.validated = 66
with pytest.raises(ValueError):
x.validated = 23


def test_data_type():
x = Test()
x.validated = 66
assert type(x.validated) == Test.__dict__["validated"].data_type


def test_from_string():
x = Test()
value = 65

x.validated = value
from_api = str(value)

x.validated = Test.__dict__["validated"].data_type(from_api)
assert x.validated == value