# Copyright 2017-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
from __future__ import absolute_import
from sagemaker.amazon.amazon_estimator import AmazonAlgorithmEstimatorBase, registry
from sagemaker.amazon.common import numpy_to_record_serializer, record_deserializer
from sagemaker.amazon.hyperparameter import Hyperparameter as hp # noqa
from sagemaker.amazon.validation import gt, isin
from sagemaker.predictor import Predictor
from sagemaker.model import Model
from sagemaker.session import Session
from sagemaker.vpc_utils import VPC_CONFIG_DEFAULT
class PCA(AmazonAlgorithmEstimatorBase):
"""Placeholder docstring"""
repo_name = "pca"
repo_version = 1
DEFAULT_MINI_BATCH_SIZE = 500
num_components = hp("num_components", gt(0), "Value must be an integer greater than zero", int)
algorithm_mode = hp(
"algorithm_mode",
isin("regular", "randomized"),
        'Value must be one of "regular" or "randomized"',
str,
)
subtract_mean = hp(
name="subtract_mean", validation_message="Value must be a boolean", data_type=bool
)
extra_components = hp(
name="extra_components",
validation_message="Value must be an integer greater than or equal to 0, or -1.",
data_type=int,
)
def __init__(
self,
role,
train_instance_count,
train_instance_type,
num_components,
algorithm_mode=None,
subtract_mean=None,
extra_components=None,
**kwargs
):
"""A Principal Components Analysis (PCA)
:class:`~sagemaker.amazon.amazon_estimator.AmazonAlgorithmEstimatorBase`.
This Estimator may be fit via calls to
:meth:`~sagemaker.amazon.amazon_estimator.AmazonAlgorithmEstimatorBase.fit_ndarray`
or
:meth:`~sagemaker.amazon.amazon_estimator.AmazonAlgorithmEstimatorBase.fit`.
The former allows a PCA model to be fit on a 2-dimensional numpy array.
The latter requires Amazon :class:`~sagemaker.amazon.record_pb2.Record`
protobuf serialized data to be stored in S3.
To learn more about the Amazon protobuf Record class and how to
prepare bulk data in this format, please consult AWS technical
documentation:
https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-training.html
After this Estimator is fit, model data is stored in S3. The model
may be deployed to an Amazon SageMaker Endpoint by invoking
:meth:`~sagemaker.amazon.estimator.EstimatorBase.deploy`. As well as
deploying an Endpoint, deploy returns a
:class:`~sagemaker.amazon.pca.PCAPredictor` object that can be used to
project input vectors to the learned lower-dimensional representation,
using the trained PCA model hosted in the SageMaker Endpoint.
PCA Estimators can be configured by setting hyperparameters. The
available hyperparameters for PCA are documented below. For further
information on the AWS PCA algorithm, please consult AWS technical
documentation: https://docs.aws.amazon.com/sagemaker/latest/dg/pca.html
This Estimator uses Amazon SageMaker PCA to perform training and host
deployed models. To learn more about Amazon SageMaker PCA, please read:
https://docs.aws.amazon.com/sagemaker/latest/dg/how-pca-works.html
Args:
role (str): An AWS IAM role (either name or full ARN). The Amazon
SageMaker training jobs and APIs that create Amazon SageMaker
endpoints use this role to access training data and model
artifacts. After the endpoint is created, the inference code
                might use the IAM role, if it needs to access an AWS resource.
train_instance_count (int): Number of Amazon EC2 instances to use
for training.
train_instance_type (str): Type of EC2 instance to use for training,
for example, 'ml.c4.xlarge'.
num_components (int): The number of principal components. Must be
greater than zero.
algorithm_mode (str): Mode for computing the principal components.
One of 'regular' or 'randomized'.
subtract_mean (bool): Whether the data should be unbiased both
                during training and at inference.
extra_components (int): As the value grows larger, the solution
becomes more accurate but the runtime and memory consumption
increase linearly. If this value is unset or set to -1, then a
default value equal to the maximum of 10 and num_components will
be used. Valid for randomized mode only.
**kwargs: base class keyword argument values.
.. tip::
You can find additional parameters for initializing this class at
            :class:`~sagemaker.amazon.amazon_estimator.AmazonAlgorithmEstimatorBase` and
:class:`~sagemaker.estimator.EstimatorBase`.
"""
super(PCA, self).__init__(role, train_instance_count, train_instance_type, **kwargs)
self.num_components = num_components
self.algorithm_mode = algorithm_mode
self.subtract_mean = subtract_mean
self.extra_components = extra_components
def create_model(self, vpc_config_override=VPC_CONFIG_DEFAULT, **kwargs):
"""Return a :class:`~sagemaker.amazon.pca.PCAModel` referencing the
        latest S3 model data produced by this Estimator.
Args:
vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on
the model. Default: use subnets and security groups from this Estimator.
* 'Subnets' (list[str]): List of subnet ids.
* 'SecurityGroupIds' (list[str]): List of security group ids.
**kwargs: Additional kwargs passed to the PCAModel constructor.
"""
return PCAModel(
self.model_data,
self.role,
sagemaker_session=self.sagemaker_session,
vpc_config=self.get_vpc_config(vpc_config_override),
**kwargs
)
def _prepare_for_training(self, records, mini_batch_size=None, job_name=None):
"""Set hyperparameters needed for training.
Args:
records (:class:`~RecordSet`): The records to train this ``Estimator`` on.
mini_batch_size (int or None): The size of each mini-batch to use when
training. If ``None``, a default value will be used.
job_name (str): Name of the training job to be created. If not
specified, one is generated, using the base name given to the
constructor if applicable.
"""
num_records = None
if isinstance(records, list):
for record in records:
if record.channel == "train":
num_records = record.num_records
break
if num_records is None:
raise ValueError("Must provide train channel.")
else:
num_records = records.num_records
# mini_batch_size is a required parameter
default_mini_batch_size = min(
self.DEFAULT_MINI_BATCH_SIZE, max(1, int(num_records / self.train_instance_count))
)
use_mini_batch_size = mini_batch_size or default_mini_batch_size
super(PCA, self)._prepare_for_training(
records=records, mini_batch_size=use_mini_batch_size, job_name=job_name
)
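

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It shows how a
# PCA estimator might be constructed, fit on a 2-D numpy array via
# ``record_set``, and deployed. The role ARN, instance types, and array shape
# below are hypothetical placeholders.
def _example_train_and_deploy_pca():
    import numpy as np

    # Hypothetical 1000 x 50 feature matrix; PCA expects float32 features.
    train_data = np.random.rand(1000, 50).astype("float32")

    pca = PCA(
        role="arn:aws:iam::123456789012:role/ExampleSageMakerRole",  # hypothetical role ARN
        train_instance_count=1,
        train_instance_type="ml.c4.xlarge",
        num_components=10,
        algorithm_mode="regular",
        subtract_mean=True,
    )
    # record_set uploads the array to S3 as protobuf Record data and returns a RecordSet.
    records = pca.record_set(train_data)
    pca.fit(records)
    # deploy() creates an endpoint and returns a PCAPredictor.
    return pca.deploy(initial_instance_count=1, instance_type="ml.m4.xlarge")
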
class PCAPredictor(Predictor):
"""Transforms input vectors to lower-dimesional representations.
The implementation of
:meth:`~sagemaker.predictor.Predictor.predict` in this
`Predictor` requires a numpy ``ndarray`` as input. The array should
contain the same number of columns as the feature-dimension of the data used
to fit the model this Predictor performs inference on.
:meth:`predict()` returns a list of
:class:`~sagemaker.amazon.record_pb2.Record` objects, one for each row in
the input ``ndarray``. The lower dimension vector result is stored in the
``projection`` key of the ``Record.label`` field.
"""
def __init__(self, endpoint_name, sagemaker_session=None):
"""
Args:
endpoint_name (str): Name of the Amazon SageMaker endpoint to which
requests are sent.
sagemaker_session (sagemaker.session.Session): A SageMaker Session
object, used for SageMaker interactions (default: None). If not
specified, one is created using the default AWS configuration
chain.
"""
super(PCAPredictor, self).__init__(
endpoint_name,
sagemaker_session,
serializer=numpy_to_record_serializer(),
deserializer=record_deserializer(),
)
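

# Illustrative sketch (not part of the original module): reading the
# lower-dimensional projections out of the Records returned by
# ``PCAPredictor.predict``. The predictor and input array are assumed to come
# from an already-deployed PCA endpoint, as in the hypothetical sketch above.
def _example_project_vectors(predictor, test_data):
    # ``predictor`` is assumed to be a PCAPredictor and ``test_data`` a 2-D
    # numpy ndarray with the same number of columns as the training data.
    results = predictor.predict(test_data)
    # Each Record stores the lower-dimensional vector under the "projection"
    # key of its label field.
    return [list(record.label["projection"].float32_tensor.values) for record in results]
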
class PCAModel(Model):
"""Reference PCA s3 model data. Calling
:meth:`~sagemaker.model.Model.deploy` creates an Endpoint and return a
Predictor that transforms vectors to a lower-dimensional representation.
"""
def __init__(self, model_data, role, sagemaker_session=None, **kwargs):
"""
Args:
            model_data (str): The S3 location of a SageMaker model data
                ``.tar.gz`` file.
            role (str): An AWS IAM role (either name or full ARN). The Amazon
                SageMaker training jobs and APIs that create Amazon SageMaker
                endpoints use this role to access model artifacts.
            sagemaker_session (sagemaker.session.Session): Session object which
                manages interactions with Amazon SageMaker APIs and any other
                AWS services needed. If not specified, one is created using the
                default AWS configuration chain.
            **kwargs: Keyword arguments passed to the :class:`~sagemaker.model.Model` constructor.
"""
sagemaker_session = sagemaker_session or Session()
repo = "{}:{}".format(PCA.repo_name, PCA.repo_version)
image_uri = "{}/{}".format(registry(sagemaker_session.boto_session.region_name), repo)
super(PCAModel, self).__init__(
image_uri,
model_data,
role,
predictor_cls=PCAPredictor,
sagemaker_session=sagemaker_session,
**kwargs
)
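

# Illustrative sketch (not part of the original module): hosting a previously
# trained PCA model directly from its S3 artifact via PCAModel. The S3 URI and
# role ARN below are hypothetical placeholders.
def _example_deploy_from_model_data():
    model = PCAModel(
        model_data="s3://example-bucket/pca-output/model.tar.gz",  # hypothetical artifact location
        role="arn:aws:iam::123456789012:role/ExampleSageMakerRole",  # hypothetical role ARN
    )
    # deploy() creates an endpoint and returns a PCAPredictor (predictor_cls).
    return model.deploy(initial_instance_count=1, instance_type="ml.m4.xlarge")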