
Commit 3b9ec1b

Merge branch 'master' into iss96242

2 parents 57d9ba4 + 2035235

File tree: 9 files changed, +38 -14 lines changed

CHANGELOG.md (+11)

@@ -1,5 +1,16 @@
 # Changelog

+## v2.23.2 (2021-01-06)
+
+### Bug Fixes and Other Changes
+
+ * remove shell=True in subprocess.check_output
+ * use SecurityConfig dict key
+
+### Documentation Changes
+
+ * remove D212 from ignore to comply with PEP257 standards
+
 ## v2.23.1 (2020-12-29)

 ### Bug Fixes and Other Changes

VERSION (+1 -1)

@@ -1 +1 @@
-2.23.2.dev0
+2.23.3.dev0

doc/v2.rst (+2 -2)

@@ -318,9 +318,9 @@ The following serializer/deserializer classes have been renamed and/or moved:
 +--------------------------------------------------------+-------------------------------------------------------+
 | ``sagemaker.predictor._NPYSerializer``                  | ``sagemaker.serializers.NumpySerializer``              |
 +--------------------------------------------------------+-------------------------------------------------------+
-| ``sagemaker.amazon.common.numpy_to_record_serializer`` | ``sagemaker.amazon.serializers.RecordSerializer``      |
+| ``sagemaker.amazon.common.numpy_to_record_serializer`` | ``sagemaker.amazon.common.RecordSerializer``           |
 +--------------------------------------------------------+-------------------------------------------------------+
-| ``sagemaker.amazon.common.record_deserializer``        | ``sagemaker.amazon.deserializers.RecordDeserializer``  |
+| ``sagemaker.amazon.common.record_deserializer``        | ``sagemaker.amazon.common.RecordDeserializer``         |
 +--------------------------------------------------------+-------------------------------------------------------+
 | ``sagemaker.predictor._JsonDeserializer``              | ``sagemaker.deserializers.JSONDeserializer``           |
 +--------------------------------------------------------+-------------------------------------------------------+
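The corrected rows match where these classes actually live in SageMaker Python SDK v2: both renamed classes stayed in sagemaker.amazon.common. A hedged migration sketch follows; only the import path comes from the table above, the constructor calls are illustrative:

    # v1 (removed in v2):
    #   from sagemaker.amazon.common import numpy_to_record_serializer, record_deserializer
    # v2, per the corrected table: same module, new class names.
    from sagemaker.amazon.common import RecordSerializer, RecordDeserializer

    serializer = RecordSerializer()      # replaces numpy_to_record_serializer
    deserializer = RecordDeserializer()  # replaces record_deserializer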

src/sagemaker/feature_store/feature_group.py (+4 -1)

@@ -152,7 +152,7 @@ class IngestionManagerPandas:
         feature_group_name (str): name of the Feature Group.
         sagemaker_session (Session): instance of the Session class to perform boto calls.
         data_frame (DataFrame): pandas DataFrame to be ingested to the given feature group.
-        max_works (int): number of threads to create.
+        max_workers (int): number of threads to create.
     """

     feature_group_name: str = attr.ib()

@@ -508,6 +508,9 @@ def as_hive_ddl(self, database: str = "sagemaker_featurestore", table_name: str
                 f"  {definition.feature_name} "
                 f"{self._FEATURE_TYPE_TO_DDL_DATA_TYPE_MAP.get(definition.feature_type.value)}\n"
             )
+        ddl += "  write_time TIMESTAMP\n"
+        ddl += "  event_time TIMESTAMP\n"
+        ddl += "  is_deleted BOOLEAN\n"
         ddl += ")\n"
         ddl += (
             "ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'\n"

src/sagemaker/local/image.py (+2 -2)

@@ -1067,7 +1067,7 @@ def _ecr_login_if_needed(boto_session, image):
     ecr_url = auth["authorizationData"][0]["proxyEndpoint"]

     cmd = "docker login -u AWS -p %s %s" % (token, ecr_url)
-    subprocess.check_output(cmd, shell=True)
+    subprocess.check_output(cmd.split())

     return True

@@ -1081,5 +1081,5 @@ def _pull_image(image):
     pull_image_command = ("docker pull %s" % image).strip()
     logger.info("docker command: %s", pull_image_command)

-    subprocess.check_output(pull_image_command, shell=True)
+    subprocess.check_output(pull_image_command.split())
     logger.info("image pulled: %s", image)

tests/integ/test_feature_store.py (+5 -1)

@@ -131,7 +131,11 @@ def create_table_ddl():
         "CREATE EXTERNAL TABLE IF NOT EXISTS sagemaker_featurestore.{feature_group_name} (\n"
         "  feature1 FLOAT\n"
         "  feature2 INT\n"
-        "  feature3 STRING\n)\n"
+        "  feature3 STRING\n"
+        "  write_time TIMESTAMP\n"
+        "  event_time TIMESTAMP\n"
+        "  is_deleted BOOLEAN\n"
+        ")\n"
         "ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'\n"
         "  STORED AS\n"
         "  INPUTFORMAT 'parquet.hive.DeprecatedParquetInputFormat'\n"

tests/integ/test_workflow.py (+6 -4)

@@ -506,15 +506,17 @@ def test_training_job_with_debugger(
     except WaiterError:
         pass
     execution_steps = execution.list_steps()
-    training_job_arn = execution_steps[0]["Metadata"]["TrainingJob"]["Arn"]
-    job_description = sagemaker_session.sagemaker_client.describe_training_job(
-        TrainingJobName=training_job_arn.split("/")[1]
-    )

     assert len(execution_steps) == 1
+    assert execution_steps[0].get("FailureReason", "") == ""
     assert execution_steps[0]["StepName"] == "pytorch-train"
     assert execution_steps[0]["StepStatus"] == "Succeeded"

+    training_job_arn = execution_steps[0]["Metadata"]["TrainingJob"]["Arn"]
+    job_description = sagemaker_session.sagemaker_client.describe_training_job(
+        TrainingJobName=training_job_arn.split("/")[1]
+    )
+
     for index, rule in enumerate(rules):
         config = job_description["DebugRuleConfigurations"][index]
         assert config["RuleConfigurationName"] == rule.name
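The reorder is defensive: on a failed execution the step may carry no TrainingJob metadata, so the old code died with an opaque KeyError before any assertion ran. Checking the outcome first, including the new FailureReason assertion, turns that into a readable test failure. A condensed sketch of the pattern:

    step = execution.list_steps()[0]

    # Fail with the pipeline's own error text instead of a KeyError.
    assert step.get("FailureReason", "") == ""
    assert step["StepStatus"] == "Succeeded"

    # Only a succeeded step reliably carries training-job metadata.
    training_job_arn = step["Metadata"]["TrainingJob"]["Arn"]
    training_job_name = training_job_arn.split("/")[1]  # ARN ends in .../<job name>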

tests/unit/sagemaker/feature_store/test_feature_store.py (+5 -1)

@@ -57,7 +57,11 @@ def create_table_ddl():
         "CREATE EXTERNAL TABLE IF NOT EXISTS {database}.{table_name} (\n"
         "  feature1 FLOAT\n"
         "  feature2 INT\n"
-        "  feature3 STRING\n)\n"
+        "  feature3 STRING\n"
+        "  write_time TIMESTAMP\n"
+        "  event_time TIMESTAMP\n"
+        "  is_deleted BOOLEAN\n"
+        ")\n"
         "ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'\n"
         "  STORED AS\n"
         "  INPUTFORMAT 'parquet.hive.DeprecatedParquetInputFormat'\n"

tests/unit/test_image.py (+2 -2)

@@ -765,7 +765,7 @@ def test_ecr_login_needed(check_output):
         "docker login -u AWS -p %s https://520713654638.dkr.ecr.us-east-1.amazonaws.com" % token
     )

-    check_output.assert_called_with(expected_command, shell=True)
+    check_output.assert_called_with(expected_command.split())
     session_mock.client("ecr").get_authorization_token.assert_called_with(
         registryIds=["520713654638"]
     )

@@ -781,7 +781,7 @@ def test_pull_image(check_output):

     expected_command = "docker pull %s" % image

-    check_output.assert_called_once_with(expected_command, shell=True)
+    check_output.assert_called_once_with(expected_command.split())


 def test__aws_credentials_with_long_lived_credentials():
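Because these tests patch subprocess.check_output, only the expected call form changes. A minimal sketch of the same assertion style; the patch target string and image name here are assumptions, not taken from the test file:

    from unittest.mock import patch

    from sagemaker.local import image

    with patch("sagemaker.local.image.subprocess.check_output") as check_output:
        image._pull_image("my-image:latest")
        # After the shell=True removal, the command is expected as an argument list:
        check_output.assert_called_once_with("docker pull my-image:latest".split())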
