@@ -166,7 +166,7 @@ def athena_dataset_definition(sagemaker_session):
             catalog="AwsDataCatalog",
             database="default",
             work_group="workgroup",
-            query_string='SELECT * FROM "default"."s3_test_table_$STAGE_$REGIONUNDERSCORED";',
+            query_string=('SELECT * FROM "default"."s3_test_table_$STAGE_$REGIONUNDERSCORED";'),
             output_s3_uri=f"s3://{sagemaker_session.default_bucket()}/add",
             output_format="JSON",
             output_compression="GZIP",
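Note: the fixture in this hunk builds an Athena-backed dataset definition for a processing job. A minimal sketch of how such a definition is typically handed to a processing input follows; the table name, bucket, and input name are illustrative assumptions, not part of this diff.

    from sagemaker.dataset_definition.inputs import AthenaDatasetDefinition, DatasetDefinition
    from sagemaker.processing import ProcessingInput

    # Run an Athena query and stage its result as a processing-job input.
    dataset_definition = DatasetDefinition(
        local_path="/opt/ml/processing/input/add",  # container path for the query result
        data_distribution_type="FullyReplicated",
        input_mode="File",
        athena_dataset_definition=AthenaDatasetDefinition(
            catalog="AwsDataCatalog",
            database="default",
            work_group="workgroup",
            query_string='SELECT * FROM "default"."my_table";',  # hypothetical table
            output_s3_uri="s3://my-bucket/add",  # hypothetical bucket
            output_format="JSON",
            output_compression="GZIP",
        ),
    )
    processing_input = ProcessingInput(
        input_name="athena-input", dataset_definition=dataset_definition
    )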
@@ -1107,7 +1107,7 @@ def test_one_step_lambda_pipeline(sagemaker_session, role, pipeline_name, region
     step_lambda = LambdaStep(
         name="lambda-step",
         lambda_func=Lambda(
-            function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda",
+            function_arn=("arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda"),
             session=sagemaker_session,
         ),
         inputs={"arg1": "foo"},
@@ -1152,7 +1152,7 @@ def test_two_step_lambda_pipeline_with_output_reference(
     step_lambda1 = LambdaStep(
         name="lambda-step1",
         lambda_func=Lambda(
-            function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda",
+            function_arn=("arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda"),
             session=sagemaker_session,
         ),
         inputs={"arg1": "foo"},
@@ -1162,7 +1162,7 @@ def test_two_step_lambda_pipeline_with_output_reference(
     step_lambda2 = LambdaStep(
         name="lambda-step2",
         lambda_func=Lambda(
-            function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda",
+            function_arn=("arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda"),
             session=sagemaker_session,
         ),
         inputs={"arg1": outputParam1},
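Note: `outputParam1` in this hunk is a `LambdaOutput` declared on the first step and consumed by the second, which establishes the output reference in the test's name. A minimal sketch of that wiring, assuming the output name `output1`; `sagemaker_session` comes from the test fixtures.

    from sagemaker.lambda_helper import Lambda
    from sagemaker.workflow.lambda_step import LambdaOutput, LambdaOutputTypeEnum, LambdaStep

    # Declare a typed output on step 1...
    outputParam1 = LambdaOutput(output_name="output1", output_type=LambdaOutputTypeEnum.String)
    step_lambda1 = LambdaStep(
        name="lambda-step1",
        lambda_func=Lambda(
            function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda",
            session=sagemaker_session,
        ),
        inputs={"arg1": "foo"},
        outputs=[outputParam1],
    )
    # ...and feed it to step 2 as an input, creating the data dependency between the steps.
    step_lambda2 = LambdaStep(
        name="lambda-step2",
        lambda_func=Lambda(
            function_arn="arn:aws:lambda:us-west-2:123456789012:function:sagemaker_test_lambda",
            session=sagemaker_session,
        ),
        inputs={"arg1": outputParam1},
        outputs=[],
    )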
@@ -1854,7 +1854,9 @@ def test_sklearn_xgboost_sip_model_registration(
 
 @pytest.mark.skipif(
     tests.integ.test_region() not in tests.integ.DRIFT_CHECK_BASELINES_SUPPORTED_REGIONS,
-    reason=f"DriftCheckBaselines changes are not fully deployed in {tests.integ.test_region()}.",
+    reason=(
+        "DriftCheckBaselines changes are not fully deployed in" f" {tests.integ.test_region()}."
+    ),
 )
 def test_model_registration_with_drift_check_baselines(
     sagemaker_session,
@@ -2009,7 +2011,9 @@ def test_model_registration_with_drift_check_baselines(
         assert len(execution_steps) == 1
         failure_reason = execution_steps[0].get("FailureReason", "")
         if failure_reason != "":
-            logging.error(f"Pipeline execution failed with error: {failure_reason}. Retrying..")
+            logging.error(
+                f"Pipeline execution failed with error: {failure_reason}." " Retrying.."
+            )
             continue
         assert execution_steps[0]["StepStatus"] == "Succeeded"
         assert execution_steps[0]["StepName"] == "MyRegisterModelStep"
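Note: this hunk sits inside a retry loop that polls the execution's steps and retries when the single step reports a `FailureReason`. A minimal sketch of that polling pattern, assuming `pipeline` is the `Pipeline` object built earlier in the test; the retry count is an illustrative assumption.

    import logging

    # Hypothetical retry wrapper around the polling shown above.
    for _ in range(5):
        execution = pipeline.start(parameters={})
        try:
            execution.wait(delay=30, max_attempts=60)
        except Exception:  # the waiter raises if the execution does not succeed
            pass
        execution_steps = execution.list_steps()  # one entry per pipeline step
        failure_reason = execution_steps[0].get("FailureReason", "")
        if failure_reason != "":
            logging.error(f"Pipeline execution failed with error: {failure_reason}. Retrying..")
            continue
        break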
@@ -2166,7 +2170,7 @@ def test_training_job_with_debugger_and_profiler(
         Rule.sagemaker(rule_configs.loss_not_decreasing()),
     ]
     debugger_hook_config = DebuggerHookConfig(
-        s3_output_path=f"s3://{sagemaker_session.default_bucket()}/{uuid.uuid4()}/tensors"
+        s3_output_path=(f"s3://{sagemaker_session.default_bucket()}/{uuid.uuid4()}/tensors")
     )
 
     base_dir = os.path.join(DATA_DIR, "pytorch_mnist")
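Note: the hook config in this hunk tells SageMaker Debugger where to persist collected tensors. A minimal sketch of attaching the rules and hook config to an estimator follows; the entry point, versions, and instance type are illustrative assumptions, and `role` and `sagemaker_session` come from the test fixtures.

    import uuid

    from sagemaker.debugger import DebuggerHookConfig, Rule, rule_configs
    from sagemaker.pytorch import PyTorch

    rules = [Rule.sagemaker(rule_configs.loss_not_decreasing())]
    debugger_hook_config = DebuggerHookConfig(
        s3_output_path=f"s3://{sagemaker_session.default_bucket()}/{uuid.uuid4()}/tensors"
    )

    # Hypothetical estimator; rules and hook config ride along to the training job.
    pytorch_estimator = PyTorch(
        entry_point="mnist.py",  # assumed training script
        role=role,
        framework_version="1.8.1",
        py_version="py36",
        instance_count=1,
        instance_type="ml.m5.xlarge",
        sagemaker_session=sagemaker_session,
        rules=rules,
        debugger_hook_config=debugger_hook_config,
    )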