Skip to content

fix: Make test_processor_with_role_as_pipeline_parameter more concrete #3618

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Feb 1, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions src/sagemaker/processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -1361,7 +1361,7 @@ def __init__(
self,
estimator_cls: type,
framework_version: str,
role: str,
role: Union[str, PipelineVariable],
instance_count: Union[int, PipelineVariable],
instance_type: Union[str, PipelineVariable],
py_version: str = "py3",
Expand Down Expand Up @@ -1389,8 +1389,9 @@ def __init__(
estimator
framework_version (str): The version of the framework. Value is ignored when
``image_uri`` is provided.
role (str): An AWS IAM role name or ARN. Amazon SageMaker Processing uses
this role to access AWS resources, such as data stored in Amazon S3.
role (str or PipelineVariable): An AWS IAM role name or ARN. Amazon SageMaker
Processing uses this role to access AWS resources, such as data stored
in Amazon S3.
instance_count (int or PipelineVariable): The number of instances to run a
processing job with.
instance_type (str or PipelineVariable): The type of EC2 instance to use for
Expand Down
11 changes: 8 additions & 3 deletions tests/integ/sagemaker/workflow/test_processing_steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -385,8 +385,10 @@ def test_multi_step_framework_processing_pipeline_same_source_dir(

SOURCE_DIR = "/pipeline/test_source_dir"

role_param = ParameterString(name="Role", default_value=role)

framework_processor_tf = FrameworkProcessor(
role=role,
role=role_param,
instance_type="ml.m5.xlarge",
instance_count=1,
estimator_cls=TensorFlow,
Expand All @@ -400,7 +402,7 @@ def test_multi_step_framework_processing_pipeline_same_source_dir(
instance_type="ml.m5.xlarge",
instance_count=1,
base_job_name="my-job",
role=role,
role=role_param,
estimator_cls=SKLearn,
sagemaker_session=pipeline_session,
)
Expand Down Expand Up @@ -431,7 +433,10 @@ def test_multi_step_framework_processing_pipeline_same_source_dir(
)

pipeline = Pipeline(
name=pipeline_name, steps=[step_1, step_2], sagemaker_session=pipeline_session
name=pipeline_name,
steps=[step_1, step_2],
sagemaker_session=pipeline_session,
parameters=[role_param],
)
try:
pipeline.create(role)
Expand Down
51 changes: 51 additions & 0 deletions tests/unit/sagemaker/workflow/test_processing_step.py
Original file line number Diff line number Diff line change
Expand Up @@ -1064,3 +1064,54 @@ def test_spark_processor_local_code(spark_processor, processing_input, pipeline_
step_def = json.loads(pipeline.definition())["Steps"][0]
step_def2 = json.loads(pipeline.definition())["Steps"][0]
assert step_def == step_def2


_PARAM_ROLE_NAME = "Role"


@pytest.mark.parametrize(
    "processor_args",
    [
        (
            ScriptProcessor(
                role=ParameterString(name=_PARAM_ROLE_NAME, default_value=ROLE),
                image_uri=IMAGE_URI,
                instance_count=1,
                instance_type="ml.m4.xlarge",
                command=["python3"],
            ),
            {"code": DUMMY_S3_SCRIPT_PATH},
        ),
        (
            Processor(
                role=ParameterString(name=_PARAM_ROLE_NAME, default_value=ROLE),
                image_uri=IMAGE_URI,
                instance_count=1,
                instance_type="ml.m4.xlarge",
            ),
            {},
        ),
    ],
)
@patch("os.path.exists", return_value=True)
@patch("os.path.isfile", return_value=True)
def test_processor_with_role_as_pipeline_parameter(
    exists_mock, isfile_mock, processor_args, pipeline_session
):
    """Verify that a ``role`` passed as a ParameterString is rendered as a
    parameter reference in the pipeline definition.

    Parametrized over ``ScriptProcessor`` (which needs a ``code`` run input)
    and the base ``Processor`` (which takes no extra run inputs). The
    ``os.path`` patches let ScriptProcessor accept the dummy script path
    without touching the filesystem.
    """
    processor, run_inputs = processor_args
    # Fixture processors are built without a session; attach the pipeline
    # session so run() captures step arguments instead of starting a real job.
    processor.sagemaker_session = pipeline_session

    # Call run() once; the original test invoked it a second time and
    # discarded the first result, which was redundant.
    step_args = processor.run(**run_inputs)
    step = ProcessingStep(
        name="MyProcessingStep",
        step_args=step_args,
    )
    pipeline = Pipeline(
        name="MyPipeline",
        steps=[step],
        sagemaker_session=pipeline_session,
    )

    step_def = json.loads(pipeline.definition())["Steps"][0]
    # RoleArn must be an unresolved reference to the pipeline parameter,
    # not a concrete ARN string.
    assert step_def["Arguments"]["RoleArn"] == {"Get": f"Parameters.{_PARAM_ROLE_NAME}"}
57 changes: 0 additions & 57 deletions tests/unit/test_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
from sagemaker.sklearn.processing import SKLearnProcessor
from sagemaker.pytorch.processing import PyTorchProcessor
from sagemaker.tensorflow.processing import TensorFlowProcessor
from sagemaker.workflow import ParameterString
from sagemaker.xgboost.processing import XGBoostProcessor
from sagemaker.mxnet.processing import MXNetProcessor
from sagemaker.network import NetworkConfig
Expand Down Expand Up @@ -738,62 +737,6 @@ def test_processor_with_required_parameters(sagemaker_session):
sagemaker_session.process.assert_called_with(**expected_args)


def test_processor_with_role_as_pipeline_parameter(sagemaker_session):
    """Check that a Processor accepts a ParameterString as ``role``.

    NOTE(review): this only compares the captured ``role_arn`` against the
    parameter's default value, so it passes even if the parameter were
    resolved eagerly — it does not prove the role stays a pipeline
    parameter reference in the rendered definition.
    """

    role = ParameterString(name="Role", default_value=ROLE)

    processor = Processor(
        role=role,
        image_uri=CUSTOM_IMAGE_URI,
        instance_count=1,
        instance_type="ml.m4.xlarge",
        sagemaker_session=sagemaker_session,
    )

    processor.run()

    # _get_expected_args presumably rebuilds the kwargs passed to
    # sagemaker_session.process — TODO confirm against its definition.
    expected_args = _get_expected_args(processor._current_job_name)
    assert expected_args["role_arn"] == role.default_value


@patch("os.path.exists", return_value=True)
@patch("os.path.isfile", return_value=True)
def test_script_processor_with_role_as_pipeline_parameter(
    exists_mock, isfile_mock, sagemaker_session
):
    """Check that a ScriptProcessor accepts a ParameterString as ``role``.

    The ``os.path`` patches let the processor accept the local code path
    without touching the filesystem.

    NOTE(review): like the Processor variant above, this asserts only
    against the parameter's default value, not against a parameter
    reference in the pipeline definition.
    """
    role = ParameterString(name="Role", default_value=ROLE)

    script_processor = ScriptProcessor(
        role=role,
        image_uri=CUSTOM_IMAGE_URI,
        instance_count=1,
        instance_type="ml.m4.xlarge",
        sagemaker_session=sagemaker_session,
        command=["python3"],
    )

    # Build the normalized run arguments first, then feed them back into
    # run() — exercises both the get_run_args and run code paths.
    run_args = script_processor.get_run_args(
        code="/local/path/to/processing_code.py",
        inputs=_get_data_inputs_all_parameters(),
        outputs=_get_data_outputs_all_parameters(),
        arguments=["--drop-columns", "'SelfEmployed'"],
    )

    script_processor.run(
        code=run_args.code,
        inputs=run_args.inputs,
        outputs=run_args.outputs,
        arguments=run_args.arguments,
        wait=True,
        logs=False,
        job_name="my_job_name",
        experiment_config={"ExperimentName": "AnExperiment"},
    )

    expected_args = _get_expected_args(script_processor._current_job_name)
    assert expected_args["role_arn"] == role.default_value


def test_processor_with_missing_network_config_parameters(sagemaker_session):
processor = Processor(
role=ROLE,
Expand Down