From c97c4678b89ff89bda6f1ea4573803d7e1bcb947 Mon Sep 17 00:00:00 2001
From: Kevin
Date: Fri, 2 Dec 2022 12:48:09 -0800
Subject: [PATCH 01/11] fix: type hint of PySparkProcessor __init__ (#3297)

From de589419595fbf7bf76e55745f454864cc5998be Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andr=C3=A9=20Perez?=
Date: Fri, 2 Dec 2022 22:01:39 +0100
Subject: [PATCH 02/11] fix: fix PySparkProcessor __init__ params type (#3354)

From 41dd3305c2673a4f85e54eec9858f37393c89431 Mon Sep 17 00:00:00 2001
From: Shreya Pandit
Date: Fri, 2 Dec 2022 13:18:14 -0800
Subject: [PATCH 03/11] fix: Allow Py 3.7 for MMS Test Docker env (#3080)

Co-authored-by: Mufaddal Rohawala
---
 tests/data/multimodel/container/Dockerfile | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/tests/data/multimodel/container/Dockerfile b/tests/data/multimodel/container/Dockerfile
index 4792a429c1..71c38a6605 100644
--- a/tests/data/multimodel/container/Dockerfile
+++ b/tests/data/multimodel/container/Dockerfile
@@ -1,4 +1,5 @@
-FROM public.ecr.aws/ubuntu/ubuntu:18.04
+# added latest image from https://gallery.ecr.aws/lts/ubuntu
+FROM public.ecr.aws/ubuntu/ubuntu:22.04
 
 # Set a docker label to advertise multi-model support on the container
 LABEL com.amazonaws.sagemaker.capabilities.multi-models=true
@@ -15,7 +16,7 @@ RUN apt-get update && \
     curl \
     vim \
  && rm -rf /var/lib/apt/lists/* \
- && curl -O https://bootstrap.pypa.io/pip/3.6/get-pip.py \
+ && curl -O https://bootstrap.pypa.io/pip/get-pip.py \
  && python3 get-pip.py
 
 RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1
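The multi-models LABEL above is what SageMaker checks before letting an image back a multi-model endpoint. As a rough sketch of how a container like this is exercised from the SDK, assuming the standard MultiDataModel API (the image URI, bucket, model names, and role ARN below are hypothetical placeholders, not values from this patch):

```python
from sagemaker.multidatamodel import MultiDataModel
from sagemaker.predictor import Predictor
from sagemaker.serializers import CSVSerializer

# Hypothetical multi-model deployment; every identifier here is a placeholder.
mdm = MultiDataModel(
    name="mms-demo",
    model_data_prefix="s3://my-bucket/models/",  # each model is a .tar.gz under this prefix
    image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/mms-test:latest",
    role="arn:aws:iam::123456789012:role/SageMakerRole",
    predictor_cls=Predictor,
)
predictor = mdm.deploy(
    initial_instance_count=1,
    instance_type="ml.m5.xlarge",
    serializer=CSVSerializer(),
)
# TargetModel routing: address any archive stored under the prefix.
print(predictor.predict([1, 2, 3], target_model="model-a.tar.gz"))
```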
From 1e23a3f6a7cf554aa537c5c4e21e35548053a6ee Mon Sep 17 00:00:00 2001
From: maldil
Date: Fri, 2 Dec 2022 13:19:59 -0800
Subject: [PATCH 04/11] refactoring: using with statement (#3286)

---
 src/sagemaker/git_utils.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/sagemaker/git_utils.py b/src/sagemaker/git_utils.py
index 80bd62d5be..c424753286 100644
--- a/src/sagemaker/git_utils.py
+++ b/src/sagemaker/git_utils.py
@@ -279,9 +279,8 @@ def _run_clone_command(repo_url, dest_dir):
         subprocess.check_call(["git", "clone", repo_url, dest_dir], env=my_env)
     elif repo_url.startswith("git@"):
         with tempfile.NamedTemporaryFile() as sshnoprompt:
-            write_pipe = open(sshnoprompt.name, "w")
-            write_pipe.write("ssh -oBatchMode=yes $@")
-            write_pipe.close()
+            with open(sshnoprompt.name, "w") as write_pipe:
+                write_pipe.write("ssh -oBatchMode=yes $@")
             os.chmod(sshnoprompt.name, 0o511)
             my_env["GIT_SSH"] = sshnoprompt.name
             subprocess.check_call(["git", "clone", repo_url, dest_dir], env=my_env)
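The payoff of the nested `with` is that the wrapper script is guaranteed to be flushed and closed before `chmod` runs and before git executes it, even if the write raises. A standalone sketch of the same pattern, assuming a placeholder repo URL and destination directory:

```python
import os
import subprocess
import tempfile

with tempfile.NamedTemporaryFile() as sshnoprompt:
    # The inner `with` flushes and closes the wrapper before anything else
    # touches it; the old open()/close() pair only did so on the happy path.
    with open(sshnoprompt.name, "w") as write_pipe:
        write_pipe.write("ssh -oBatchMode=yes $@")
    os.chmod(sshnoprompt.name, 0o511)  # make the wrapper executable
    env = dict(os.environ, GIT_SSH=sshnoprompt.name)
    subprocess.check_call(
        ["git", "clone", "git@example.com:me/repo.git", "/tmp/repo"], env=env
    )
```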
From 19efadf043678a6c7da4122368d6141e1ec2df10 Mon Sep 17 00:00:00 2001
From: Shreya Pandit
Date: Fri, 2 Dec 2022 13:21:34 -0800
Subject: [PATCH 05/11] Update local_requirements.txt PyYAML version (#3095)

Co-authored-by: Basil Beirouti
Co-authored-by: Kalyani Nikure <110067132+knikure@users.noreply.github.com>
---
 requirements/extras/local_requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements/extras/local_requirements.txt b/requirements/extras/local_requirements.txt
index 5304d82b2a..5f2c85c2fe 100644
--- a/requirements/extras/local_requirements.txt
+++ b/requirements/extras/local_requirements.txt
@@ -1,4 +1,4 @@
 urllib3==1.26.8
 docker-compose==1.29.2
 docker>=5.0.2,<7.0.0
-PyYAML==5.4.1
+PyYAML==6.0.0

From 76f7782db112b38cb7e058dffb1508f2d34fb50b Mon Sep 17 00:00:00 2001
From: arjkesh <33526713+arjkesh@users.noreply.github.com>
Date: Fri, 2 Dec 2022 13:22:35 -0800
Subject: [PATCH 06/11] feature: Update TF 2.9 and TF 2.10 inference DLCs
 (#3465)

---
 .../image_uri_config/tensorflow.json | 66 ++++++++++++++++++-
 1 file changed, 65 insertions(+), 1 deletion(-)

diff --git a/src/sagemaker/image_uri_config/tensorflow.json b/src/sagemaker/image_uri_config/tensorflow.json
index 6a01c3e3e6..0122dcd3ca 100644
--- a/src/sagemaker/image_uri_config/tensorflow.json
+++ b/src/sagemaker/image_uri_config/tensorflow.json
@@ -285,7 +285,9 @@
             "2.5": "2.5.1",
             "2.6": "2.6.3",
             "2.7": "2.7.0",
-            "2.8": "2.8.0"
+            "2.8": "2.8.0",
+            "2.9": "2.9.2",
+            "2.10": "2.10.0"
         },
         "versions": {
             "1.10.0": {
@@ -1468,6 +1470,68 @@
                 "us-west-2": "763104351884"
             },
             "repository": "tensorflow-inference"
+        },
+        "2.9.2": {
+            "registries": {
+                "af-south-1": "626614931356",
+                "ap-east-1": "871362719292",
+                "ap-northeast-1": "763104351884",
+                "ap-northeast-2": "763104351884",
+                "ap-northeast-3": "364406365360",
+                "ap-south-1": "763104351884",
+                "ap-southeast-1": "763104351884",
+                "ap-southeast-2": "763104351884",
+                "ap-southeast-3": "907027046896",
+                "ca-central-1": "763104351884",
+                "cn-north-1": "727897471807",
+                "cn-northwest-1": "727897471807",
+                "eu-central-1": "763104351884",
+                "eu-north-1": "763104351884",
+                "eu-south-1": "692866216735",
+                "eu-west-1": "763104351884",
+                "eu-west-2": "763104351884",
+                "eu-west-3": "763104351884",
+                "me-south-1": "217643126080",
+                "sa-east-1": "763104351884",
+                "us-east-1": "763104351884",
+                "us-east-2": "763104351884",
+                "us-gov-west-1": "442386744353",
+                "us-iso-east-1": "886529160074",
+                "us-west-1": "763104351884",
+                "us-west-2": "763104351884"
+            },
+            "repository": "tensorflow-inference"
+        },
+        "2.10.0": {
+            "registries": {
+                "af-south-1": "626614931356",
+                "ap-east-1": "871362719292",
+                "ap-northeast-1": "763104351884",
+                "ap-northeast-2": "763104351884",
+                "ap-northeast-3": "364406365360",
+                "ap-south-1": "763104351884",
+                "ap-southeast-1": "763104351884",
+                "ap-southeast-2": "763104351884",
+                "ap-southeast-3": "907027046896",
+                "ca-central-1": "763104351884",
+                "cn-north-1": "727897471807",
+                "cn-northwest-1": "727897471807",
+                "eu-central-1": "763104351884",
+                "eu-north-1": "763104351884",
+                "eu-south-1": "692866216735",
+                "eu-west-1": "763104351884",
+                "eu-west-2": "763104351884",
+                "eu-west-3": "763104351884",
+                "me-south-1": "217643126080",
+                "sa-east-1": "763104351884",
+                "us-east-1": "763104351884",
+                "us-east-2": "763104351884",
+                "us-gov-west-1": "442386744353",
+                "us-iso-east-1": "886529160074",
+                "us-west-1": "763104351884",
+                "us-west-2": "763104351884"
+            },
+            "repository": "tensorflow-inference"
         }
     }
 },
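With the registry maps above in place, the new versions resolve through the SDK's standard lookup helper. A quick sanity check, assuming an SDK build that ships this config; the region and instance type are arbitrary examples:

```python
from sagemaker import image_uris

# The 763104351884 account comes from the us-west-2 entry added above.
uri = image_uris.retrieve(
    framework="tensorflow",
    region="us-west-2",
    version="2.10",
    image_scope="inference",
    instance_type="ml.c5.xlarge",
)
print(uri)
# Expect something like:
# 763104351884.dkr.ecr.us-west-2.amazonaws.com/tensorflow-inference:2.10.0-cpu
```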
From fde07388dc26cb270a0a0dfba91439c64e87751a Mon Sep 17 00:00:00 2001
From: Keshav Chandak
Date: Sat, 3 Dec 2022 03:41:10 +0530
Subject: [PATCH 07/11] feature: Added transform with monitoring pipeline step
 in transformer (#3438)

Co-authored-by: Keshav Chandak
---
 src/sagemaker/transformer.py    | 155 ++++++++++++++++++++++++++++++++-
 tests/integ/test_transformer.py |  66 ++++++++++++-
 2 files changed, 217 insertions(+), 4 deletions(-)

diff --git a/src/sagemaker/transformer.py b/src/sagemaker/transformer.py
index cfcc637b99..97278abdd0 100644
--- a/src/sagemaker/transformer.py
+++ b/src/sagemaker/transformer.py
@@ -14,14 +14,17 @@
 from __future__ import absolute_import
 
 from typing import Union, Optional, List, Dict
-from botocore import exceptions
+import logging
+import copy
+import time
 
+from botocore import exceptions
 from sagemaker.job import _Job
-from sagemaker.session import Session
+from sagemaker.session import Session, get_execution_role
 from sagemaker.inputs import BatchDataCaptureConfig
 from sagemaker.workflow.entities import PipelineVariable
 from sagemaker.workflow.functions import Join
-from sagemaker.workflow.pipeline_context import runnable_by_pipeline
+from sagemaker.workflow.pipeline_context import runnable_by_pipeline, PipelineSession
 from sagemaker.workflow import is_pipeline_variable
 from sagemaker.workflow.execution_variables import ExecutionVariables
 from sagemaker.utils import base_name_from_image, name_from_base
@@ -266,6 +269,152 @@ def transform(
         if wait:
             self.latest_transform_job.wait(logs=logs)
 
+    def transform_with_monitoring(
+        self,
+        monitoring_config,
+        monitoring_resource_config,
+        data: str,
+        data_type: str = "S3Prefix",
+        content_type: str = None,
+        compression_type: str = None,
+        split_type: str = None,
+        input_filter: str = None,
+        output_filter: str = None,
+        join_source: str = None,
+        model_client_config: Dict[str, str] = None,
+        batch_data_capture_config: BatchDataCaptureConfig = None,
+        monitor_before_transform: bool = False,
+        supplied_baseline_statistics: str = None,
+        supplied_baseline_constraints: str = None,
+        wait: bool = True,
+        pipeline_name: str = None,
+        role: str = None,
+    ):
+        """Runs a transform job with an accompanying monitoring job.
+
+        Note that this function does not start a transform job immediately;
+        instead, it creates a SageMaker Pipeline and executes it.
+        If you provide an existing pipeline_name, no new pipeline is created; otherwise,
+        each transform_with_monitoring call creates a new pipeline and executes it.
+
+        Args:
+            monitoring_config (Union[
+                `sagemaker.workflow.quality_check_step.QualityCheckConfig`,
+                `sagemaker.workflow.quality_check_step.ClarifyCheckConfig`
+            ]): the monitoring configuration used to run model monitoring.
+            monitoring_resource_config (`sagemaker.workflow.check_job_config.CheckJobConfig`):
+                the check job (processing job) cluster resource configuration.
+            data (str): Input data location in S3 for the transform job.
+            data_type (str): What the S3 location defines (default: 'S3Prefix').
+                Valid values:
+                * 'S3Prefix' - the S3 URI defines a key name prefix. All objects with this prefix
+                    will be used as inputs for the transform job.
+                * 'ManifestFile' - the S3 URI points to a single manifest file listing each S3
+                    object to use as an input for the transform job.
+            content_type (str): MIME type of the input data (default: None).
+            compression_type (str): Compression type of the input data, if
+                compressed (default: None). Valid values: 'Gzip', None.
+            split_type (str): The record delimiter for the input object
+                (default: 'None'). Valid values: 'None', 'Line', 'RecordIO', and
+                'TFRecord'.
+            input_filter (str): A JSONPath to select a portion of the input to
+                pass to the algorithm container for inference. If you omit the
+                field, it gets the value '$', representing the entire input.
+                For CSV data, each row is taken as a JSON array,
+                so only index-based JSONPaths can be applied, e.g. $[0], $[1:].
+                CSV data should follow the `RFC format `_.
+                See `Supported JSONPath Operators
+                `_
+                for a table of supported JSONPath operators.
+                For more information, see the SageMaker API documentation for
+                `CreateTransformJob
+                `_.
+                Some examples: "$[1:]", "$.features" (default: None).
+            output_filter (str): A JSONPath to select a portion of the
+                joined/original output to return as the output.
+                For more information, see the SageMaker API documentation for
+                `CreateTransformJob
+                `_.
+                Some examples: "$[1:]", "$.prediction" (default: None).
+            join_source (str): The source of data to be joined to the transform
+                output. It can be set to 'Input' meaning the entire input record
+                will be joined to the inference result. You can use OutputFilter
+                to select the useful portion before uploading to S3. (default:
+                None). Valid values: Input, None.
+            model_client_config (dict[str, str]): Model configuration.
+                Dictionary contains two optional keys,
+                'InvocationsTimeoutInSeconds', and 'InvocationsMaxRetries'.
+                (default: ``None``).
+            batch_data_capture_config (BatchDataCaptureConfig): Configuration object which
+                specifies the configurations related to the batch data capture for the transform job
+                (default: ``None``).
+            monitor_before_transform (bool): Applies to data quality and model
+                explainability checks; when True, the check step runs before
+                the transform job instead of after it (default: False).
+            supplied_baseline_statistics (Union[str, PipelineVariable]): The S3 path
+                to the supplied statistics object representing the statistics JSON file
+                which will be used for the drift check (default: None).
+            supplied_baseline_constraints (Union[str, PipelineVariable]): The S3 path
+                to the supplied constraints object representing the constraints JSON file
+                which will be used for the drift check (default: None).
+            wait (bool): Whether to wait for the pipeline execution to complete (default: True).
+            pipeline_name (str): The name of the pipeline for the monitoring and transform steps.
+            role (str): Execution role used to create and run the pipeline.
+        """
+
+        transformer = self
+        if not isinstance(self.sagemaker_session, PipelineSession):
+            sagemaker_session = self.sagemaker_session
+            self.sagemaker_session = None
+            transformer = copy.deepcopy(self)
+            transformer.sagemaker_session = PipelineSession()
+            self.sagemaker_session = sagemaker_session
+
+        transform_step_args = transformer.transform(
+            data=data,
+            data_type=data_type,
+            content_type=content_type,
+            compression_type=compression_type,
+            split_type=split_type,
+            input_filter=input_filter,
+            output_filter=output_filter,
+            batch_data_capture_config=batch_data_capture_config,
+            join_source=join_source,
+            model_client_config=model_client_config,
+        )
+
+        from sagemaker.workflow.monitor_batch_transform_step import MonitorBatchTransformStep
+
+        monitoring_batch_step = MonitorBatchTransformStep(
+            name="MonitorBatchTransformStep",
+            display_name="MonitorBatchTransformStep",
+            description="",
+            transform_step_args=transform_step_args,
+            monitor_configuration=monitoring_config,
+            check_job_configuration=monitoring_resource_config,
+            monitor_before_transform=monitor_before_transform,
+            supplied_baseline_constraints=supplied_baseline_constraints,
+            supplied_baseline_statistics=supplied_baseline_statistics,
+        )
+
+        pipeline_name = (
+            pipeline_name if pipeline_name else f"TransformWithMonitoring{int(time.time())}"
+        )
+        # if the pipeline already exists, upsert updates it and a new execution just starts
+        from sagemaker.workflow.pipeline import Pipeline
+
+        pipeline = Pipeline(
+            name=pipeline_name,
+            steps=[monitoring_batch_step],
+            sagemaker_session=transformer.sagemaker_session,
+        )
+        pipeline.upsert(role_arn=role if role else get_execution_role())
+        execution = pipeline.start()
+        if wait:
+            logging.info("Waiting for transform with monitoring to execute ...")
+            execution.wait()
+        return execution
+
     def delete_model(self):
         """Delete the corresponding SageMaker model for this Transformer."""
         self.sagemaker_session.delete_model(self.model_name)
diff --git a/tests/integ/test_transformer.py b/tests/integ/test_transformer.py
index a0e37ffc77..1de333b987 100644
--- a/tests/integ/test_transformer.py
+++ b/tests/integ/test_transformer.py
@@ -25,6 +25,7 @@
 from sagemaker.transformer import Transformer
 from sagemaker.estimator import Estimator
 from sagemaker.inputs import BatchDataCaptureConfig
+from sagemaker.xgboost import XGBoostModel
 from sagemaker.utils import unique_name_from_base
 from tests.integ import (
     datasets,
@@ -36,7 +37,7 @@
 from tests.integ.timeout import timeout, timeout_and_delete_model_with_transformer
 from tests.integ.vpc_test_utils import get_or_create_vpc_resources
 
-from sagemaker.model_monitor import DatasetFormat, Statistics
+from sagemaker.model_monitor import DatasetFormat, Statistics, Constraints
 
 from sagemaker.workflow.check_job_config import CheckJobConfig
 from sagemaker.workflow.quality_check_step import (
@@ -645,3 +646,66 @@ def _create_transformer_and_transform_job(
         job_name=unique_name_from_base("test-transform"),
     )
     return transformer
+
+
+def test_transformer_and_monitoring_job(
+    pipeline_session,
+    sagemaker_session,
+    role,
+    pipeline_name,
+    check_job_config,
+    data_bias_check_config,
+):
+    xgb_model_data_s3 = pipeline_session.upload_data(
+        path=os.path.join(os.path.join(DATA_DIR, "xgboost_abalone"), "xgb_model.tar.gz"),
+        key_prefix="integ-test-data/xgboost/model",
+    )
+    data_bias_supplied_baseline_constraints = Constraints.from_file_path(
+        constraints_file_path=os.path.join(
+            DATA_DIR, "pipeline/clarify_check_step/data_bias/good_cases/analysis.json"
+        ),
+        sagemaker_session=sagemaker_session,
+    ).file_s3_uri
+
+    xgb_model = XGBoostModel(
+        model_data=xgb_model_data_s3,
+        framework_version="1.3-1",
+        role=role,
+        sagemaker_session=sagemaker_session,
+        entry_point=os.path.join(os.path.join(DATA_DIR, "xgboost_abalone"), "inference.py"),
+        enable_network_isolation=True,
+    )
+
+    xgb_model.deploy(_INSTANCE_COUNT, _INSTANCE_TYPE)
+
+    transform_output = f"s3://{sagemaker_session.default_bucket()}/{pipeline_name}Transform"
+    transformer = Transformer(
+        model_name=xgb_model.name,
+        strategy="SingleRecord",
+        instance_type="ml.m5.xlarge",
+        instance_count=1,
+        output_path=transform_output,
+        sagemaker_session=pipeline_session,
+    )
+
+    transform_input = pipeline_session.upload_data(
+        path=os.path.join(DATA_DIR, "xgboost_abalone", "abalone"),
+        key_prefix="integ-test-data/xgboost_abalone/abalone",
+    )
+
+    execution = transformer.transform_with_monitoring(
+        monitoring_config=data_bias_check_config,
+        monitoring_resource_config=check_job_config,
+        data=transform_input,
+        content_type="text/libsvm",
+        supplied_baseline_constraints=data_bias_supplied_baseline_constraints,
+        role=role,
+    )
+
+    execution_steps = execution.list_steps()
+    assert len(execution_steps) == 2
+
+    for execution_step in execution_steps:
+        assert execution_step["StepStatus"] == "Succeeded"
+
+    xgb_model.delete_model()
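Outside the integ-test fixtures, calling the new entry point looks roughly like the sketch below; every name, ARN, and S3 URI is a placeholder, and a generic data quality check stands in for the data bias check used in the test above. The call upserts a two-step pipeline (transform plus monitor) and, by default, waits for the execution:

```python
from sagemaker.model_monitor import DatasetFormat
from sagemaker.transformer import Transformer
from sagemaker.workflow.check_job_config import CheckJobConfig
from sagemaker.workflow.quality_check_step import DataQualityCheckConfig

role = "arn:aws:iam::123456789012:role/SageMakerRole"  # placeholder

transformer = Transformer(
    model_name="my-registered-model",  # an existing SageMaker model
    instance_count=1,
    instance_type="ml.m5.xlarge",
    output_path="s3://my-bucket/transform-output",
)

# Cluster resources for the monitoring (processing) job.
check_job_config = CheckJobConfig(role=role)

# Data quality check that compares the batch against a supplied baseline.
quality_check_config = DataQualityCheckConfig(
    baseline_dataset="s3://my-bucket/baseline/data.csv",
    dataset_format=DatasetFormat.csv(header=False),
    output_s3_uri="s3://my-bucket/monitoring-output",
)

# Upserts and starts the pipeline, then waits for it to finish.
execution = transformer.transform_with_monitoring(
    monitoring_config=quality_check_config,
    monitoring_resource_config=check_job_config,
    data="s3://my-bucket/transform-input",
    content_type="text/csv",
    role=role,
)
```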
From 7f9f3b04b6704a4d2378b5d9aa3d37de9db45729 Mon Sep 17 00:00:00 2001
From: Clayton Parnell <42805768+claytonparnell@users.noreply.github.com>
Date: Fri, 2 Dec 2022 17:12:34 -0500
Subject: [PATCH 08/11] fix: Fix bug forcing uploaded tar to be named
 sourcedir (#3412)

---
 src/sagemaker/processing.py | 19 +++++++++++--------
 tests/integ/test_xgboost.py | 20 ++++++++++++++++++++
 2 files changed, 31 insertions(+), 8 deletions(-)

diff --git a/src/sagemaker/processing.py b/src/sagemaker/processing.py
index db6ce2badd..308783578d 100644
--- a/src/sagemaker/processing.py
+++ b/src/sagemaker/processing.py
@@ -1587,13 +1587,13 @@ def run(  # type: ignore[override]
                 framework script to run.Path (absolute or relative) to the local
                 Python source file which should be executed as the entry point
                 to training. When `code` is an S3 URI, ignore `source_dir`,
-                `dependencies, and `git_config`. If ``source_dir`` is specified,
+                `dependencies`, and `git_config`. If ``source_dir`` is specified,
                 then ``code`` must point to a file located at the root of ``source_dir``.
             source_dir (str): Path (absolute, relative or an S3 URI) to a directory
                 with any other processing source code dependencies aside from the entry
                 point file (default: None). If ``source_dir`` is an S3 URI, it must
-                point to a tar.gz file. Structure within this directory are preserved
-                when processing on Amazon SageMaker (default: None).
+                point to a file named `sourcedir.tar.gz`. Structure within this directory
+                is preserved when processing on Amazon SageMaker (default: None).
             dependencies (list[str]): A list of paths to directories (absolute
                 or relative) with any additional libraries that will be exported
                 to the container (default: []). The library folders will be
@@ -1730,12 +1730,15 @@ def _pack_and_upload_code(
                 "sagemaker_session unspecified when creating your Processor to have one set up "
                 "automatically."
             )
+        if "/sourcedir.tar.gz" in estimator.uploaded_code.s3_prefix:
+            # Upload the bootstrapping code as s3://.../jobname/source/runproc.sh.
+            entrypoint_s3_uri = estimator.uploaded_code.s3_prefix.replace(
+                "sourcedir.tar.gz",
+                "runproc.sh",
+            )
+        else:
+            raise RuntimeError("S3 source_dir file must be named `sourcedir.tar.gz`.")
 
-        # Upload the bootstrapping code as s3://.../jobname/source/runproc.sh.
-        entrypoint_s3_uri = estimator.uploaded_code.s3_prefix.replace(
-            "sourcedir.tar.gz",
-            "runproc.sh",
-        )
         script = estimator.uploaded_code.script_name
         s3_runproc_sh = S3Uploader.upload_string_as_file_body(
             self._generate_framework_script(script),
diff --git a/tests/integ/test_xgboost.py b/tests/integ/test_xgboost.py
index 733ab4665a..df06a8863a 100644
--- a/tests/integ/test_xgboost.py
+++ b/tests/integ/test_xgboost.py
@@ -40,6 +40,26 @@ def xgboost_training_job(
     )
 
 
+def test_sourcedir_naming(
+    sagemaker_session,
+    xgboost_latest_version,
+    xgboost_latest_py_version,
+    cpu_instance_type,
+):
+    with pytest.raises(RuntimeError):
+        processor = XGBoostProcessor(
+            framework_version=xgboost_latest_version,
+            role=ROLE,
+            instance_count=1,
+            instance_type=cpu_instance_type,
+            sagemaker_session=sagemaker_session,
+        )
+        processor.run(
+            source_dir="s3://bucket/deps.tar.gz",
+            code="main_script.py",
+        )
+
+
 @pytest.mark.release
 def test_framework_processing_job_with_deps(
     sagemaker_session,
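In practice the stricter check means an S3 source_dir must point at an archive literally named sourcedir.tar.gz; anything else now fails fast instead of surfacing a confusing error later in the job. A hedged sketch, with placeholder bucket, role, and script names:

```python
from sagemaker.xgboost.processing import XGBoostProcessor

processor = XGBoostProcessor(
    framework_version="1.5-1",  # any supported version
    role="arn:aws:iam::123456789012:role/SageMakerRole",
    instance_count=1,
    instance_type="ml.m5.xlarge",
)

# Accepted: the archive carries the expected name, so the runproc.sh
# bootstrap script can be staged alongside it.
processor.run(
    source_dir="s3://my-bucket/code/sourcedir.tar.gz",
    code="main_script.py",
)

# Rejected up front:
try:
    processor.run(source_dir="s3://my-bucket/code/deps.tar.gz", code="main_script.py")
except RuntimeError as err:
    print(err)  # S3 source_dir file must be named `sourcedir.tar.gz`.
```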
From 5d5976726cb8e0cf7143d86b4abb4b665842fd14 Mon Sep 17 00:00:00 2001
From: Navin Soni
Date: Fri, 2 Dec 2022 14:32:01 -0800
Subject: [PATCH 09/11] feature: Add Code Owners file (#3503)

Co-authored-by: Navin Soni
---
 CODEOWNERS                                 | 1 +
 requirements/extras/local_requirements.txt | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 CODEOWNERS

diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000000..7f7ac28644
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1 @@
+* @aws/sagemaker-ml-frameworks

diff --git a/requirements/extras/local_requirements.txt b/requirements/extras/local_requirements.txt
index 5f2c85c2fe..5304d82b2a 100644
--- a/requirements/extras/local_requirements.txt
+++ b/requirements/extras/local_requirements.txt
@@ -1,4 +1,4 @@
 urllib3==1.26.8
 docker-compose==1.29.2
 docker>=5.0.2,<7.0.0
-PyYAML==6.0.0
+PyYAML==5.4.1
From 0d35b2ad60ffdcc8282ff9b004041eb4d90b06e1 Mon Sep 17 00:00:00 2001
From: Simon Zamarin
Date: Mon, 3 Oct 2022 16:38:56 -0400
Subject: [PATCH 10/11] fixed issue with FrameworkProcessor not using the
 specified output_kms_key when uploading sourcedir.tar.gz

---
 src/sagemaker/processing.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/sagemaker/processing.py b/src/sagemaker/processing.py
index 308783578d..0a91fef81d 100644
--- a/src/sagemaker/processing.py
+++ b/src/sagemaker/processing.py
@@ -1493,6 +1493,7 @@ def _create_estimator(
             enable_network_isolation=False,  # True -> uploads to input channel. Not what we want!
             image_uri=self.image_uri,
             role=self.role,
+            output_kms_key=self.output_kms_key,
             # Estimator instance_count doesn't currently matter to FrameworkProcessor, and the
             # SKLearn Framework Estimator requires instance_type==1. So here we hard-wire it to 1,
             # but if it matters in future perhaps we could take self.instance_count here and have
@@ -1801,7 +1802,7 @@ def _upload_payload(
             entry_point=entry_point,
             source_dir=source_dir,
             dependencies=dependencies,
-            git_config=git_config,
+            git_config=git_config
         )
         estimator._prepare_for_training(job_name=job_name)

From b7a84fce744d8261c6eba0a74bea3c3328c25c76 Mon Sep 17 00:00:00 2001
From: Simon Zamarin
Date: Mon, 3 Oct 2022 16:42:00 -0400
Subject: [PATCH 11/11] fixed issue with FrameworkProcessor not using the
 specified output_kms_key when uploading sourcedir.tar.gz

---
 src/sagemaker/processing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sagemaker/processing.py b/src/sagemaker/processing.py
index 0a91fef81d..bd495760f1 100644
--- a/src/sagemaker/processing.py
+++ b/src/sagemaker/processing.py
@@ -1802,7 +1802,7 @@ def _upload_payload(
             entry_point=entry_point,
             source_dir=source_dir,
             dependencies=dependencies,
-            git_config=git_config
+            git_config=git_config,
        )
         estimator._prepare_for_training(job_name=job_name)
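A sketch of the behavior these two commits fix, using placeholder identifiers throughout: with output_kms_key now forwarded to the internal estimator that stages the source archive, the uploaded sourcedir.tar.gz is encrypted with the processor's configured key rather than being uploaded without one.

```python
from sagemaker.pytorch.processing import PyTorchProcessor

# Placeholder role, key ARN, and script names; any FrameworkProcessor
# subclass behaves the same way here.
processor = PyTorchProcessor(
    framework_version="1.12",
    py_version="py38",
    role="arn:aws:iam::123456789012:role/SageMakerRole",
    instance_count=1,
    instance_type="ml.m5.xlarge",
    output_kms_key="arn:aws:kms:us-west-2:123456789012:key/11111111-2222-3333-4444-555555555555",
)
# The staged sourcedir.tar.gz payload is now encrypted with output_kms_key.
processor.run(code="preprocess.py", source_dir="my_code_dir")
```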