diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 99d80d2049..6afdf8f4c9 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -7,6 +7,7 @@ CHANGELOG
 
 * feature: Estimators: dependencies attribute allows export of additional libraries into the container
 * feature: Add APIs to export Airflow transform and deploy config
+* bug-fix: Allow code_location argument to be S3 URI in training_config API
 
 1.15.0
 ======
diff --git a/src/sagemaker/estimator.py b/src/sagemaker/estimator.py
index 1981dd2790..3451e69f25 100644
--- a/src/sagemaker/estimator.py
+++ b/src/sagemaker/estimator.py
@@ -670,8 +670,10 @@ def __init__(self, entry_point, source_dir=None, hyperparameters=None, enable_cl
                 training jobs. This will be ignored for now and removed in a further release.
             container_log_level (int): Log level to use within the container (default: logging.INFO).
                 Valid values are defined in the Python logging module.
-            code_location (str): Name of the S3 bucket where custom code is uploaded (default: None).
-                If not specified, default bucket created by ``sagemaker.session.Session`` is used.
+            code_location (str): The S3 prefix URI where custom code will be uploaded (default: None).
+                The code file uploaded to S3 is 'code_location/source/sourcedir.tar.gz'.
+                If not specified, the default code location is 's3://default_bucket/job-name/' and the
+                code file uploaded to S3 is 's3://default_bucket/job-name/source/sourcedir.tar.gz'.
             image_name (str): An alternate image name to use instead of the official Sagemaker image
                 for the framework. This is useful to run one of the Sagemaker supported frameworks
                 with an image containing custom dependencies.
diff --git a/src/sagemaker/workflow/airflow.py b/src/sagemaker/workflow/airflow.py
index e5078c4234..f2fcee4a0b 100644
--- a/src/sagemaker/workflow/airflow.py
+++ b/src/sagemaker/workflow/airflow.py
@@ -27,8 +27,12 @@ def prepare_framework(estimator, s3_operations):
         estimator (sagemaker.estimator.Estimator): The framework estimator to get information from and update.
         s3_operations (dict): The dict to specify s3 operations (upload `source_dir`).
     """
-    bucket = estimator.code_location if estimator.code_location else estimator.sagemaker_session._default_bucket
-    key = '{}/source/sourcedir.tar.gz'.format(estimator._current_job_name)
+    if estimator.code_location is not None:
+        bucket, key = fw_utils.parse_s3_url(estimator.code_location)
+        key = os.path.join(key, 'source', 'sourcedir.tar.gz')
+    else:
+        bucket = estimator.sagemaker_session._default_bucket
+        key = os.path.join(estimator._current_job_name, 'source', 'sourcedir.tar.gz')
     script = os.path.basename(estimator.entry_point)
     if estimator.source_dir and estimator.source_dir.lower().startswith('s3://'):
         code_dir = estimator.source_dir
diff --git a/tests/unit/test_airflow.py b/tests/unit/test_airflow.py
index 0dcb2d0b0a..02e963058f 100644
--- a/tests/unit/test_airflow.py
+++ b/tests/unit/test_airflow.py
@@ -244,7 +244,7 @@ def test_framework_training_config_all_args(sagemaker_session):
         source_dir="{{ source_dir }}",
         enable_cloudwatch_metrics=False,
         container_log_level="{{ log_level }}",
-        code_location="{{ bucket_name }}",
+        code_location="s3://{{ bucket_name }}/{{ prefix }}",
         training_steps=1000,
         evaluation_steps=100,
         checkpoint_path="{{ checkpoint_path }}",
@@ -304,9 +304,7 @@ def test_framework_training_config_all_args(sagemaker_session):
             'SecurityGroupIds': ['{{ security_group_ids }}']
         },
         'HyperParameters': {
-            'sagemaker_submit_directory': '"s3://{{ bucket_name }}/{{ base_job_name }}-'
-                                          '{{ execution_date.strftime(\'%Y-%m-%d-%H-%M-%S\') }}'
-                                          '/source/sourcedir.tar.gz"',
+            'sagemaker_submit_directory': '"s3://{{ bucket_name }}/{{ prefix }}/source/sourcedir.tar.gz"',
             'sagemaker_program': '"{{ entry_point }}"',
             'sagemaker_enable_cloudwatch_metrics': 'false',
             'sagemaker_container_log_level': '"{{ log_level }}"',
@@ -322,8 +320,7 @@ def test_framework_training_config_all_args(sagemaker_session):
         'S3Upload': [{
             'Path': '{{ source_dir }}',
             'Bucket': '{{ bucket_name }}',
-            'Key': "{{ base_job_name }}-{{ execution_date.strftime('%Y-%m-%d-%H-%M-%S') }}"
-                   "/source/sourcedir.tar.gz",
+            'Key': "{{ prefix }}/source/sourcedir.tar.gz",
             'Tar': True}]
     }
 }