From d670a5ee5f7b62ca0118027f70d08bb3d7ba2300 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Wed, 2 Apr 2025 13:58:35 -0500 Subject: [PATCH 01/18] Build: use scoped credentials for interacting with S3 Closes https://github.com/readthedocs/readthedocs-ops/issues/1599 --- readthedocs/api/v2/views/model_views.py | 26 ++++ readthedocs/projects/models.py | 7 + readthedocs/projects/tasks/builds.py | 38 ++++- readthedocs/storage/rclone.py | 3 + readthedocs/storage/s3_storage.py | 1 + readthedocs/storage/security_token_service.py | 138 ++++++++++++++++++ 6 files changed, 212 insertions(+), 1 deletion(-) create mode 100644 readthedocs/storage/security_token_service.py diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index 8bc24efb11b..17fdbea9e19 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -1,6 +1,7 @@ """Endpoints for listing Projects, Versions, Builds, etc.""" import json +from dataclasses import asdict import structlog from allauth.socialaccount.models import SocialAccount @@ -38,6 +39,8 @@ from readthedocs.projects.models import Domain from readthedocs.projects.models import Project from readthedocs.storage import build_commands_storage +from readthedocs.storage.security_token_service import AWSTemporaryCredentialsError +from readthedocs.storage.security_token_service import get_s3_scoped_credentials from ..serializers import BuildAdminReadOnlySerializer from ..serializers import BuildAdminSerializer @@ -345,6 +348,29 @@ def reset(self, request, **kwargs): def get_queryset_for_api_key(self, api_key): return self.model.objects.filter(project=api_key.project) + @decorators.action( + detail=True, + permission_classes=[HasBuildAPIKey], + methods=["post"], + url_path="temporary-credentials", + ) + def temporary_credentials(self, request, **kwargs): + build = self.get_object() + project = build.project + version = build.version + try: + credentials = 
get_s3_scoped_credentials( + project=project, + version=version, + session_id=build.pk, + ) + except AWSTemporaryCredentialsError: + return Response( + {"error": "Failed to generate temporary credentials"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + return Response({"s3": asdict(credentials)}) + class BuildCommandViewSet(DisableListEndpoint, CreateModelMixin, UserSelectViewSet): parser_classes = [JSONParser, MultiPartParser] diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py index 87fc0bb30e8..f16a58a0019 100644 --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1932,6 +1932,9 @@ def add_features(sender, **kwargs): # Build related features SCALE_IN_PROTECTION = "scale_in_prtection" + USE_SCOPED_CREDENTIALS_FOR_BUILD_MEDIA_UPLOAD = ( + "use_s3_scoped_credentials_for_build_media_upload" + ) FEATURES = ( ( @@ -2010,6 +2013,10 @@ def add_features(sender, **kwargs): SCALE_IN_PROTECTION, _("Build: Set scale-in protection before/after building."), ), + ( + USE_SCOPED_CREDENTIALS_FOR_BUILD_MEDIA_UPLOAD, + _("Build: Use S3 scoped credentials for uploading build artifacts."), + ), ) FEATURES = sorted(FEATURES, key=lambda x: x[1]) diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index 3e013f9466d..32dd693fdbc 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -18,6 +18,7 @@ from celery import Task from django.conf import settings from django.utils import timezone +from django.utils.module_loading import import_string from slumber import API from slumber.exceptions import HttpClientError @@ -53,7 +54,6 @@ from readthedocs.doc_builder.exceptions import BuildUserError from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError from readthedocs.projects.models import Feature -from readthedocs.storage import build_media_storage from readthedocs.telemetry.collectors import BuildDataCollector from readthedocs.telemetry.tasks 
import save_build_data from readthedocs.worker import app @@ -885,6 +885,40 @@ def set_valid_clone(self): self.data.project.has_valid_clone = True self.data.version.project.has_valid_clone = True + def _get_sync_media_storage(self): + storage_class = import_string(settings.RTD_BUILD_MEDIA_STORAGE) + extra_kwargs = self._get_extra_kwargs_for_storage() + log.info("Credentials for storage", extra_kwargs=extra_kwargs) + return storage_class(**extra_kwargs) + + def _get_extra_kwargs_for_storage(self): + extra_kwargs = {} + if self.data.project.has_feature(Feature.USE_SCOPED_CREDENTIALS_FOR_BUILD_MEDIA_UPLOAD): + credentials = self._get_scoped_credentials() + s3_credentials = credentials["s3"] + extra_kwargs.update( + { + "access_key": s3_credentials["access_key_id"], + "secret_key": s3_credentials["secret_access_key"], + "security_token": s3_credentials["session_token"], + } + ) + return extra_kwargs + + def _get_scoped_credentials(self): + build_id = self.data.build["id"] + try: + return self.data.api_client.build(f"{build_id}/temporary-credentials").post() + except Exception: + log.exception( + "Error getting scoped credentials.", + build_id=build_id, + ) + raise BuildAppError( + BuildAppError.GENERIC_WITH_BUILD_ID, + exception_message="Error getting scoped credentials.", + ) + def store_build_artifacts(self): """ Save build artifacts to "storage" using Django's storage API. 
@@ -904,6 +938,8 @@ def store_build_artifacts(self): types_to_copy = [] types_to_delete = [] + build_media_storage = self._get_sync_media_storage() + for artifact_type in ARTIFACT_TYPES: if artifact_type in valid_artifacts: types_to_copy.append(artifact_type) diff --git a/readthedocs/storage/rclone.py b/readthedocs/storage/rclone.py index b869a2e5f7a..f1c27ac32a2 100644 --- a/readthedocs/storage/rclone.py +++ b/readthedocs/storage/rclone.py @@ -173,6 +173,7 @@ def __init__( secret_acces_key, region, provider="AWS", + session_token=None, acl=None, endpoint=None, ): @@ -185,6 +186,8 @@ def __init__( "RCLONE_S3_REGION": region, "RCLONE_S3_LOCATION_CONSTRAINT": region, } + if session_token: + self.env_vars["RCLONE_S3_SESSION_TOKEN"] = session_token if acl: self.env_vars["RCLONE_S3_ACL"] = acl if endpoint: diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py index 24381c5827d..01a2928be79 100644 --- a/readthedocs/storage/s3_storage.py +++ b/readthedocs/storage/s3_storage.py @@ -32,6 +32,7 @@ def _rclone(self): bucket_name=self.bucket_name, access_key_id=self.access_key, secret_acces_key=self.secret_key, + session_token=self.security_token, region=self.region_name or "", acl=self.default_acl, endpoint=self.endpoint_url, diff --git a/readthedocs/storage/security_token_service.py b/readthedocs/storage/security_token_service.py new file mode 100644 index 00000000000..dbc185d32ab --- /dev/null +++ b/readthedocs/storage/security_token_service.py @@ -0,0 +1,138 @@ +""" +Module to interact with AWS STS (Security Token Service) to assume a role and get temporary scoped credentials. + +This is mainly used to generate temporary credentials to interact with S3 buckets from the builders. 
+""" + +import json +from dataclasses import dataclass + +import boto3 +import structlog +from django.conf import settings + +from readthedocs.storage import build_media_storage + + +log = structlog.get_logger(__name__) + + +class AWSTemporaryCredentialsError(Exception): + """Exception raised when there is an error getting AWS S3 credentials.""" + + +@dataclass +class AWSTemporaryCredentials: + """ + Dataclass to hold AWS temporary credentials. + """ + + access_key_id: str + secret_access_key: str + session_token: str + + +def get_sts_client(): + return boto3.client( + "sts", + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + # TODO: should this be its own setting? + region_name=settings.AWS_S3_REGION_NAME, + ) + + +def get_s3_scoped_credentials( + project, version, session_id=None, duration=60 * 15 +) -> AWSTemporaryCredentials: + """ + :param session_id: A unique identifier to add to the name of the role session. + The name of the session always includes the project and version slug (rtd-{project}-{version}), + if session_id is given, the name of the session would be "rtd-{session_id}-{project}-{version}". + AWS limits the session name to 64 characters, so if the session_id is too long, it will be truncated. + For example, for a token used in a build, a good session_id is the ID of the build. + :duration: The duration of the credentials in seconds. Default is 15 minutes. + Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). 
+ + See: + - https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html + - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_sts-comparison.html + - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html + """ + bucket_name = build_media_storage.bucket_name + bucket_arn = f"arn:aws:s3:::{bucket_name}" + + storage_paths = version.get_storage_paths() + # Generate the list of allowed prefix resources + # The resulting prefix looks like: + # - html/project/latest/* + # - pdf/project/latest/* + allowed_prefixes = [f"{storage_path}/*" for storage_path in storage_paths] + + # Generate the list of allowed object resources in ARN format. + # The resulting ARN looks like: + # arn:aws:s3:::readthedocs-media/html/project/latest/* + # arn:aws:s3:::readthedocs-media/pdf/project/latest/* + allowed_objects_arn = [f"{bucket_arn}/{prefix}" for prefix in allowed_prefixes] + + # Define an inline policy document to limit the permissions of the temporary credentials. + policy_document = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + ], + "Resource": allowed_objects_arn, + }, + # In order to list the objects in a path, we need to allow the ListBucket action. + # But since that action is not scoped to a path, we need to limit it using a condition. + { + "Effect": "Allow", + "Action": ["s3:ListBucket"], + "Resource": [ + bucket_arn, + ], + "Condition": { + "StringLike": { + "s3:prefix": allowed_prefixes, + } + }, + }, + ], + } + + # NOTE: maybe have a single setting? AWS_STS_ASSUME_ROLE_ARN + role_arn = f"arn:aws:iam::{settings.AWS_IAM_USER_ID}:role/{settings.AWS_ROLE_NAME}" + session_prefix = f"rtd-{session_id}" if session_id else "rtd" + role_session_name = f"{session_prefix}-{project.slug}-{version.slug}" + # Limit to 64 characters, as per AWS limitations. 
+ role_session_name = role_session_name[:64] + + try: + sts_client = get_sts_client() + response = sts_client.assume_role( + RoleArn=role_arn, + RoleSessionName=role_session_name, + Policy=json.dumps(policy_document), + DurationSeconds=duration, + ) + except Exception: + log.exception( + "Error while assuming role to generate temporary credentials", + role_arn=role_arn, + role_session_name=role_session_name, + policy_document=policy_document, + duration=duration, + ) + raise AWSTemporaryCredentialsError + + credentials = response["Credentials"] + return AWSTemporaryCredentials( + access_key_id=credentials["AccessKeyId"], + secret_access_key=credentials["SecretAccessKey"], + session_token=credentials["SessionToken"], + ) From a2c7b35859f66b5d4fce0ef40613c65aa34fa195 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Wed, 2 Apr 2025 16:37:52 -0500 Subject: [PATCH 02/18] Updates --- readthedocs/api/v2/views/model_views.py | 7 +++ readthedocs/builds/storage.py | 4 ++ readthedocs/projects/models.py | 6 +-- readthedocs/projects/tasks/builds.py | 40 ++++++++++------- readthedocs/settings/base.py | 2 + readthedocs/storage/s3_storage.py | 1 + readthedocs/storage/security_token_service.py | 45 +++++++++++++------ 7 files changed, 71 insertions(+), 34 deletions(-) diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index 17fdbea9e19..4dcce393a49 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -355,6 +355,11 @@ def get_queryset_for_api_key(self, api_key): url_path="temporary-credentials", ) def temporary_credentials(self, request, **kwargs): + """ + Generate temporary credentials for the build. + + This can generate temporary credentials for interacting with S3 only for now. 
+ """ build = self.get_object() project = build.project version = build.version @@ -363,6 +368,8 @@ def temporary_credentials(self, request, **kwargs): project=project, version=version, session_id=build.pk, + # 30 minutes should be enough to upload all build artifacts. + duration=30 * 60, ) except AWSTemporaryCredentialsError: return Response( diff --git a/readthedocs/builds/storage.py b/readthedocs/builds/storage.py index 7de5d51575b..eb673c9d584 100644 --- a/readthedocs/builds/storage.py +++ b/readthedocs/builds/storage.py @@ -32,6 +32,10 @@ class BuildMediaStorageMixin: # that will serve files from this storage. internal_redirect_root_path = "proxito" + # If the storage backend supports passing credentials in their init method. + # Mainly used for S3. + supports_credentials = False + @staticmethod def _dirpath(path): """ diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py index f16a58a0019..f73894c41e9 100644 --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1932,9 +1932,7 @@ def add_features(sender, **kwargs): # Build related features SCALE_IN_PROTECTION = "scale_in_prtection" - USE_SCOPED_CREDENTIALS_FOR_BUILD_MEDIA_UPLOAD = ( - "use_s3_scoped_credentials_for_build_media_upload" - ) + USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS = "use_s3_scoped_credentials_on_builders" FEATURES = ( ( @@ -2014,7 +2012,7 @@ def add_features(sender, **kwargs): _("Build: Set scale-in protection before/after building."), ), ( - USE_SCOPED_CREDENTIALS_FOR_BUILD_MEDIA_UPLOAD, + USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS, _("Build: Use S3 scoped credentials for uploading build artifacts."), ), ) diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index 32dd693fdbc..26795e74602 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -886,29 +886,28 @@ def set_valid_clone(self): self.data.version.project.has_valid_clone = True def _get_sync_media_storage(self): + """ + 
Get a storage class instance to use for syncing build artifacts. + + .. note:: + + We no longer use readthedocs.storage.build_media_storage directly, + as we are now using per-build credentials for S3 storage, + so we need to dynamically create the storage class instance + """ storage_class = import_string(settings.RTD_BUILD_MEDIA_STORAGE) - extra_kwargs = self._get_extra_kwargs_for_storage() - log.info("Credentials for storage", extra_kwargs=extra_kwargs) + extra_kwargs = {} + if storage_class.supports_credentials: + extra_kwargs = self._get_s3_scoped_credentials() return storage_class(**extra_kwargs) - def _get_extra_kwargs_for_storage(self): - extra_kwargs = {} - if self.data.project.has_feature(Feature.USE_SCOPED_CREDENTIALS_FOR_BUILD_MEDIA_UPLOAD): - credentials = self._get_scoped_credentials() - s3_credentials = credentials["s3"] - extra_kwargs.update( - { - "access_key": s3_credentials["access_key_id"], - "secret_key": s3_credentials["secret_access_key"], - "security_token": s3_credentials["session_token"], - } - ) - return extra_kwargs + def _get_s3_scoped_credentials(self): + if not self.data.project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): + return {} - def _get_scoped_credentials(self): build_id = self.data.build["id"] try: - return self.data.api_client.build(f"{build_id}/temporary-credentials").post() + credentials = self.data.api_client.build(f"{build_id}/temporary-credentials").post() except Exception: log.exception( "Error getting scoped credentials.", @@ -919,6 +918,13 @@ def _get_scoped_credentials(self): exception_message="Error getting scoped credentials.", ) + s3_credentials = credentials["s3"] + return { + "access_key": s3_credentials["access_key_id"], + "secret_key": s3_credentials["secret_access_key"], + "security_token": s3_credentials["session_token"], + } + def store_build_artifacts(self): """ Save build artifacts to "storage" using Django's storage API. 
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py index 0d59d54c83b..8a39772d19e 100644 --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -419,6 +419,8 @@ def MIDDLEWARE(self): ] PYTHON_MEDIA = False + RTD_USE_SCOPED_CREDENTIALS_ON_BUILDS = False + # Django Storage subclass used to write build artifacts to cloud or local storage # https://docs.readthedocs.io/page/development/settings.html#rtd-build-media-storage RTD_BUILD_MEDIA_STORAGE = "readthedocs.builds.storage.BuildMediaFileSystemStorage" diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py index 01a2928be79..27ba1697cf4 100644 --- a/readthedocs/storage/s3_storage.py +++ b/readthedocs/storage/s3_storage.py @@ -45,6 +45,7 @@ class S3BuildMediaStorage(OverrideHostnameMixin, S3BuildMediaStorageMixin): bucket_name = getattr(settings, "S3_MEDIA_STORAGE_BUCKET", None) override_hostname = getattr(settings, "S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME", None) + supports_credentials = True def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/readthedocs/storage/security_token_service.py b/readthedocs/storage/security_token_service.py index dbc185d32ab..bcdf2ff9222 100644 --- a/readthedocs/storage/security_token_service.py +++ b/readthedocs/storage/security_token_service.py @@ -2,6 +2,20 @@ Module to interact with AWS STS (Security Token Service) to assume a role and get temporary scoped credentials. This is mainly used to generate temporary credentials to interact with S3 buckets from the builders. + +In order to make use of STS, we need: + +- Create a role in IAM with a trusted entity type set to the AWS account that is going to be used to generate the temporary credentials. +- A policy that allows access to all S3 buckets and paths that are going to be used. + Which should be attached to the role. 
+- The permissions of the temporary credentials are the result of the intersection of the role policy and the inline policy that is passed to the AssumeRole API. + This means that the inline policy can be used to limit the permissions of the temporary credentials, but not to expand them. + +See: + +- https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html +- https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_sts-comparison.html +- https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html """ import json @@ -23,13 +37,11 @@ class AWSTemporaryCredentialsError(Exception): @dataclass class AWSTemporaryCredentials: - """ - Dataclass to hold AWS temporary credentials. - """ + """Dataclass to hold AWS temporary credentials.""" access_key_id: str secret_access_key: str - session_token: str + session_token: str | None def get_sts_client(): @@ -43,9 +55,11 @@ def get_sts_client(): def get_s3_scoped_credentials( - project, version, session_id=None, duration=60 * 15 + *, project, version, session_id=None, duration=60 * 15 ) -> AWSTemporaryCredentials: """ + :param project: The project to get the credentials for. + :param version: The version to get the credentials for. :param session_id: A unique identifier to add to the name of the role session. The name of the session always includes the project and version slug (rtd-{project}-{version}), if session_id is given, the name of the session would be "rtd-{session_id}-{project}-{version}". @@ -54,11 +68,19 @@ def get_s3_scoped_credentials( :duration: The duration of the credentials in seconds. Default is 15 minutes. Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). 
- See: - - https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html - - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_sts-comparison.html - - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html + .. note:: + + If RTD_USE_SCOPED_CREDENTIALS_ON_BUILDS is set to False, this function will return + the values of the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY settings. """ + if not settings.RTD_USE_SCOPED_CREDENTIALS_ON_BUILDS: + return AWSTemporaryCredentials( + access_key_id=settings.AWS_ACCESS_KEY_ID, + secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + # A session token is not needed for the default credentials. + session_token=None, + ) + bucket_name = build_media_storage.bucket_name bucket_arn = f"arn:aws:s3:::{bucket_name}" @@ -105,8 +127,6 @@ def get_s3_scoped_credentials( ], } - # NOTE: maybe have a single setting? AWS_STS_ASSUME_ROLE_ARN - role_arn = f"arn:aws:iam::{settings.AWS_IAM_USER_ID}:role/{settings.AWS_ROLE_NAME}" session_prefix = f"rtd-{session_id}" if session_id else "rtd" role_session_name = f"{session_prefix}-{project.slug}-{version.slug}" # Limit to 64 characters, as per AWS limitations. 
@@ -115,7 +135,7 @@ def get_s3_scoped_credentials( try: sts_client = get_sts_client() response = sts_client.assume_role( - RoleArn=role_arn, + RoleArn=settings.AWS_STS_ASSUME_ROLE_ARN, RoleSessionName=role_session_name, Policy=json.dumps(policy_document), DurationSeconds=duration, @@ -123,7 +143,6 @@ def get_s3_scoped_credentials( except Exception: log.exception( "Error while assuming role to generate temporary credentials", - role_arn=role_arn, role_session_name=role_session_name, policy_document=policy_document, duration=duration, From 952d72d1f655d8d9abf50367ea21e4506628a764 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Wed, 2 Apr 2025 17:11:17 -0500 Subject: [PATCH 03/18] Less settings --- readthedocs/builds/storage.py | 4 ---- readthedocs/projects/tasks/builds.py | 3 ++- readthedocs/settings/base.py | 6 ++++-- readthedocs/storage/s3_storage.py | 1 - readthedocs/storage/security_token_service.py | 5 +++-- 5 files changed, 9 insertions(+), 10 deletions(-) diff --git a/readthedocs/builds/storage.py b/readthedocs/builds/storage.py index eb673c9d584..7de5d51575b 100644 --- a/readthedocs/builds/storage.py +++ b/readthedocs/builds/storage.py @@ -32,10 +32,6 @@ class BuildMediaStorageMixin: # that will serve files from this storage. internal_redirect_root_path = "proxito" - # If the storage backend supports passing credentials in their init method. - # Mainly used for S3. 
- supports_credentials = False - @staticmethod def _dirpath(path): """ diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index 26795e74602..72fd1c7149e 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -897,12 +897,13 @@ def _get_sync_media_storage(self): """ storage_class = import_string(settings.RTD_BUILD_MEDIA_STORAGE) extra_kwargs = {} - if storage_class.supports_credentials: + if settings.USING_AWS: extra_kwargs = self._get_s3_scoped_credentials() return storage_class(**extra_kwargs) def _get_s3_scoped_credentials(self): if not self.data.project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): + # Use the default credentials defined in the settings. return {} build_id = self.data.build["id"] diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py index 8a39772d19e..8596178db8a 100644 --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -419,8 +419,6 @@ def MIDDLEWARE(self): ] PYTHON_MEDIA = False - RTD_USE_SCOPED_CREDENTIALS_ON_BUILDS = False - # Django Storage subclass used to write build artifacts to cloud or local storage # https://docs.readthedocs.io/page/development/settings.html#rtd-build-media-storage RTD_BUILD_MEDIA_STORAGE = "readthedocs.builds.storage.BuildMediaFileSystemStorage" @@ -1090,3 +1088,7 @@ def SOCIALACCOUNT_PROVIDERS(self): } S3_PROVIDER = "AWS" + + def USING_AWS(self): + """Return True if we are using AWS as our storage/cloud provider.""" + return self.S3_PROVIDER == "AWS" diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py index 27ba1697cf4..01a2928be79 100644 --- a/readthedocs/storage/s3_storage.py +++ b/readthedocs/storage/s3_storage.py @@ -45,7 +45,6 @@ class S3BuildMediaStorage(OverrideHostnameMixin, S3BuildMediaStorageMixin): bucket_name = getattr(settings, "S3_MEDIA_STORAGE_BUCKET", None) override_hostname = getattr(settings, "S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME", None) 
- supports_credentials = True def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/readthedocs/storage/security_token_service.py b/readthedocs/storage/security_token_service.py index bcdf2ff9222..a9d90fc3125 100644 --- a/readthedocs/storage/security_token_service.py +++ b/readthedocs/storage/security_token_service.py @@ -70,10 +70,11 @@ def get_s3_scoped_credentials( .. note:: - If RTD_USE_SCOPED_CREDENTIALS_ON_BUILDS is set to False, this function will return + If USING_AWS is set to False, this function will return the values of the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY settings. + Useful for local development where we use minio. """ - if not settings.RTD_USE_SCOPED_CREDENTIALS_ON_BUILDS: + if not settings.USING_AWS: return AWSTemporaryCredentials( access_key_id=settings.AWS_ACCESS_KEY_ID, secret_access_key=settings.AWS_SECRET_ACCESS_KEY, From e2ee90d48154c2eae2e55a2fe89ac15edab54c3c Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 11:47:00 -0500 Subject: [PATCH 04/18] Fix tests --- readthedocs/projects/tests/mockers.py | 5 ++--- readthedocs/projects/tests/test_build_tasks.py | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/readthedocs/projects/tests/mockers.py b/readthedocs/projects/tests/mockers.py index f2b9604fc05..6da04a88e17 100644 --- a/readthedocs/projects/tests/mockers.py +++ b/readthedocs/projects/tests/mockers.py @@ -7,6 +7,7 @@ from readthedocs.api.v2.serializers import VersionAdminSerializer from readthedocs.builds.constants import BUILD_STATE_TRIGGERED from readthedocs.projects.constants import MKDOCS +from readthedocs.projects.tasks.builds import UpdateDocsTask class BuildEnvironmentMocker: @@ -159,9 +160,7 @@ def _mock_environment(self): # ) def _mock_storage(self): - self.patches["build_media_storage"] = mock.patch( - "readthedocs.projects.tasks.builds.build_media_storage", - ) + self.patches["get_sync_media_storage"] = mock.patch.object(UpdateDocsTask, 
"_get_sync_media_storage") def _mock_api(self): headers = {"Content-Type": "application/json"} diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py index b9380048fc6..11f899827b8 100644 --- a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -660,7 +660,7 @@ def test_successful_build( assert BuildData.objects.all().exists() - self.mocker.mocks["build_media_storage"].rclone_sync_directory.assert_has_calls( + self.mocker.mocks["get_sync_media_storage"]().rclone_sync_directory.assert_has_calls( [ mock.call(mock.ANY, "html/project/latest"), mock.call(mock.ANY, "json/project/latest"), From de4ce2f6ff25daaa26ef635832b95791b1977319 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 12:51:10 -0500 Subject: [PATCH 05/18] Add test for the task --- readthedocs/projects/tasks/builds.py | 16 +- readthedocs/projects/tests/mockers.py | 15 +- .../projects/tests/test_build_tasks.py | 285 +++++++++++++++++- 3 files changed, 312 insertions(+), 4 deletions(-) diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index 72fd1c7149e..a26b4247ef3 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -895,12 +895,26 @@ def _get_sync_media_storage(self): as we are now using per-build credentials for S3 storage, so we need to dynamically create the storage class instance """ - storage_class = import_string(settings.RTD_BUILD_MEDIA_STORAGE) + storage_class = self._get_sync_media_storage_class() extra_kwargs = {} if settings.USING_AWS: extra_kwargs = self._get_s3_scoped_credentials() return storage_class(**extra_kwargs) + def _get_sync_media_storage_class(self): + """ + Get a storage class to use for syncing build artifacts. + + This is done in a separate method to make it easier to mock in tests. + + .. 
note:: + + We no longer use readthedocs.storage.build_media_storage directly, + as we are now using per-build credentials for S3 storage, + so we need to dynamically create the storage class instance + """ + return import_string(settings.RTD_BUILD_MEDIA_STORAGE) + def _get_s3_scoped_credentials(self): if not self.data.project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): # Use the default credentials defined in the settings. diff --git a/readthedocs/projects/tests/mockers.py b/readthedocs/projects/tests/mockers.py index 6da04a88e17..b7ebda2ad8c 100644 --- a/readthedocs/projects/tests/mockers.py +++ b/readthedocs/projects/tests/mockers.py @@ -160,7 +160,7 @@ def _mock_environment(self): # ) def _mock_storage(self): - self.patches["get_sync_media_storage"] = mock.patch.object(UpdateDocsTask, "_get_sync_media_storage") + self.patches["get_sync_media_storage_class"] = mock.patch.object(UpdateDocsTask, "_get_sync_media_storage_class") def _mock_api(self): headers = {"Content-Type": "application/json"} @@ -224,6 +224,19 @@ def _mock_api(self): headers=headers, ) + self.requestsmock.post( + f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/temporary-credentials/", + status_code=201, + json={ + "s3": { + "access_key_id": "some-access-key", + "secret_access_key": "some-secret-key", + "session_token": "some-session-token", + } + }, + headers=headers, + ) + self.requestsmock.patch( f"{settings.SLUMBER_API_HOST}/api/v2/project/{self.project.pk}/", status_code=201, diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py index 11f899827b8..e13d7f7171f 100644 --- a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -6,6 +6,7 @@ from unittest import mock import django_dynamic_fixture as fixture +from django_dynamic_fixture import get import pytest from django.conf import settings from django.test.utils import override_settings @@ -22,7 +23,7 @@ from 
readthedocs.config.tests.test_config import get_build_config from readthedocs.doc_builder.exceptions import BuildCancelled, BuildUserError from readthedocs.projects.exceptions import RepositoryError -from readthedocs.projects.models import EnvironmentVariable, Project, WebHookEvent +from readthedocs.projects.models import EnvironmentVariable, Feature, Project, WebHookEvent from readthedocs.projects.tasks.builds import sync_repository_task, update_docs_task from readthedocs.telemetry.models import BuildData @@ -660,7 +661,287 @@ def test_successful_build( assert BuildData.objects.all().exists() - self.mocker.mocks["get_sync_media_storage"]().rclone_sync_directory.assert_has_calls( + self.mocker.mocks["get_sync_media_storage_class"]()().rclone_sync_directory.assert_has_calls( + [ + mock.call(mock.ANY, "html/project/latest"), + mock.call(mock.ANY, "json/project/latest"), + mock.call(mock.ANY, "htmlzip/project/latest"), + mock.call(mock.ANY, "pdf/project/latest"), + mock.call(mock.ANY, "epub/project/latest"), + ] + ) + # TODO: find a directory to remove here :) + # build_media_storage.delete_directory + + @override_settings( + DOCROOT="/tmp/readthedocs-tests/git-repository/", + RTD_BUILD_MEDIA_STORAGE = "readthedocs.storage.s3_storage.S3BuildMediaStorage", + S3_MEDIA_STORAGE_BUCKET="readthedocs-test", + # S3_PROVIDER="AWS", + ) + @mock.patch("readthedocs.projects.tasks.builds.shutil") + @mock.patch("readthedocs.projects.tasks.builds.index_build") + @mock.patch("readthedocs.projects.tasks.builds.build_complete") + @mock.patch("readthedocs.projects.tasks.builds.send_external_build_status") + @mock.patch("readthedocs.projects.tasks.builds.UpdateDocsTask.send_notifications") + @mock.patch("readthedocs.projects.tasks.builds.clean_build") + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") + def test_successful_build_with_temporary_s3_credentials( + self, + load_yaml_config, + clean_build, + send_notifications, + send_external_build_status, + build_complete, + 
index_build, + shutilmock, + ): + get( + Feature, + feature_id=Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS, + projects=[self.project], + ) + load_yaml_config.return_value = get_build_config( + { + "formats": "all", + "sphinx": { + "configuration": "docs/conf.py", + }, + }, + validate=True, + ) + + assert not BuildData.objects.all().exists() + + # Create the artifact paths, so it's detected by the builder + os.makedirs(self.project.artifact_path(version=self.version.slug, type_="html")) + os.makedirs(self.project.artifact_path(version=self.version.slug, type_="json")) + filename = str(uuid.uuid4()) + for f in ("htmlzip", "epub", "pdf"): + extension = "zip" if f == "htmlzip" else f + os.makedirs(self.project.artifact_path(version=self.version.slug, type_=f)) + pathlib.Path( + os.path.join( + self.project.artifact_path(version=self.version.slug, type_=f), + # Use a random name for the offline format. + # We will automatically rename this file to filename El Proxito expects. + f"{filename}.{extension}", + ) + ).touch() + + # Create an "index.html" at root to avoid failing the builds + pathlib.Path( + os.path.join( + self.project.artifact_path(version=self.version.slug, type_="html"), + "index.html", + ) + ).touch() + + self._trigger_update_docs_task() + + # Offline formats were renamed to the correct filename.
+ shutilmock.move.assert_has_calls( + [ + mock.call( + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/htmlzip/{filename}.zip" + ), + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/htmlzip/{self.project.slug}.zip" + ), + ), + mock.call( + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/pdf/{filename}.pdf" + ), + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/pdf/{self.project.slug}.pdf" + ), + ), + mock.call( + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/epub/{filename}.epub" + ), + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/epub/{self.project.slug}.epub" + ), + ), + ] + ) + + # It has to be called twice, ``before_start`` and ``after_return`` + clean_build.assert_has_calls( + [mock.call(mock.ANY), mock.call(mock.ANY)] # the argument is an APIVersion + ) + + # TODO: mock `build_tasks.send_build_notifications` instead and add + # another tests to check that they are not sent for EXTERNAL versions + send_notifications.assert_called_once_with( + self.version.pk, + self.build.pk, + event=WebHookEvent.BUILD_PASSED, + ) + + send_external_build_status.assert_called_once_with( + version_type=self.version.type, + build_pk=self.build.pk, + commit=self.build.commit, + status=BUILD_STATUS_SUCCESS, + ) + + build_complete.send.assert_called_once_with( + sender=Build, + build=mock.ANY, + ) + + index_build.delay.assert_called_once_with(build_id=self.build.pk) + + # TODO: assert the verb and the path for each API call as well + + # Build reset + assert self.requests_mock.request_history[3]._request.method == "POST" + assert self.requests_mock.request_history[3].path == "/api/v2/build/1/reset/" + + # Update build state: cloning + assert self.requests_mock.request_history[4].json() == { + "id": 1, + "state": "cloning", + "commit": "a1b2c3", + "error": "", + "builder": mock.ANY, + } + + # Update build state: installing + assert self.requests_mock.request_history[5].json() == { + "id": 1, + "state":
"installing", + "commit": "a1b2c3", + "builder": mock.ANY, + "readthedocs_yaml_path": None, + "error": "", + # We update the `config` field at the same time we send the + # `installing` state, to reduce one API call + "config": { + "version": "2", + "formats": ["htmlzip", "pdf", "epub"], + "python": { + "install": [], + }, + "conda": None, + "build": { + "os": "ubuntu-22.04", + "commands": [], + "jobs": { + "pre_checkout": [], + "post_checkout": [], + "pre_system_dependencies": [], + "post_system_dependencies": [], + "pre_create_environment": [], + "create_environment": None, + "post_create_environment": [], + "pre_install": [], + "install": None, + "post_install": [], + "pre_build": [], + "build": { + "html": None, + "pdf": None, + "epub": None, + "htmlzip": None, + }, + "post_build": [], + }, + "tools": { + "python": { + "full_version": "3.13.0", + "version": "3", + } + }, + "apt_packages": [], + }, + "doctype": "sphinx", + "sphinx": { + "builder": "sphinx", + "configuration": "docs/conf.py", + "fail_on_warning": False, + }, + "mkdocs": None, + "submodules": { + "include": [], + "exclude": "all", + "recursive": False, + }, + "search": { + "ranking": {}, + "ignore": [ + "search.html", + "search/index.html", + "404.html", + "404/index.html", + ], + }, + }, + } + # Update build state: building + assert self.requests_mock.request_history[6].json() == { + "id": 1, + "state": "building", + "commit": "a1b2c3", + "readthedocs_yaml_path": None, + "config": mock.ANY, + "builder": mock.ANY, + "error": "", + } + # Update build state: uploading + assert self.requests_mock.request_history[7].json() == { + "id": 1, + "state": "uploading", + "commit": "a1b2c3", + "readthedocs_yaml_path": None, + "config": mock.ANY, + "builder": mock.ANY, + "error": "", + } + + # Get temporary credentials + assert self.requests_mock.request_history[8]._request.method == "POST" + assert self.requests_mock.request_history[8].path == "/api/v2/build/1/temporary-credentials/" + + # Update version 
state + assert self.requests_mock.request_history[9]._request.method == "PATCH" + assert self.requests_mock.request_history[9].path == "/api/v2/version/1/" + assert self.requests_mock.request_history[9].json() == { + "addons": False, + "build_data": None, + "built": True, + "documentation_type": "sphinx", + "has_pdf": True, + "has_epub": True, + "has_htmlzip": True, + } + # Set project has valid clone + assert self.requests_mock.request_history[10]._request.method == "PATCH" + assert self.requests_mock.request_history[10].path == "/api/v2/project/1/" + assert self.requests_mock.request_history[10].json() == {"has_valid_clone": True} + # Update build state: finished, success and builder + assert self.requests_mock.request_history[11].json() == { + "id": 1, + "state": "finished", + "commit": "a1b2c3", + "readthedocs_yaml_path": None, + "config": mock.ANY, + "builder": mock.ANY, + "length": mock.ANY, + "success": True, + "error": "", + } + + assert self.requests_mock.request_history[12]._request.method == "POST" + assert self.requests_mock.request_history[12].path == "/api/v2/revoke/" + + assert BuildData.objects.all().exists() + + self.mocker.mocks["get_sync_media_storage_class"]()().rclone_sync_directory.assert_has_calls( [ mock.call(mock.ANY, "html/project/latest"), mock.call(mock.ANY, "json/project/latest"), From a2501c8c10cf6c8ba71eec1dee4a4ffb697333a0 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 16:26:00 -0500 Subject: [PATCH 06/18] Docs and settings --- docs/dev/aws-temporary-credentials.rst | 30 ++++++++++++++++++++++++++ docs/dev/index.rst | 1 + docs/dev/settings.rst | 15 +++++++++++++ readthedocs/settings/base.py | 1 + readthedocs/settings/docker_compose.py | 25 +++++++++++++-------- 5 files changed, 63 insertions(+), 9 deletions(-) create mode 100644 docs/dev/aws-temporary-credentials.rst diff --git a/docs/dev/aws-temporary-credentials.rst b/docs/dev/aws-temporary-credentials.rst new file mode 100644 index 00000000000..3bc965ef1d3 
--- /dev/null +++ b/docs/dev/aws-temporary-credentials.rst @@ -0,0 +1,30 @@ +AWS temporary credentials +========================= + +Builders run arbitrary commands provided by the user, while we run the commands in a sandboxed environment (docker), +that shouldn't be the only line of defense, as we still interact with the files generated by the user outside docker for some operations. + +This is why instead of using credentials that have access to all the resources in AWS, +we are using credentials that are generated by the AWS STS service, +which are temporary and scoped to the resources that are needed for the build. + +Local development +----------------- + +In order to make use of STS, you need: + +- Create a role in IAM with a trusted entity type set to the AWS account that is going to be used to generate the temporary credentials. +- Create an inline policy for the role, the policy should allow access to all S3 buckets and paths that are going to be used. + +You can use :ref:`environment variables ` to set the credentials for AWS, make sure to set the value of ``RTD_S3_PROVIDER`` to ``AWS``. + +.. note:: + + If you are part of the development team, you should be able to use the credentials from the ``storage-dev`` user, + which is already configured to make use of STS, and the ARN from the ``RTDSTSAssumeRoleDev`` role. + +.. note:: + + You should use AWS only when you are testing the AWS integration, + use the default minio provider for local development. + Otherwise, files may be overwritten if multiple developers are using the same credentials.
diff --git a/docs/dev/index.rst b/docs/dev/index.rst index 4e7f70d6d55..7a3e76859ba 100644 --- a/docs/dev/index.rst +++ b/docs/dev/index.rst @@ -28,6 +28,7 @@ or taking the open source Read the Docs codebase for your own custom installatio migrations server-side-search search-integration + aws-temporary-credentials subscriptions settings tests diff --git a/docs/dev/settings.rst b/docs/dev/settings.rst index 2db4844c16c..397cb1f3892 100644 --- a/docs/dev/settings.rst +++ b/docs/dev/settings.rst @@ -167,6 +167,21 @@ providers using the following environment variables: .. envvar:: RTD_SOCIALACCOUNT_PROVIDERS_GOOGLE_CLIENT_ID .. envvar:: RTD_SOCIALACCOUNT_PROVIDERS_GOOGLE_SECRET +AWS configuration +~~~~~~~~~~~~~~~~~ + +The following variables can be used to use AWS in your local environment. + +.. envvar:: RTD_S3_PROVIDER +.. envvar:: RTD_AWS_ACCESS_KEY_ID +.. envvar:: RTD_AWS_SECRET_ACCESS_KEY +.. envvar:: RTD_AWS_STS_ASSUME_ROLE_ARN +.. envvar:: RTD_S3_MEDIA_STORAGE_BUCKET +.. envvar:: RTD_S3_BUILD_COMMANDS_STORAGE_BUCKET +.. envvar:: RTD_S3_BUILD_TOOLS_STORAGE_BUCKET +.. envvar:: RTD_S3_STATIC_STORAGE_BUCKET +.. 
envvar:: RTD_AWS_S3_REGION_NAME + Stripe secrets ~~~~~~~~~~~~~~ diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py index 8596178db8a..9acf196a777 100644 --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -1088,6 +1088,7 @@ def SOCIALACCOUNT_PROVIDERS(self): } S3_PROVIDER = "AWS" + AWS_STS_ASSUME_ROLE_ARN = "arn:aws:iam::1234:role/SomeRole" def USING_AWS(self): """Return True if we are using AWS as our storage/cloud provider.""" diff --git a/readthedocs/settings/docker_compose.py b/readthedocs/settings/docker_compose.py index 40b17214400..9a2490d1428 100644 --- a/readthedocs/settings/docker_compose.py +++ b/readthedocs/settings/docker_compose.py @@ -187,21 +187,28 @@ def DATABASES(self): # noqa STATICFILES_STORAGE = "readthedocs.storage.s3_storage.S3StaticStorage" RTD_STATICFILES_STORAGE = "readthedocs.storage.s3_storage.NoManifestS3StaticStorage" - AWS_ACCESS_KEY_ID = "admin" - AWS_SECRET_ACCESS_KEY = "password" - S3_MEDIA_STORAGE_BUCKET = "media" - S3_BUILD_COMMANDS_STORAGE_BUCKET = "builds" - S3_BUILD_ENVIRONMENT_STORAGE_BUCKET = "envs" - S3_BUILD_TOOLS_STORAGE_BUCKET = "build-tools" - S3_STATIC_STORAGE_BUCKET = "static" + AWS_ACCESS_KEY_ID = os.environ.get("RTD_AWS_ACCESS_KEY_ID", "admin") + AWS_SECRET_ACCESS_KEY = os.environ.get("RTD_AWS_SECRET_ACCESS_KEY", "password") + S3_MEDIA_STORAGE_BUCKET = os.environ.get("RTD_S3_MEDIA_STORAGE_BUCKET", "media") + S3_BUILD_COMMANDS_STORAGE_BUCKET = os.environ.get("RTD_S3_BUILD_COMMANDS_STORAGE_BUCKET", "builds") + S3_BUILD_TOOLS_STORAGE_BUCKET = os.environ.get("RTD_S3_BUILD_TOOLS_STORAGE_BUCKET", "build-tools") + S3_STATIC_STORAGE_BUCKET = os.environ.get("RTD_S3_STATIC_STORAGE_BUCKET", "static") S3_STATIC_STORAGE_OVERRIDE_HOSTNAME = PRODUCTION_DOMAIN S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME = PRODUCTION_DOMAIN - S3_PROVIDER = "minio" + S3_PROVIDER = os.environ.get("RTD_S3_PROVIDER", "minio") AWS_S3_ENCRYPTION = False AWS_S3_SECURE_URLS = False AWS_S3_USE_SSL = False - 
AWS_S3_ENDPOINT_URL = "http://storage:9000/" + AWS_STS_ASSUME_ROLE_ARN = os.environ.get("RTD_AWS_STS_ASSUME_ROLE_ARN", None) + AWS_S3_REGION_NAME = os.environ.get("RTD_AWS_S3_REGION_NAME", None) + + @property + def AWS_S3_ENDPOINT_URL(self): + if self.S3_PROVIDER == "minio": + return "http://storage:9000/" + return None + AWS_QUERYSTRING_AUTH = False STRIPE_SECRET = os.environ.get("RTD_STRIPE_SECRET", "sk_test_x") From c0146772900403eb6a3aa4b13ff3728a3ad1b534 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 16:29:24 -0500 Subject: [PATCH 07/18] Cleanup tests --- readthedocs/projects/tests/test_build_tasks.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py index e13d7f7171f..1b0aab7647e 100644 --- a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -677,7 +677,6 @@ def test_successful_build( DOCROOT="/tmp/readthedocs-tests/git-repository/", RTD_BUILD_MEDIA_STORAGE = "readthedocs.storage.s3_storage.S3BuildMediaStorage", S3_MEDIA_STORAGE_BUCKET="readthedocs-test", - # S3_PROVIDER="AWS", ) @mock.patch("readthedocs.projects.tasks.builds.shutil") @mock.patch("readthedocs.projects.tasks.builds.index_build") @@ -950,8 +949,6 @@ def test_successful_build_with_temporary_s3_credentials( mock.call(mock.ANY, "epub/project/latest"), ] ) - # TODO: find a directory to remove here :) - # build_media_storage.delete_directory @mock.patch("readthedocs.projects.tasks.builds.build_complete") @mock.patch("readthedocs.projects.tasks.builds.send_external_build_status") From 8a3b757487dce3d8f9ae53ab0b9d3becbe10dd70 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 17:31:16 -0500 Subject: [PATCH 08/18] Test service --- readthedocs/storage/security_token_service.py | 6 +- readthedocs/storage/tests/__init__.py | 0 .../tests/test_security_token_service.py | 182 ++++++++++++++++++ 3 files changed, 183 
insertions(+), 5 deletions(-) create mode 100644 readthedocs/storage/tests/__init__.py create mode 100644 readthedocs/storage/tests/test_security_token_service.py diff --git a/readthedocs/storage/security_token_service.py b/readthedocs/storage/security_token_service.py index a9d90fc3125..5b1cc3477a8 100644 --- a/readthedocs/storage/security_token_service.py +++ b/readthedocs/storage/security_token_service.py @@ -25,8 +25,6 @@ import structlog from django.conf import settings -from readthedocs.storage import build_media_storage - log = structlog.get_logger(__name__) @@ -82,9 +80,7 @@ def get_s3_scoped_credentials( session_token=None, ) - bucket_name = build_media_storage.bucket_name - bucket_arn = f"arn:aws:s3:::{bucket_name}" - + bucket_arn = f"arn:aws:s3:::{settings.S3_MEDIA_STORAGE_BUCKET}" storage_paths = version.get_storage_paths() # Generate the list of allowed prefix resources # The resulting prefix looks like: diff --git a/readthedocs/storage/tests/__init__.py b/readthedocs/storage/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/readthedocs/storage/tests/test_security_token_service.py b/readthedocs/storage/tests/test_security_token_service.py new file mode 100644 index 00000000000..cbf93647ab5 --- /dev/null +++ b/readthedocs/storage/tests/test_security_token_service.py @@ -0,0 +1,182 @@ +import json +from unittest import mock + +from django.contrib.auth.models import User +from django.test import TestCase, override_settings +from django_dynamic_fixture import get + +from readthedocs.builds.constants import EXTERNAL +from readthedocs.projects.models import Project +from readthedocs.storage.security_token_service import ( + AWSTemporaryCredentials, + get_s3_scoped_credentials, +) + + +@override_settings( + AWS_ACCESS_KEY_ID="global_access_key_id", + AWS_SECRET_ACCESS_KEY="global_secret_access_key", + AWS_S3_REGION_NAME="us-east-1", + AWS_STS_ASSUME_ROLE_ARN="arn:aws:iam::1234:role/RoleName", + 
S3_MEDIA_STORAGE_BUCKET="readthedocs-media", +) +class TestSecurityTokenService(TestCase): + def setUp(self): + self.user = get(User) + self.project = get( + Project, + slug="project", + users=[self.user], + ) + self.version = self.project.versions.first() + + @override_settings(S3_PROVIDER="minio") + def test_get_s3_global_credentials(self): + credentials = get_s3_scoped_credentials( + project=self.project, + version=self.version, + ) + assert credentials == AWSTemporaryCredentials( + access_key_id="global_access_key_id", + secret_access_key="global_secret_access_key", + session_token=None, + ) + + @mock.patch("readthedocs.storage.security_token_service.boto3.client") + def test_get_s3_scoped_credentials(self, boto3_client): + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_scoped_credentials( + project=self.project, + version=self.version, + session_id="1234", + ) + assert credentials == AWSTemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-media/html/project/latest/*", + "arn:aws:s3:::readthedocs-media/pdf/project/latest/*", + "arn:aws:s3:::readthedocs-media/epub/project/latest/*", + "arn:aws:s3:::readthedocs-media/htmlzip/project/latest/*", + "arn:aws:s3:::readthedocs-media/json/project/latest/*", + "arn:aws:s3:::readthedocs-media/diff/project/latest/*", + ], + }, + { + "Effect": "Allow", + "Action": ["s3:ListBucket"], + "Resource": [ + "arn:aws:s3:::readthedocs-media", + ], + "Condition": { + "StringLike": { + "s3:prefix": [ + "html/project/latest/*", + "pdf/project/latest/*", + "epub/project/latest/*", + 
"htmlzip/project/latest/*", + "json/project/latest/*", + "diff/project/latest/*", + ] + } + }, + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName="rtd-1234-project-latest", + Policy=json.dumps(policy), + DurationSeconds=900, + ) + + @mock.patch("readthedocs.storage.security_token_service.boto3.client") + def test_get_s3_scoped_credentials_external_version(self, boto3_client): + self.version.type = EXTERNAL + self.version.save() + + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_scoped_credentials( + project=self.project, + version=self.version, + session_id="1234", + ) + assert credentials == AWSTemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-media/external/html/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/pdf/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/epub/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/htmlzip/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/json/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/diff/project/latest/*", + ], + }, + { + "Effect": "Allow", + "Action": ["s3:ListBucket"], + "Resource": [ + "arn:aws:s3:::readthedocs-media", + ], + "Condition": { + "StringLike": { + "s3:prefix": [ + "external/html/project/latest/*", + "external/pdf/project/latest/*", + "external/epub/project/latest/*", + "external/htmlzip/project/latest/*", + "external/json/project/latest/*", + "external/diff/project/latest/*", + ] + } + }, + }, + ], + } + + 
boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName="rtd-1234-project-latest", + Policy=json.dumps(policy), + DurationSeconds=900, + ) From f097e0cc7b992a3e9d569380701e60bf64c0d83b Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 17:53:48 -0500 Subject: [PATCH 09/18] Add tests for the API --- readthedocs/rtd_tests/tests/test_api.py | 42 +++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py index cd6ca0da9de..944427426aa 100644 --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -45,6 +45,7 @@ BUILD_STATE_CLONING, BUILD_STATE_FINISHED, BUILD_STATE_TRIGGERED, + BUILD_STATE_UPLOADING, EXTERNAL, EXTERNAL_VERSION_STATE_CLOSED, LATEST, @@ -69,6 +70,7 @@ Feature, Project, ) +from readthedocs.storage.security_token_service import AWSTemporaryCredentials from readthedocs.subscriptions.constants import TYPE_CONCURRENT_BUILDS from readthedocs.subscriptions.products import RTDProductFeature from readthedocs.vcs_support.backends.git import parse_version_from_ref @@ -143,6 +145,46 @@ def test_reset_build(self): self.assertEqual(build.commands.count(), 0) self.assertEqual(build.notifications.count(), 0) + @mock.patch("readthedocs.api.v2.views.model_views.get_s3_scoped_credentials") + def test_get_temporary_credentials_for_build(self, get_s3_scoped_credentials): + build = get( + Build, + project=self.project, + version=self.version, + state=BUILD_STATE_UPLOADING, + success=False, + output="Output", + error="Error", + exit_code=0, + builder="Builder", + cold_storage=True, + ) + + client = APIClient() + _, build_api_key = BuildAPIKey.objects.create_key(self.project) + client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}") + get_s3_scoped_credentials.return_value = AWSTemporaryCredentials( + access_key_id="access_key_id", + 
secret_access_key="secret_access_key", + session_token="session_token", + ) + r = client.post(reverse("build-temporary-credentials", args=(build.pk,))) + assert r.status_code == 200 + assert r.data == { + "s3": { + "access_key_id": "access_key_id", + "secret_access_key": "secret_access_key", + "session_token": "session_token", + } + } + + get_s3_scoped_credentials.assert_called_once_with( + project=self.project, + version=self.version, + session_id=build.pk, + duration=60 * 30, + ) + def test_api_does_not_have_private_config_key_superuser(self): client = APIClient() client.login(username="super", password="test") From 03898bf1e5d2484db891c84dfc998fb5e70480c7 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 3 Apr 2025 18:28:24 -0500 Subject: [PATCH 10/18] Fix typo, thanks copilot --- readthedocs/storage/rclone.py | 4 ++-- readthedocs/storage/s3_storage.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/readthedocs/storage/rclone.py b/readthedocs/storage/rclone.py index f1c27ac32a2..b50c866d730 100644 --- a/readthedocs/storage/rclone.py +++ b/readthedocs/storage/rclone.py @@ -170,7 +170,7 @@ def __init__( self, bucket_name, access_key_id, - secret_acces_key, + secret_access_key, region, provider="AWS", session_token=None, @@ -182,7 +182,7 @@ def __init__( self.env_vars = { "RCLONE_S3_PROVIDER": provider, "RCLONE_S3_ACCESS_KEY_ID": access_key_id, - "RCLONE_S3_SECRET_ACCESS_KEY": secret_acces_key, + "RCLONE_S3_SECRET_ACCESS_KEY": secret_access_key, "RCLONE_S3_REGION": region, "RCLONE_S3_LOCATION_CONSTRAINT": region, } diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py index 70da87a2ffd..dd110d597df 100644 --- a/readthedocs/storage/s3_storage.py +++ b/readthedocs/storage/s3_storage.py @@ -44,7 +44,7 @@ def _rclone(self): return RCloneS3Remote( bucket_name=self.bucket_name, access_key_id=self.access_key, - secret_acces_key=self.secret_key, + secret_access_key=self.secret_key, session_token=self.security_token, 
region=self.region_name or "", acl=self.default_acl, From d4fdefb4f3cf999b57d0e30b31604d6fffd9533c Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Mon, 7 Apr 2025 12:40:23 -0500 Subject: [PATCH 11/18] Updates from review --- docs/dev/aws-temporary-credentials.rst | 2 +- docs/dev/settings.rst | 1 + readthedocs/api/v2/views/model_views.py | 4 ++-- readthedocs/aws/__init__.py | 0 readthedocs/{storage => aws}/security_token_service.py | 1 + readthedocs/aws/tests/__init__.py | 0 .../{storage => aws}/tests/test_security_token_service.py | 8 ++++---- readthedocs/rtd_tests/tests/test_api.py | 2 +- readthedocs/settings/base.py | 2 ++ readthedocs/storage/rclone.py | 5 ++++- 10 files changed, 16 insertions(+), 9 deletions(-) create mode 100644 readthedocs/aws/__init__.py rename readthedocs/{storage => aws}/security_token_service.py (98%) create mode 100644 readthedocs/aws/tests/__init__.py rename readthedocs/{storage => aws}/tests/test_security_token_service.py (96%) diff --git a/docs/dev/aws-temporary-credentials.rst b/docs/dev/aws-temporary-credentials.rst index 3bc965ef1d3..ec215913404 100644 --- a/docs/dev/aws-temporary-credentials.rst +++ b/docs/dev/aws-temporary-credentials.rst @@ -5,7 +5,7 @@ Builders run arbitrary commands provided by the user, while we run the commands that shouln't be the only line of defense, as we still interact with the files generated by the user outside docker for some operations. This is why instead of using credentials that have access to all the resources in AWS, -we are using credentials that are generated by the AWS STS service, +we are using credentials that are generated by the `AWS STS service `__, which are temporary and scoped to the resources that are needed for the build. 
Local development diff --git a/docs/dev/settings.rst b/docs/dev/settings.rst index 397cb1f3892..b0f6b38b983 100644 --- a/docs/dev/settings.rst +++ b/docs/dev/settings.rst @@ -171,6 +171,7 @@ AWS configuration ~~~~~~~~~~~~~~~~~ The following variables can be used to use AWS in your local environment. +Useful for testing :doc:`temporary credentials `. .. envvar:: RTD_S3_PROVIDER .. envvar:: RTD_AWS_ACCESS_KEY_ID diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index 4dcce393a49..5c4ab1c1acf 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -39,8 +39,8 @@ from readthedocs.projects.models import Domain from readthedocs.projects.models import Project from readthedocs.storage import build_commands_storage -from readthedocs.storage.security_token_service import AWSTemporaryCredentialsError -from readthedocs.storage.security_token_service import get_s3_scoped_credentials +from readthedocs.aws.security_token_service import AWSTemporaryCredentialsError +from readthedocs.aws.security_token_service import get_s3_scoped_credentials from ..serializers import BuildAdminReadOnlySerializer from ..serializers import BuildAdminSerializer diff --git a/readthedocs/aws/__init__.py b/readthedocs/aws/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/readthedocs/storage/security_token_service.py b/readthedocs/aws/security_token_service.py similarity index 98% rename from readthedocs/storage/security_token_service.py rename to readthedocs/aws/security_token_service.py index 5b1cc3477a8..7f02474f4c8 100644 --- a/readthedocs/storage/security_token_service.py +++ b/readthedocs/aws/security_token_service.py @@ -16,6 +16,7 @@ - https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_sts-comparison.html - 
https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html +- https://docs.readthedocs.com/dev/latest/aws-temporary-credentials.html """ import json diff --git a/readthedocs/aws/tests/__init__.py b/readthedocs/aws/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/readthedocs/storage/tests/test_security_token_service.py b/readthedocs/aws/tests/test_security_token_service.py similarity index 96% rename from readthedocs/storage/tests/test_security_token_service.py rename to readthedocs/aws/tests/test_security_token_service.py index cbf93647ab5..869109984c4 100644 --- a/readthedocs/storage/tests/test_security_token_service.py +++ b/readthedocs/aws/tests/test_security_token_service.py @@ -7,7 +7,7 @@ from readthedocs.builds.constants import EXTERNAL from readthedocs.projects.models import Project -from readthedocs.storage.security_token_service import ( +from readthedocs.aws.security_token_service import ( AWSTemporaryCredentials, get_s3_scoped_credentials, ) @@ -30,7 +30,7 @@ def setUp(self): ) self.version = self.project.versions.first() - @override_settings(S3_PROVIDER="minio") + @override_settings(USING_AWS=False) def test_get_s3_global_credentials(self): credentials = get_s3_scoped_credentials( project=self.project, @@ -42,7 +42,7 @@ def test_get_s3_global_credentials(self): session_token=None, ) - @mock.patch("readthedocs.storage.security_token_service.boto3.client") + @mock.patch("readthedocs.aws.security_token_service.boto3.client") def test_get_s3_scoped_credentials(self, boto3_client): boto3_client().assume_role.return_value = { "Credentials": { @@ -110,7 +110,7 @@ def test_get_s3_scoped_credentials(self, boto3_client): DurationSeconds=900, ) - @mock.patch("readthedocs.storage.security_token_service.boto3.client") + @mock.patch("readthedocs.aws.security_token_service.boto3.client") def test_get_s3_scoped_credentials_external_version(self, boto3_client): self.version.type = EXTERNAL 
self.version.save() diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py index 944427426aa..e783813fdbe 100644 --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -70,7 +70,7 @@ Feature, Project, ) -from readthedocs.storage.security_token_service import AWSTemporaryCredentials +from readthedocs.aws.security_token_service import AWSTemporaryCredentials from readthedocs.subscriptions.constants import TYPE_CONCURRENT_BUILDS from readthedocs.subscriptions.products import RTDProductFeature from readthedocs.vcs_support.backends.git import parse_version_from_ref diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py index 25420e9909e..a251b78aabc 100644 --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -1085,8 +1085,10 @@ def SOCIALACCOUNT_PROVIDERS(self): } S3_PROVIDER = "AWS" + # Used by readthedocs.aws.security_token_service. AWS_STS_ASSUME_ROLE_ARN = "arn:aws:iam::1234:role/SomeRole" + @property def USING_AWS(self): """Return True if we are using AWS as our storage/cloud provider.""" return self.S3_PROVIDER == "AWS" diff --git a/readthedocs/storage/rclone.py b/readthedocs/storage/rclone.py index b50c866d730..16fdfe09f4d 100644 --- a/readthedocs/storage/rclone.py +++ b/readthedocs/storage/rclone.py @@ -154,7 +154,10 @@ class RCloneS3Remote(BaseRClone): :params bucket_name: Name of the S3 bucket. :params access_key_id: AWS access key id. - :params secret_acces_key: AWS secret access key. + :params secret_access_key: AWS secret access key. + :params session_token: AWS session token, + useful for temporary credentials from AWS STS. + See https://docs.readthedocs.com/dev/latest/aws-temporary-credentials.html. :params region: AWS region. :params provider: S3 provider, defaults to ``AWS``. Useful to use Minio during development. 
From c7b482b61985da4d26d99b4cad01f44b55213f96 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Mon, 7 Apr 2025 12:42:03 -0500 Subject: [PATCH 12/18] Format --- readthedocs/api/v2/views/model_views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index 5c4ab1c1acf..a2e78888aa2 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -28,6 +28,8 @@ from readthedocs.api.v2.permissions import IsOwner from readthedocs.api.v2.permissions import ReadOnlyPermission from readthedocs.api.v2.utils import normalize_build_command +from readthedocs.aws.security_token_service import AWSTemporaryCredentialsError +from readthedocs.aws.security_token_service import get_s3_scoped_credentials from readthedocs.builds.constants import INTERNAL from readthedocs.builds.models import Build from readthedocs.builds.models import BuildCommandResult @@ -39,8 +41,6 @@ from readthedocs.projects.models import Domain from readthedocs.projects.models import Project from readthedocs.storage import build_commands_storage -from readthedocs.aws.security_token_service import AWSTemporaryCredentialsError -from readthedocs.aws.security_token_service import get_s3_scoped_credentials from ..serializers import BuildAdminReadOnlySerializer from ..serializers import BuildAdminSerializer From 8b18cc7c62c08e87ea2610e3893bf8643b72a005 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Tue, 8 Apr 2025 17:40:18 -0500 Subject: [PATCH 13/18] Use scoped credentials for build_tools too --- readthedocs/api/v2/views/model_views.py | 35 ++-- readthedocs/aws/security_token_service.py | 149 +++++++++++++----- .../aws/tests/test_security_token_service.py | 50 +++--- readthedocs/doc_builder/director.py | 5 +- readthedocs/projects/tasks/builds.py | 60 +------ readthedocs/projects/tasks/storage.py | 101 ++++++++++++ readthedocs/projects/tests/mockers.py | 6 +- 
.../projects/tests/test_build_tasks.py | 49 +++--- readthedocs/rtd_tests/tests/test_api.py | 20 +-- 9 files changed, 312 insertions(+), 163 deletions(-) create mode 100644 readthedocs/projects/tasks/storage.py diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index a2e78888aa2..4b9404f60cf 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -29,7 +29,8 @@ from readthedocs.api.v2.permissions import ReadOnlyPermission from readthedocs.api.v2.utils import normalize_build_command from readthedocs.aws.security_token_service import AWSTemporaryCredentialsError -from readthedocs.aws.security_token_service import get_s3_scoped_credentials +from readthedocs.aws.security_token_service import get_s3_build_media_scoped_credentials +from readthedocs.aws.security_token_service import get_s3_build_tools_scoped_credentials from readthedocs.builds.constants import INTERNAL from readthedocs.builds.models import Build from readthedocs.builds.models import BuildCommandResult @@ -352,25 +353,33 @@ def get_queryset_for_api_key(self, api_key): detail=True, permission_classes=[HasBuildAPIKey], methods=["post"], - url_path="temporary-credentials", + url_path="temporary-credentials/storage", ) - def temporary_credentials(self, request, **kwargs): + def temporary_credentials_for_storage(self, request, **kwargs): """ - Generate temporary credentials for the build. + Generate temporary credentials for interacting with storage. This can generate temporary credentials for interacting with S3 only for now. """ build = self.get_object() - project = build.project - version = build.version - try: - credentials = get_s3_scoped_credentials( - project=project, - version=version, - session_id=build.pk, - # 30 minutes should be enough to upload all build artifacts. 
- duration=30 * 60, + credentials_type = request.data.get("type") + + if credentials_type == "build_media": + method = get_s3_build_media_scoped_credentials + # 30 minutes should be enough for uploading build artifacts. + duration = 30 * 60 + elif credentials_type == "build_tools": + method = get_s3_build_tools_scoped_credentials + # 30 minutes should be enough for downloading build tools. + duration = 30 * 60 + else: + return Response( + {"error": "Invalid storage type"}, + status=status.HTTP_400_BAD_REQUEST, ) + + try: + credentials = method(build=build, duration=duration) except AWSTemporaryCredentialsError: return Response( {"error": "Failed to generate temporary credentials"}, diff --git a/readthedocs/aws/security_token_service.py b/readthedocs/aws/security_token_service.py index 7f02474f4c8..974bc4431e6 100644 --- a/readthedocs/aws/security_token_service.py +++ b/readthedocs/aws/security_token_service.py @@ -43,6 +43,14 @@ class AWSTemporaryCredentials: session_token: str | None +@dataclass +class AWSS3TemporaryCredentials(AWSTemporaryCredentials): + """Subclass of AWSTemporaryCredentials to include S3 specific fields.""" + + bucket_name: str + region_name: str + + def get_sts_client(): return boto3.client( "sts", @@ -53,17 +61,10 @@ def get_sts_client(): ) -def get_s3_scoped_credentials( - *, project, version, session_id=None, duration=60 * 15 -) -> AWSTemporaryCredentials: +def _get_scoped_credentials(*, session_name, policy, duration) -> AWSTemporaryCredentials: """ - :param project: The project to get the credentials for. - :param version: The version to get the credentials for. - :param session_id: A unique identifier to add to the name of the role session. - The name of the session always includes the project and version slug (rtd-{project}-{version}), - if session_id is given, the name of the session would be "rtd-{session_id}-{project}-{version}". - AWS limits the session name to 64 characters, so if the session_id is too long, it will be truncated. 
- For example, for a token used in a build, a good session_id is the ID of the build. + :param session_name: An identifier to attach to the generated credentials, useful to identify who requested them. + AWS limits the session name to 64 characters, so if the session_name is too long, it will be truncated. :duration: The duration of the credentials in seconds. Default is 15 minutes. Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). @@ -71,7 +72,7 @@ def get_s3_scoped_credentials( If USING_AWS is set to False, this function will return the values of the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY settings. - Useful for local development where we use minio. + Useful for local development where we don't have a service like AWS STS. """ if not settings.USING_AWS: return AWSTemporaryCredentials( @@ -81,6 +82,45 @@ def get_s3_scoped_credentials( session_token=None, ) + # Limit to 64 characters, as per AWS limitations. + session_name = session_name[:64] + try: + sts_client = get_sts_client() + response = sts_client.assume_role( + RoleArn=settings.AWS_STS_ASSUME_ROLE_ARN, + RoleSessionName=session_name, + Policy=json.dumps(policy), + DurationSeconds=duration, + ) + except Exception: + log.exception( + "Error while assuming role to generate temporary credentials", + session_name=session_name, + policy=policy, + duration=duration, + ) + raise AWSTemporaryCredentialsError + + credentials = response["Credentials"] + return AWSTemporaryCredentials( + access_key_id=credentials["AccessKeyId"], + secret_access_key=credentials["SecretAccessKey"], + session_token=credentials["SessionToken"], + ) + + +def get_s3_build_media_scoped_credentials( + *, + build, + duration=60 * 15, +) -> AWSS3TemporaryCredentials: + """ + :param build: The build to get the credentials for. + :duration: The duration of the credentials in seconds. Default is 15 minutes. 
+ Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). + """ + project = build.project + version = build.version bucket_arn = f"arn:aws:s3:::{settings.S3_MEDIA_STORAGE_BUCKET}" storage_paths = version.get_storage_paths() # Generate the list of allowed prefix resources @@ -95,8 +135,8 @@ def get_s3_scoped_credentials( # arn:aws:s3:::readthedocs-media/pdf/project/latest/* allowed_objects_arn = [f"{bucket_arn}/{prefix}" for prefix in allowed_prefixes] - # Define an inline policy document to limit the permissions of the temporary credentials. - policy_document = { + # Inline policy document to limit the permissions of the temporary credentials. + policy = { "Version": "2012-10-17", "Statement": [ { @@ -125,31 +165,64 @@ def get_s3_scoped_credentials( ], } - session_prefix = f"rtd-{session_id}" if session_id else "rtd" - role_session_name = f"{session_prefix}-{project.slug}-{version.slug}" - # Limit to 64 characters, as per AWS limitations. 
- role_session_name = role_session_name[:64] + session_name = f"rtd-{build.id}-{project.slug}-{version.slug}" + credentials = _get_scoped_credentials( + session_name=session_name, + policy=policy, + duration=duration, + ) + return AWSS3TemporaryCredentials( + access_key_id=credentials.access_key_id, + secret_access_key=credentials.secret_access_key, + session_token=credentials.session_token, + region_name=settings.AWS_S3_REGION_NAME, + bucket_name=settings.S3_MEDIA_STORAGE_BUCKET, + ) - try: - sts_client = get_sts_client() - response = sts_client.assume_role( - RoleArn=settings.AWS_STS_ASSUME_ROLE_ARN, - RoleSessionName=role_session_name, - Policy=json.dumps(policy_document), - DurationSeconds=duration, - ) - except Exception: - log.exception( - "Error while assuming role to generate temporary credentials", - role_session_name=role_session_name, - policy_document=policy_document, - duration=duration, - ) - raise AWSTemporaryCredentialsError - credentials = response["Credentials"] - return AWSTemporaryCredentials( - access_key_id=credentials["AccessKeyId"], - secret_access_key=credentials["SecretAccessKey"], - session_token=credentials["SessionToken"], +def get_s3_build_tools_scoped_credentials( + *, + build, + duration=60 * 15, +) -> AWSS3TemporaryCredentials: + """ + :param build: The build to get the credentials for. + :duration: The duration of the credentials in seconds. Default is 15 minutes. + Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). + """ + project = build.project + version = build.version + bucket = settings.S3_BUILD_TOOLS_STORAGE_BUCKET + bucket_arn = f"arn:aws:s3:::{bucket}" + + # Inline policy to limit the permissions of the temporary credentials. + # The build-tools bucket is publicly readable, so we don't need to limit the permissions to a specific path. 
+ policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:ListBucket", + ], + "Resource": [ + bucket_arn, + f"{bucket_arn}/*", + ], + }, + ], + } + session_name = f"rtd-{build.id}-{project.slug}-{version.slug}" + credentials = _get_scoped_credentials( + session_name=session_name, + policy=policy, + duration=duration, + ) + return AWSS3TemporaryCredentials( + access_key_id=credentials.access_key_id, + secret_access_key=credentials.secret_access_key, + session_token=credentials.session_token, + region_name=settings.AWS_S3_REGION_NAME, + bucket_name=bucket, ) diff --git a/readthedocs/aws/tests/test_security_token_service.py b/readthedocs/aws/tests/test_security_token_service.py index 869109984c4..2d949ad1251 100644 --- a/readthedocs/aws/tests/test_security_token_service.py +++ b/readthedocs/aws/tests/test_security_token_service.py @@ -6,10 +6,12 @@ from django_dynamic_fixture import get from readthedocs.builds.constants import EXTERNAL +from readthedocs.builds.models import Build from readthedocs.projects.models import Project from readthedocs.aws.security_token_service import ( - AWSTemporaryCredentials, - get_s3_scoped_credentials, + AWSS3TemporaryCredentials, + get_s3_build_media_scoped_credentials, + get_s3_build_tools_scoped_credentials, ) @@ -29,21 +31,25 @@ def setUp(self): users=[self.user], ) self.version = self.project.versions.first() - - @override_settings(USING_AWS=False) - def test_get_s3_global_credentials(self): - credentials = get_s3_scoped_credentials( - project=self.project, + self.build = get( + Build, version=self.version, + project=self.project, ) - assert credentials == AWSTemporaryCredentials( + + @override_settings(USING_AWS=False) + def test_get_s3_build_media_global_credentials(self): + credentials = get_s3_build_media_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( access_key_id="global_access_key_id", 
secret_access_key="global_secret_access_key", session_token=None, + region_name="us-east-1", + bucket_name="readthedocs-media", ) @mock.patch("readthedocs.aws.security_token_service.boto3.client") - def test_get_s3_scoped_credentials(self, boto3_client): + def test_get_s3_build_media_scoped_credentials(self, boto3_client): boto3_client().assume_role.return_value = { "Credentials": { "AccessKeyId": "access_key_id", @@ -51,15 +57,13 @@ def test_get_s3_scoped_credentials(self, boto3_client): "SessionToken": "session_token", } } - credentials = get_s3_scoped_credentials( - project=self.project, - version=self.version, - session_id="1234", - ) - assert credentials == AWSTemporaryCredentials( + credentials = get_s3_build_media_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( access_key_id="access_key_id", secret_access_key="secret_access_key", session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-media", ) policy = { @@ -105,13 +109,13 @@ def test_get_s3_scoped_credentials(self, boto3_client): boto3_client().assume_role.assert_called_once_with( RoleArn="arn:aws:iam::1234:role/RoleName", - RoleSessionName="rtd-1234-project-latest", + RoleSessionName=f"rtd-{self.build.id}-project-latest", Policy=json.dumps(policy), DurationSeconds=900, ) @mock.patch("readthedocs.aws.security_token_service.boto3.client") - def test_get_s3_scoped_credentials_external_version(self, boto3_client): + def test_get_s3_build_media_scoped_credentials_external_version(self, boto3_client): self.version.type = EXTERNAL self.version.save() @@ -122,15 +126,13 @@ def test_get_s3_scoped_credentials_external_version(self, boto3_client): "SessionToken": "session_token", } } - credentials = get_s3_scoped_credentials( - project=self.project, - version=self.version, - session_id="1234", - ) - assert credentials == AWSTemporaryCredentials( + credentials = get_s3_build_media_scoped_credentials(build=self.build) + assert credentials == 
AWSS3TemporaryCredentials( access_key_id="access_key_id", secret_access_key="secret_access_key", session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-media", ) policy = { @@ -176,7 +178,7 @@ def test_get_s3_scoped_credentials_external_version(self, boto3_client): boto3_client().assume_role.assert_called_once_with( RoleArn="arn:aws:iam::1234:role/RoleName", - RoleSessionName="rtd-1234-project-latest", + RoleSessionName=f"rtd-{self.build.id}-project-latest", Policy=json.dumps(policy), DurationSeconds=900, ) diff --git a/readthedocs/doc_builder/director.py b/readthedocs/doc_builder/director.py index 0a020ae34f5..309c8246fcd 100644 --- a/readthedocs/doc_builder/director.py +++ b/readthedocs/doc_builder/director.py @@ -31,7 +31,8 @@ from readthedocs.projects.signals import after_build from readthedocs.projects.signals import before_build from readthedocs.projects.signals import before_vcs -from readthedocs.storage import build_tools_storage +from readthedocs.projects.tasks.storage import StorageType +from readthedocs.projects.tasks.storage import get_storage log = structlog.get_logger(__name__) @@ -522,6 +523,8 @@ def install_build_tools(self): record=False, ) + build_tools_storage = get_storage(self.data, StorageType.build_tools) + for tool, version in self.data.config.build.tools.items(): full_version = version.full_version # e.g. 
3.9 -> 3.9.7 diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index a26b4247ef3..3b191445f8b 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -18,7 +18,6 @@ from celery import Task from django.conf import settings from django.utils import timezone -from django.utils.module_loading import import_string from slumber import API from slumber.exceptions import HttpClientError @@ -54,6 +53,8 @@ from readthedocs.doc_builder.exceptions import BuildUserError from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError from readthedocs.projects.models import Feature +from readthedocs.projects.tasks.storage import StorageType +from readthedocs.projects.tasks.storage import get_storage from readthedocs.telemetry.collectors import BuildDataCollector from readthedocs.telemetry.tasks import save_build_data from readthedocs.worker import app @@ -885,61 +886,6 @@ def set_valid_clone(self): self.data.project.has_valid_clone = True self.data.version.project.has_valid_clone = True - def _get_sync_media_storage(self): - """ - Get a storage class instance to use for syncing build artifacts. - - .. note:: - - We no longer use readthedocs.storage.build_media_storage directly, - as we are now using per-build credentials for S3 storage, - so we need to dynamically create the storage class instance - """ - storage_class = self._get_sync_media_storage_class() - extra_kwargs = {} - if settings.USING_AWS: - extra_kwargs = self._get_s3_scoped_credentials() - return storage_class(**extra_kwargs) - - def _get_sync_media_storage_class(self): - """ - Get a storage class to use for syncing build artifacts. - - This is done in a separate method to make it easier to mock in tests. - - .. 
note:: - - We no longer use readthedocs.storage.build_media_storage directly, - as we are now using per-build credentials for S3 storage, - so we need to dynamically create the storage class instance - """ - return import_string(settings.RTD_BUILD_MEDIA_STORAGE) - - def _get_s3_scoped_credentials(self): - if not self.data.project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): - # Use the default credentials defined in the settings. - return {} - - build_id = self.data.build["id"] - try: - credentials = self.data.api_client.build(f"{build_id}/temporary-credentials").post() - except Exception: - log.exception( - "Error getting scoped credentials.", - build_id=build_id, - ) - raise BuildAppError( - BuildAppError.GENERIC_WITH_BUILD_ID, - exception_message="Error getting scoped credentials.", - ) - - s3_credentials = credentials["s3"] - return { - "access_key": s3_credentials["access_key_id"], - "secret_key": s3_credentials["secret_access_key"], - "security_token": s3_credentials["session_token"], - } - def store_build_artifacts(self): """ Save build artifacts to "storage" using Django's storage API. 
@@ -959,7 +905,7 @@ def store_build_artifacts(self): types_to_copy = [] types_to_delete = [] - build_media_storage = self._get_sync_media_storage() + build_media_storage = get_storage(self.data, StorageType.build_media) for artifact_type in ARTIFACT_TYPES: if artifact_type in valid_artifacts: diff --git a/readthedocs/projects/tasks/storage.py b/readthedocs/projects/tasks/storage.py new file mode 100644 index 00000000000..8f60758531c --- /dev/null +++ b/readthedocs/projects/tasks/storage.py @@ -0,0 +1,101 @@ +from enum import StrEnum +from enum import auto + +import structlog +from django.conf import settings +from django.utils.module_loading import import_string + +from readthedocs.doc_builder.exceptions import BuildAppError +from readthedocs.projects.models import Feature + + +log = structlog.get_logger(__name__) + + +class StorageType(StrEnum): + build_media = auto() + build_tools = auto() + + +def get_storage(task_data, storage_type: StorageType): + """ + Get a storage class instance to interact with storage from the build. + + .. note:: + + We no longer use readthedocs.storage.build_media_storage/build_tools_storage directly, + as we are now using per-build credentials for S3 storage, + so we need to dynamically create the storage class instance + """ + storage_class = _get_storage_class(storage_type) + extra_kwargs = {} + if settings.USING_AWS: + extra_kwargs = _get_s3_scoped_credentials(task_data, storage_type) + return storage_class(**extra_kwargs) + + +def _get_storage_class(storage_type: StorageType): + """ + Get a storage class to use for interacting with storage. + + .. 
note:: + + We no longer use readthedocs.storage.build_media_storage/build_tools_storage directly, + as we are now using per-build credentials for S3 storage, + so we need to dynamically create the storage class instance + """ + if storage_type == StorageType.build_media: + return _get_build_media_storage_class() + if storage_type == StorageType.build_tools: + return _get_build_tools_storage_class() + raise ValueError("Invalid storage type") + + +def _get_build_media_storage_class(): + """ + Get a storage class to use for syncing build artifacts. + + This is done in a separate method to make it easier to mock in tests. + """ + return import_string(settings.RTD_BUILD_MEDIA_STORAGE) + + +def _get_build_tools_storage_class(): + """ + Get a storage class to use for downloading build tools. + + This is done in a separate method to make it easier to mock in tests. + """ + return import_string(settings.RTD_BUILD_TOOLS_STORAGE) + + +def _get_s3_scoped_credentials(task_data, storage_type: StorageType): + """Get the scoped credentials for the build using our internal API.""" + if not task_data.project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): + # Use the default credentials defined in the settings. 
+ return {} + + build_id = task_data.build["id"] + + try: + credentials = task_data.api_client.build(f"{build_id}/temporary-credentials/storage").post( + {"type": storage_type.value} + ) + except Exception: + log.exception( + "Error getting scoped credentials.", + build_id=build_id, + ) + raise BuildAppError( + BuildAppError.GENERIC_WITH_BUILD_ID, + exception_message="Error getting scoped credentials.", + ) + + s3_credentials = credentials["s3"] + return { + "access_key": s3_credentials["access_key_id"], + "secret_key": s3_credentials["secret_access_key"], + "security_token": s3_credentials["session_token"], + "bucket_name": s3_credentials["bucket_name"], + "region_name": s3_credentials["region_name"], + } diff --git a/readthedocs/projects/tests/mockers.py b/readthedocs/projects/tests/mockers.py index b7ebda2ad8c..e5dbf39f811 100644 --- a/readthedocs/projects/tests/mockers.py +++ b/readthedocs/projects/tests/mockers.py @@ -160,7 +160,7 @@ def _mock_environment(self): # ) def _mock_storage(self): - self.patches["get_sync_media_storage_class"] = mock.patch.object(UpdateDocsTask, "_get_sync_media_storage_class") + self.patches["get_build_media_storage_class"] = mock.patch("readthedocs.projects.tasks.storage._get_build_media_storage_class") def _mock_api(self): headers = {"Content-Type": "application/json"} @@ -225,13 +225,15 @@ def _mock_api(self): ) self.requestsmock.post( - f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/temporary-credentials/", + f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/temporary-credentials/storage/", status_code=201, json={ "s3": { "access_key_id": "some-access-key", "secret_access_key": "some-secret-key", "session_token": "some-session-token", + "bucket_name": "some-bucket-name", + "region_name": "us-east-1", } }, headers=headers, diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py index 1b0aab7647e..6467447fccf 100644 --- 
a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -661,7 +661,7 @@ def test_successful_build( assert BuildData.objects.all().exists() - self.mocker.mocks["get_sync_media_storage_class"]()().rclone_sync_directory.assert_has_calls( + self.mocker.mocks["get_build_media_storage_class"]()().rclone_sync_directory.assert_has_calls( [ mock.call(mock.ANY, "html/project/latest"), mock.call(mock.ANY, "json/project/latest"), @@ -881,8 +881,16 @@ def test_successful_build_with_temporary_s3_credentials( }, }, } - # Update build state: building + + # Get temporary credentials + assert self.requests_mock.request_history[6]._request.method == "POST" + assert self.requests_mock.request_history[6].path == "/api/v2/build/1/temporary-credentials/storage/" assert self.requests_mock.request_history[6].json() == { + "type": "build_tools", + } + + # Update build state: building + assert self.requests_mock.request_history[7].json() == { "id": 1, "state": "building", "commit": "a1b2c3", @@ -892,7 +900,7 @@ def test_successful_build_with_temporary_s3_credentials( "error": "", } # Update build state: uploading - assert self.requests_mock.request_history[7].json() == { + assert self.requests_mock.request_history[8].json() == { "id": 1, "state": "uploading", "commit": "a1b2c3", @@ -903,13 +911,16 @@ def test_successful_build_with_temporary_s3_credentials( } # Get temporary credentials - assert self.requests_mock.request_history[8]._request.method == "POST" - assert self.requests_mock.request_history[8].path == "/api/v2/build/1/temporary-credentials/" + assert self.requests_mock.request_history[9]._request.method == "POST" + assert self.requests_mock.request_history[9].path == "/api/v2/build/1/temporary-credentials/storage/" + assert self.requests_mock.request_history[9].json() == { + "type": "build_media", + } # Update version state - assert self.requests_mock.request_history[9]._request.method == "PATCH" - assert 
self.requests_mock.request_history[9].path == "/api/v2/version/1/" - assert self.requests_mock.request_history[9].json() == { + assert self.requests_mock.request_history[10]._request.method == "PATCH" + assert self.requests_mock.request_history[10].path == "/api/v2/version/1/" + assert self.requests_mock.request_history[10].json() == { "addons": False, "build_data": None, "built": True, @@ -919,11 +930,11 @@ def test_successful_build_with_temporary_s3_credentials( "has_htmlzip": True, } # Set project has valid clone - assert self.requests_mock.request_history[10]._request.method == "PATCH" - assert self.requests_mock.request_history[10].path == "/api/v2/project/1/" - assert self.requests_mock.request_history[10].json() == {"has_valid_clone": True} + assert self.requests_mock.request_history[11]._request.method == "PATCH" + assert self.requests_mock.request_history[11].path == "/api/v2/project/1/" + assert self.requests_mock.request_history[11].json() == {"has_valid_clone": True} # Update build state: finished, success and builder - assert self.requests_mock.request_history[11].json() == { + assert self.requests_mock.request_history[12].json() == { "id": 1, "state": "finished", "commit": "a1b2c3", @@ -935,12 +946,12 @@ def test_successful_build_with_temporary_s3_credentials( "error": "", } - assert self.requests_mock.request_history[12]._request.method == "POST" - assert self.requests_mock.request_history[12].path == "/api/v2/revoke/" + assert self.requests_mock.request_history[13]._request.method == "POST" + assert self.requests_mock.request_history[13].path == "/api/v2/revoke/" assert BuildData.objects.all().exists() - self.mocker.mocks["get_sync_media_storage_class"]()().rclone_sync_directory.assert_has_calls( + self.mocker.mocks["get_build_media_storage_class"]()().rclone_sync_directory.assert_has_calls( [ mock.call(mock.ANY, "html/project/latest"), mock.call(mock.ANY, "json/project/latest"), @@ -1828,9 +1839,9 @@ def 
test_build_jobs_partial_build_override_empty_commands(self, load_yaml_config ) @mock.patch("readthedocs.doc_builder.director.tarfile") - @mock.patch("readthedocs.doc_builder.director.build_tools_storage") + @mock.patch("readthedocs.projects.tasks.storage._get_build_tools_storage_class") @mock.patch("readthedocs.doc_builder.director.load_yaml_config") - def test_build_tools_cached(self, load_yaml_config, build_tools_storage, tarfile): + def test_build_tools_cached(self, load_yaml_config, get_build_tools_storage_class, tarfile): config = BuildConfigV2( { "version": 2, @@ -1849,8 +1860,8 @@ def test_build_tools_cached(self, load_yaml_config, build_tools_storage, tarfile config.validate() load_yaml_config.return_value = config - build_tools_storage.open.return_value = b"" - build_tools_storage.exists.return_value = True + get_build_tools_storage_class()().open.return_value = b"" + get_build_tools_storage_class()().exists.return_value = True tarfile.open.return_value.__enter__.return_value.extract_all.return_value = None self._trigger_update_docs_task() diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py index e783813fdbe..984c9c00bcc 100644 --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -70,7 +70,7 @@ Feature, Project, ) -from readthedocs.aws.security_token_service import AWSTemporaryCredentials +from readthedocs.aws.security_token_service import AWSS3TemporaryCredentials from readthedocs.subscriptions.constants import TYPE_CONCURRENT_BUILDS from readthedocs.subscriptions.products import RTDProductFeature from readthedocs.vcs_support.backends.git import parse_version_from_ref @@ -145,8 +145,8 @@ def test_reset_build(self): self.assertEqual(build.commands.count(), 0) self.assertEqual(build.notifications.count(), 0) - @mock.patch("readthedocs.api.v2.views.model_views.get_s3_scoped_credentials") - def test_get_temporary_credentials_for_build(self, get_s3_scoped_credentials): + 
@mock.patch("readthedocs.api.v2.views.model_views.get_s3_build_media_scoped_credentials") + def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_credentials): build = get( Build, project=self.project, @@ -163,25 +163,27 @@ def test_get_temporary_credentials_for_build(self, get_s3_scoped_credentials): client = APIClient() _, build_api_key = BuildAPIKey.objects.create_key(self.project) client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}") - get_s3_scoped_credentials.return_value = AWSTemporaryCredentials( + get_s3_build_media_scoped_credentials.return_value = AWSS3TemporaryCredentials( access_key_id="access_key_id", secret_access_key="secret_access_key", session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-media", ) - r = client.post(reverse("build-temporary-credentials", args=(build.pk,))) + r = client.post(reverse("build-temporary-credentials-for-storage", args=(build.pk,)), {"type": "build_media"}) assert r.status_code == 200 assert r.data == { "s3": { "access_key_id": "access_key_id", "secret_access_key": "secret_access_key", "session_token": "session_token", + "region_name": "us-east-1", + "bucket_name": "readthedocs-media", } } - get_s3_scoped_credentials.assert_called_once_with( - project=self.project, - version=self.version, - session_id=build.pk, + get_s3_build_media_scoped_credentials.assert_called_once_with( + build=build, duration=60 * 30, ) From 4bb5206db6258438663c4abe97291655838bf474 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Tue, 8 Apr 2025 18:04:48 -0500 Subject: [PATCH 14/18] Tests --- readthedocs/aws/security_token_service.py | 7 +- .../aws/tests/test_security_token_service.py | 103 +++++++++++++++++- readthedocs/rtd_tests/tests/test_api.py | 27 ++++- 3 files changed, 133 insertions(+), 4 deletions(-) diff --git a/readthedocs/aws/security_token_service.py b/readthedocs/aws/security_token_service.py index 974bc4431e6..d8ea291ba70 100644 --- 
a/readthedocs/aws/security_token_service.py +++ b/readthedocs/aws/security_token_service.py @@ -115,7 +115,10 @@ def get_s3_build_media_scoped_credentials( duration=60 * 15, ) -> AWSS3TemporaryCredentials: """ - :param build: The build to get the credentials for. + Get temporary credentials with read/write access to the build media bucket. + + The credentials are scoped to the paths that the build needs to access. + :duration: The duration of the credentials in seconds. Default is 15 minutes. Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). """ @@ -186,6 +189,8 @@ def get_s3_build_tools_scoped_credentials( duration=60 * 15, ) -> AWSS3TemporaryCredentials: """ + Get temporary credentials with read-only access to the build-tools bucket. + :param build: The build to get the credentials for. :duration: The duration of the credentials in seconds. Default is 15 minutes. Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). 
diff --git a/readthedocs/aws/tests/test_security_token_service.py b/readthedocs/aws/tests/test_security_token_service.py index 2d949ad1251..b81705a05b5 100644 --- a/readthedocs/aws/tests/test_security_token_service.py +++ b/readthedocs/aws/tests/test_security_token_service.py @@ -21,6 +21,7 @@ AWS_S3_REGION_NAME="us-east-1", AWS_STS_ASSUME_ROLE_ARN="arn:aws:iam::1234:role/RoleName", S3_MEDIA_STORAGE_BUCKET="readthedocs-media", + S3_BUILD_TOOLS_STORAGE_BUCKET="readthedocs-build-tools", ) class TestSecurityTokenService(TestCase): def setUp(self): @@ -111,7 +112,7 @@ def test_get_s3_build_media_scoped_credentials(self, boto3_client): RoleArn="arn:aws:iam::1234:role/RoleName", RoleSessionName=f"rtd-{self.build.id}-project-latest", Policy=json.dumps(policy), - DurationSeconds=900, + DurationSeconds=15 * 60, ) @mock.patch("readthedocs.aws.security_token_service.boto3.client") @@ -180,5 +181,103 @@ def test_get_s3_build_media_scoped_credentials_external_version(self, boto3_clie RoleArn="arn:aws:iam::1234:role/RoleName", RoleSessionName=f"rtd-{self.build.id}-project-latest", Policy=json.dumps(policy), - DurationSeconds=900, + DurationSeconds=15 * 60, + ) + + @override_settings(USING_AWS=False) + def test_get_s3_build_tools_global_credentials(self): + credentials = get_s3_build_tools_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="global_access_key_id", + secret_access_key="global_secret_access_key", + session_token=None, + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + + @mock.patch("readthedocs.aws.security_token_service.boto3.client") + def test_get_s3_build_tools_scoped_credentials(self, boto3_client): + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_build_tools_scoped_credentials(build=self.build) + assert credentials == 
AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:ListBucket", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-build-tools", + "arn:aws:s3:::readthedocs-build-tools/*", + ], + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName=f"rtd-{self.build.id}-project-latest", + Policy=json.dumps(policy), + DurationSeconds=15 * 60, + ) + + @mock.patch("readthedocs.aws.security_token_service.boto3.client") + def test_get_s3_build_tools_scoped_credentials_external_version(self, boto3_client): + self.version.type = EXTERNAL + self.version.save() + + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_build_tools_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:ListBucket", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-build-tools", + "arn:aws:s3:::readthedocs-build-tools/*", + ], + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName=f"rtd-{self.build.id}-project-latest", + Policy=json.dumps(policy), + DurationSeconds=15 * 60, ) diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py index 984c9c00bcc..929683f5b92 100644 --- 
a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -145,8 +145,9 @@ def test_reset_build(self): self.assertEqual(build.commands.count(), 0) self.assertEqual(build.notifications.count(), 0) + @mock.patch("readthedocs.api.v2.views.model_views.get_s3_build_tools_scoped_credentials") @mock.patch("readthedocs.api.v2.views.model_views.get_s3_build_media_scoped_credentials") - def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_credentials): + def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_credentials, get_s3_build_tools_scoped_credentials): build = get( Build, project=self.project, @@ -187,6 +188,30 @@ def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_cre duration=60 * 30, ) + get_s3_build_tools_scoped_credentials.return_value = AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + r = client.post(reverse("build-temporary-credentials-for-storage", args=(build.pk,)), {"type": "build_tools"}) + assert r.status_code == 200 + assert r.data == { + "s3": { + "access_key_id": "access_key_id", + "secret_access_key": "secret_access_key", + "session_token": "session_token", + "region_name": "us-east-1", + "bucket_name": "readthedocs-build-tools", + } + } + + get_s3_build_tools_scoped_credentials.assert_called_once_with( + build=build, + duration=60 * 30, + ) + def test_api_does_not_have_private_config_key_superuser(self): client = APIClient() client.login(username="super", password="test") From 354000e238236077ec1bf778cf9ba760e0371b6a Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 17 Apr 2025 11:54:31 -0500 Subject: [PATCH 15/18] Updates from review --- readthedocs/api/v2/views/model_views.py | 4 ++-- readthedocs/aws/security_token_service.py | 5 +++-- readthedocs/doc_builder/director.py | 7 ++++++- 
readthedocs/projects/tasks/builds.py | 7 ++++++- readthedocs/projects/tasks/storage.py | 18 +++++++----------- readthedocs/projects/tests/mockers.py | 2 +- readthedocs/projects/tests/test_build_tasks.py | 4 ++-- readthedocs/rtd_tests/tests/test_api.py | 4 ++-- readthedocs/storage/rclone.py | 2 +- 9 files changed, 30 insertions(+), 23 deletions(-) diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index 4b9404f60cf..5a1bc3e5166 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -353,9 +353,9 @@ def get_queryset_for_api_key(self, api_key): detail=True, permission_classes=[HasBuildAPIKey], methods=["post"], - url_path="temporary-credentials/storage", + url_path="credentials/storage", ) - def temporary_credentials_for_storage(self, request, **kwargs): + def credentials_for_storage(self, request, **kwargs): """ Generate temporary credentials for interacting with storage. diff --git a/readthedocs/aws/security_token_service.py b/readthedocs/aws/security_token_service.py index d8ea291ba70..2d2885b46e2 100644 --- a/readthedocs/aws/security_token_service.py +++ b/readthedocs/aws/security_token_service.py @@ -65,8 +65,9 @@ def _get_scoped_credentials(*, session_name, policy, duration) -> AWSTemporaryCr """ :param session_name: An identifier to attach to the generated credentials, useful to identify who requested them. AWS limits the session name to 64 characters, so if the session_name is too long, it will be truncated. - :duration: The duration of the credentials in seconds. Default is 15 minutes. + :param duration: The duration of the credentials in seconds. Default is 15 minutes. Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). + :param policy: The inline policy to attach to the generated credentials. .. 
note:: @@ -192,7 +193,7 @@ def get_s3_build_tools_scoped_credentials( Get temporary credentials with read-only access to the build-tools bucket. :param build: The build to get the credentials for. - :duration: The duration of the credentials in seconds. Default is 15 minutes. + :param duration: The duration of the credentials in seconds. Default is 15 minutes. Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). """ project = build.project diff --git a/readthedocs/doc_builder/director.py b/readthedocs/doc_builder/director.py index 309c8246fcd..70ad960693f 100644 --- a/readthedocs/doc_builder/director.py +++ b/readthedocs/doc_builder/director.py @@ -523,7 +523,12 @@ def install_build_tools(self): record=False, ) - build_tools_storage = get_storage(self.data, StorageType.build_tools) + build_tools_storage = get_storage( + project=self.data.project, + build_id=self.data.build["id"], + api_client=self.data.api_client, + storage_type=StorageType.build_tools, + ) for tool, version in self.data.config.build.tools.items(): full_version = version.full_version # e.g. 
3.9 -> 3.9.7 diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index 3b191445f8b..eff785dc61d 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -905,7 +905,12 @@ def store_build_artifacts(self): types_to_copy = [] types_to_delete = [] - build_media_storage = get_storage(self.data, StorageType.build_media) + build_media_storage = get_storage( + project=self.data.project, + build_id=self.data.build["id"], + api_client=self.data.api_client, + storage_type=StorageType.build_media, + ) for artifact_type in ARTIFACT_TYPES: if artifact_type in valid_artifacts: diff --git a/readthedocs/projects/tasks/storage.py b/readthedocs/projects/tasks/storage.py index 8f60758531c..4613cb71aba 100644 --- a/readthedocs/projects/tasks/storage.py +++ b/readthedocs/projects/tasks/storage.py @@ -17,7 +17,7 @@ class StorageType(StrEnum): build_tools = auto() -def get_storage(task_data, storage_type: StorageType): +def get_storage(*, project, build_id, api_client, storage_type: StorageType): """ Get a storage class instance to interact with storage from the build. 
@@ -30,7 +30,9 @@ def get_storage(task_data, storage_type: StorageType): storage_class = _get_storage_class(storage_type) extra_kwargs = {} if settings.USING_AWS: - extra_kwargs = _get_s3_scoped_credentials(task_data, storage_type) + extra_kwargs = _get_s3_scoped_credentials( + project=project, build_id=build_id, api_client=api_client, storage_type=storage_type + ) return storage_class(**extra_kwargs) @@ -69,23 +71,17 @@ def _get_build_tools_storage_class(): return import_string(settings.RTD_BUILD_TOOLS_STORAGE) -def _get_s3_scoped_credentials(task_data, storage_type: StorageType): +def _get_s3_scoped_credentials(*, project, build_id, api_client, storage_type: StorageType): """Get the scoped credentials for the build using our internal API.""" - if not task_data.project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): + if not project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): # Use the default credentials defined in the settings. return {} - build_id = task_data.build["id"] - try: - credentials = task_data.api_client.build(f"{build_id}/temporary-credentials/storage").post( + credentials = api_client.build(f"{build_id}/credentials/storage").post( {"type": storage_type.value} ) except Exception: - log.exception( - "Error getting scoped credentials.", - build_id=build_id, - ) raise BuildAppError( BuildAppError.GENERIC_WITH_BUILD_ID, exception_message="Error getting scoped credentials.", diff --git a/readthedocs/projects/tests/mockers.py b/readthedocs/projects/tests/mockers.py index e5dbf39f811..c1d2930a302 100644 --- a/readthedocs/projects/tests/mockers.py +++ b/readthedocs/projects/tests/mockers.py @@ -225,7 +225,7 @@ def _mock_api(self): ) self.requestsmock.post( - f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/temporary-credentials/storage/", + f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/credentials/storage/", status_code=201, json={ "s3": { diff --git a/readthedocs/projects/tests/test_build_tasks.py 
b/readthedocs/projects/tests/test_build_tasks.py index 6467447fccf..cb0cbb7dd00 100644 --- a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -884,7 +884,7 @@ def test_successful_build_with_temporary_s3_credentials( # Get temporary credentials assert self.requests_mock.request_history[6]._request.method == "POST" - assert self.requests_mock.request_history[6].path == "/api/v2/build/1/temporary-credentials/storage/" + assert self.requests_mock.request_history[6].path == "/api/v2/build/1/credentials/storage/" assert self.requests_mock.request_history[6].json() == { "type": "build_tools", } @@ -912,7 +912,7 @@ def test_successful_build_with_temporary_s3_credentials( # Get temporary credentials assert self.requests_mock.request_history[9]._request.method == "POST" - assert self.requests_mock.request_history[9].path == "/api/v2/build/1/temporary-credentials/storage/" + assert self.requests_mock.request_history[9].path == "/api/v2/build/1/credentials/storage/" assert self.requests_mock.request_history[9].json() == { "type": "build_media", } diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py index 929683f5b92..1a2f545b04f 100644 --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -171,7 +171,7 @@ def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_cre region_name="us-east-1", bucket_name="readthedocs-media", ) - r = client.post(reverse("build-temporary-credentials-for-storage", args=(build.pk,)), {"type": "build_media"}) + r = client.post(reverse("build-credentials-for-storage", args=(build.pk,)), {"type": "build_media"}) assert r.status_code == 200 assert r.data == { "s3": { @@ -195,7 +195,7 @@ def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_cre region_name="us-east-1", bucket_name="readthedocs-build-tools", ) - r = client.post(reverse("build-temporary-credentials-for-storage", 
args=(build.pk,)), {"type": "build_tools"}) + r = client.post(reverse("build-credentials-for-storage", args=(build.pk,)), {"type": "build_tools"}) assert r.status_code == 200 assert r.data == { "s3": { diff --git a/readthedocs/storage/rclone.py b/readthedocs/storage/rclone.py index 16fdfe09f4d..ae3139892f4 100644 --- a/readthedocs/storage/rclone.py +++ b/readthedocs/storage/rclone.py @@ -150,7 +150,7 @@ class RCloneS3Remote(BaseRClone): All secrets will be passed as environ variables to the rclone command. - See https://rclone.org/s3/. + See https://rclone.org/s3/ and https://rclone.org/s3/#configuration. :params bucket_name: Name of the S3 bucket. :params access_key_id: AWS access key id. From 009159865625b2e473fbaf492924b2496490b6d7 Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 17 Apr 2025 12:16:50 -0500 Subject: [PATCH 16/18] Make sure to never return global credentials in production --- readthedocs/aws/security_token_service.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/readthedocs/aws/security_token_service.py b/readthedocs/aws/security_token_service.py index 2d2885b46e2..82c1b2c0b91 100644 --- a/readthedocs/aws/security_token_service.py +++ b/readthedocs/aws/security_token_service.py @@ -56,7 +56,6 @@ def get_sts_client(): "sts", aws_access_key_id=settings.AWS_ACCESS_KEY_ID, aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - # TODO: should this be its own setting? region_name=settings.AWS_S3_REGION_NAME, ) @@ -76,6 +75,10 @@ def _get_scoped_credentials(*, session_name, policy, duration) -> AWSTemporaryCr Useful for local development where we don't have a service like AWS STS. """ if not settings.USING_AWS: + if not settings.DEBUG: + raise ValueError( + "Not returning global credentials, AWS STS should always be used in production." 
+ ) return AWSTemporaryCredentials( access_key_id=settings.AWS_ACCESS_KEY_ID, secret_access_key=settings.AWS_SECRET_ACCESS_KEY, From 1ad2576bd2af010ed054afc8f0e3a4df404c206e Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 17 Apr 2025 12:54:13 -0500 Subject: [PATCH 17/18] Fix tests --- readthedocs/aws/tests/test_security_token_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/readthedocs/aws/tests/test_security_token_service.py b/readthedocs/aws/tests/test_security_token_service.py index b81705a05b5..b0676658d02 100644 --- a/readthedocs/aws/tests/test_security_token_service.py +++ b/readthedocs/aws/tests/test_security_token_service.py @@ -38,7 +38,7 @@ def setUp(self): project=self.project, ) - @override_settings(USING_AWS=False) + @override_settings(USING_AWS=False, DEBUG=True) def test_get_s3_build_media_global_credentials(self): credentials = get_s3_build_media_scoped_credentials(build=self.build) assert credentials == AWSS3TemporaryCredentials( @@ -184,7 +184,7 @@ def test_get_s3_build_media_scoped_credentials_external_version(self, boto3_clie DurationSeconds=15 * 60, ) - @override_settings(USING_AWS=False) + @override_settings(USING_AWS=False, DEBUG=True) def test_get_s3_build_tools_global_credentials(self): credentials = get_s3_build_tools_scoped_credentials(build=self.build) assert credentials == AWSS3TemporaryCredentials( From 221dbad6caaa9e0834a898d79b30e085693416ca Mon Sep 17 00:00:00 2001 From: Santos Gallegos Date: Thu, 17 Apr 2025 13:19:00 -0500 Subject: [PATCH 18/18] Update steps --- docs/dev/aws-temporary-credentials.rst | 2 ++ readthedocs/aws/security_token_service.py | 10 ++++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/dev/aws-temporary-credentials.rst b/docs/dev/aws-temporary-credentials.rst index ec215913404..3488475b088 100644 --- a/docs/dev/aws-temporary-credentials.rst +++ b/docs/dev/aws-temporary-credentials.rst @@ -15,6 +15,8 @@ In order to make use of STS, you need: - 
Create a role in IAM with a trusted entity type set to the AWS account that is going to be used to generate the temporary credentials. - Create an inline policy for the role, the policy should allow access to all S3 buckets and paths that are going to be used. +- Create an inline policy to the user that is going to be used to generate the temporary credentials, + the policy should allow the ``sts:AssumeRole`` action for the role created in the previous step. You can use :ref:`environment variables ` to set the credentials for AWS, make sure to set the value of ``RTD_S3_PROVIDER`` to ``AWS``. diff --git a/readthedocs/aws/security_token_service.py b/readthedocs/aws/security_token_service.py index 82c1b2c0b91..326c7135c48 100644 --- a/readthedocs/aws/security_token_service.py +++ b/readthedocs/aws/security_token_service.py @@ -6,10 +6,12 @@ In order to make use of STS, we need: - Create a role in IAM with a trusted entity type set to the AWS account that is going to be used to generate the temporary credentials. -- A policy that allows access to all S3 buckets and paths that are going to be used. - Which should be attached to the role. -- The permissions of the temporary credentials are the result of the intersection of the role policy and the inline policy that is passed to the AssumeRole API. - This means that the inline policy can be used to limit the permissions of the temporary credentials, but not to expand them. +- Create an inline policy for the role, the policy should allow access to all S3 buckets and paths that are going to be used. +- Create an inline policy to the user that is going to be used to generate the temporary credentials, + the policy should allow the ``sts:AssumeRole`` action for the role created in the previous step. + +The permissions of the temporary credentials are the result of the intersection of the role policy and the inline policy that is passed to the AssumeRole API. 
+This means that the inline policy can be used to limit the permissions of the temporary credentials, but not to expand them. See: