diff --git a/docs/dev/aws-temporary-credentials.rst b/docs/dev/aws-temporary-credentials.rst new file mode 100644 index 00000000000..3488475b088 --- /dev/null +++ b/docs/dev/aws-temporary-credentials.rst @@ -0,0 +1,32 @@ +AWS temporary credentials +========================= + +Builders run arbitrary commands provided by the user. While we run those commands in a sandboxed environment (Docker), +that shouldn't be the only line of defense, since we still interact with the files generated by the user outside Docker for some operations. + +This is why, instead of using credentials that have access to all the resources in AWS, +we use credentials generated by the `AWS STS service `__, +which are temporary and scoped to the resources needed for the build. + +Local development +----------------- + +In order to make use of STS, you need to: + +- Create a role in IAM with a trusted entity type set to the AWS account that is going to be used to generate the temporary credentials. +- Create an inline policy for the role; the policy should allow access to all S3 buckets and paths that are going to be used. +- Create an inline policy for the user that is going to be used to generate the temporary credentials; + the policy should allow the ``sts:AssumeRole`` action for the role created in the previous step. + +You can use :ref:`environment variables ` to set the credentials for AWS; make sure to set the value of ``RTD_S3_PROVIDER`` to ``AWS``. + +.. note:: + + If you are part of the development team, you should be able to use the credentials from the ``storage-dev`` user, + which is already configured to make use of STS, and the ARN from the ``RTDSTSAssumeRoleDev`` role. + +.. note:: + + You should use AWS only when you are testing the AWS integration; + use the default minio provider for local development. + Otherwise, files may be overwritten if multiple developers are using the same credentials. diff --git a/docs/dev/index.rst b/docs/dev/index.rst index 12f53915f2c..79032ecf7e9 100644 --- a/docs/dev/index.rst +++ b/docs/dev/index.rst @@ -28,6 +28,7 @@ or taking the open source Read the Docs codebase for your own custom installatio migrations server-side-search search-integration + aws-temporary-credentials subscriptions github-app settings diff --git a/docs/dev/settings.rst b/docs/dev/settings.rst index a1b04e93660..1d9a55323c7 100644 --- a/docs/dev/settings.rst +++ b/docs/dev/settings.rst @@ -167,6 +167,22 @@ providers using the following environment variables: .. envvar:: RTD_SOCIALACCOUNT_PROVIDERS_GOOGLE_CLIENT_ID .. envvar:: RTD_SOCIALACCOUNT_PROVIDERS_GOOGLE_SECRET +AWS configuration +~~~~~~~~~~~~~~~~~ + +The following variables can be used to configure AWS in your local environment. +This is useful for testing :doc:`temporary credentials `. + +.. envvar:: RTD_S3_PROVIDER +.. envvar:: RTD_AWS_ACCESS_KEY_ID +.. envvar:: RTD_AWS_SECRET_ACCESS_KEY +.. envvar:: RTD_AWS_STS_ASSUME_ROLE_ARN +.. envvar:: RTD_S3_MEDIA_STORAGE_BUCKET +.. envvar:: RTD_S3_BUILD_COMMANDS_STORAGE_BUCKET +.. envvar:: RTD_S3_BUILD_TOOLS_STORAGE_BUCKET +.. envvar:: RTD_S3_STATIC_STORAGE_BUCKET +.. 
envvar:: RTD_AWS_S3_REGION_NAME + GitHub App ~~~~~~~~~~ diff --git a/readthedocs/api/v2/views/model_views.py b/readthedocs/api/v2/views/model_views.py index 8bc24efb11b..5a1bc3e5166 100644 --- a/readthedocs/api/v2/views/model_views.py +++ b/readthedocs/api/v2/views/model_views.py @@ -1,6 +1,7 @@ """Endpoints for listing Projects, Versions, Builds, etc.""" import json +from dataclasses import asdict import structlog from allauth.socialaccount.models import SocialAccount @@ -27,6 +28,9 @@ from readthedocs.api.v2.permissions import IsOwner from readthedocs.api.v2.permissions import ReadOnlyPermission from readthedocs.api.v2.utils import normalize_build_command +from readthedocs.aws.security_token_service import AWSTemporaryCredentialsError +from readthedocs.aws.security_token_service import get_s3_build_media_scoped_credentials +from readthedocs.aws.security_token_service import get_s3_build_tools_scoped_credentials from readthedocs.builds.constants import INTERNAL from readthedocs.builds.models import Build from readthedocs.builds.models import BuildCommandResult @@ -345,6 +349,44 @@ def reset(self, request, **kwargs): def get_queryset_for_api_key(self, api_key): return self.model.objects.filter(project=api_key.project) + @decorators.action( + detail=True, + permission_classes=[HasBuildAPIKey], + methods=["post"], + url_path="credentials/storage", + ) + def credentials_for_storage(self, request, **kwargs): + """ + Generate temporary credentials for interacting with storage. + + This can generate temporary credentials for interacting with S3 only for now. + """ + build = self.get_object() + credentials_type = request.data.get("type") + + if credentials_type == "build_media": + method = get_s3_build_media_scoped_credentials + # 30 minutes should be enough for uploading build artifacts. + duration = 30 * 60 + elif credentials_type == "build_tools": + method = get_s3_build_tools_scoped_credentials + # 30 minutes should be enough for downloading build tools. + duration = 30 * 60 + else: + return Response( + {"error": "Invalid storage type"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + try: + credentials = method(build=build, duration=duration) + except AWSTemporaryCredentialsError: + return Response( + {"error": "Failed to generate temporary credentials"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + return Response({"s3": asdict(credentials)}) + class BuildCommandViewSet(DisableListEndpoint, CreateModelMixin, UserSelectViewSet): parser_classes = [JSONParser, MultiPartParser] diff --git a/readthedocs/aws/__init__.py b/readthedocs/aws/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/readthedocs/aws/security_token_service.py b/readthedocs/aws/security_token_service.py new file mode 100644 index 00000000000..326c7135c48 --- /dev/null +++ b/readthedocs/aws/security_token_service.py @@ -0,0 +1,239 @@ +""" +Module to interact with AWS STS (Security Token Service) to assume a role and get temporary scoped credentials. + +This is mainly used to generate temporary credentials to interact with S3 buckets from the builders. + +In order to make use of STS, we need: + +- Create a role in IAM with a trusted entity type set to the AWS account that is going to be used to generate the temporary credentials. +- Create an inline policy for the role, the policy should allow access to all S3 buckets and paths that are going to be used. 
+- Create an inline policy to the user that is going to be used to generate the temporary credentials, + the policy should allow the ``sts:AssumeRole`` action for the role created in the previous step. + +The permissions of the temporary credentials are the result of the intersection of the role policy and the inline policy that is passed to the AssumeRole API. +This means that the inline policy can be used to limit the permissions of the temporary credentials, but not to expand them. + +See: + +- https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html +- https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_sts-comparison.html +- https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html +- https://docs.readthedocs.com/dev/latest/aws-temporary-credentials.html +""" + +import json +from dataclasses import dataclass + +import boto3 +import structlog +from django.conf import settings + + +log = structlog.get_logger(__name__) + + +class AWSTemporaryCredentialsError(Exception): + """Exception raised when there is an error getting AWS S3 credentials.""" + + +@dataclass +class AWSTemporaryCredentials: + """Dataclass to hold AWS temporary credentials.""" + + access_key_id: str + secret_access_key: str + session_token: str | None + + +@dataclass +class AWSS3TemporaryCredentials(AWSTemporaryCredentials): + """Subclass of AWSTemporaryCredentials to include S3 specific fields.""" + + bucket_name: str + region_name: str + + +def get_sts_client(): + return boto3.client( + "sts", + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + region_name=settings.AWS_S3_REGION_NAME, + ) + + +def _get_scoped_credentials(*, session_name, policy, duration) -> AWSTemporaryCredentials: + """ + :param session_name: An identifier to attach to the generated credentials, useful to identify who requested them. + AWS limits the session name to 64 characters, so if the session_name is too long, it will be truncated. + :param duration: The duration of the credentials in seconds. Default is 15 minutes. + Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). + :param policy: The inline policy to attach to the generated credentials. + + .. note:: + + If USING_AWS is set to False, this function will return + the values of the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY settings. + Useful for local development where we don't have a service like AWS STS. + """ + if not settings.USING_AWS: + if not settings.DEBUG: + raise ValueError( + "Not returning global credentials, AWS STS should always be used in production." + ) + return AWSTemporaryCredentials( + access_key_id=settings.AWS_ACCESS_KEY_ID, + secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + # A session token is not needed for the default credentials. + session_token=None, + ) + + # Limit to 64 characters, as per AWS limitations. 
+ session_name = session_name[:64] + try: + sts_client = get_sts_client() + response = sts_client.assume_role( + RoleArn=settings.AWS_STS_ASSUME_ROLE_ARN, + RoleSessionName=session_name, + Policy=json.dumps(policy), + DurationSeconds=duration, + ) + except Exception: + log.exception( + "Error while assuming role to generate temporary credentials", + session_name=session_name, + policy=policy, + duration=duration, + ) + raise AWSTemporaryCredentialsError + + credentials = response["Credentials"] + return AWSTemporaryCredentials( + access_key_id=credentials["AccessKeyId"], + secret_access_key=credentials["SecretAccessKey"], + session_token=credentials["SessionToken"], + ) + + +def get_s3_build_media_scoped_credentials( + *, + build, + duration=60 * 15, +) -> AWSS3TemporaryCredentials: + """ + Get temporary credentials with read/write access to the build media bucket. + + The credentials are scoped to the paths that the build needs to access. + + :duration: The duration of the credentials in seconds. Default is 15 minutes. + Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). + """ + project = build.project + version = build.version + bucket_arn = f"arn:aws:s3:::{settings.S3_MEDIA_STORAGE_BUCKET}" + storage_paths = version.get_storage_paths() + # Generate the list of allowed prefix resources + # The resulting prefix looks like: + # - html/project/latest/* + # - pdf/project/latest/* + allowed_prefixes = [f"{storage_path}/*" for storage_path in storage_paths] + + # Generate the list of allowed object resources in ARN format. + # The resulting ARN looks like: + # arn:aws:s3:::readthedocs-media/html/project/latest/* + # arn:aws:s3:::readthedocs-media/pdf/project/latest/* + allowed_objects_arn = [f"{bucket_arn}/{prefix}" for prefix in allowed_prefixes] + + # Inline policy document to limit the permissions of the temporary credentials. + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + ], + "Resource": allowed_objects_arn, + }, + # In order to list the objects in a path, we need to allow the ListBucket action. + # But since that action is not scoped to a path, we need to limit it using a condition. + { + "Effect": "Allow", + "Action": ["s3:ListBucket"], + "Resource": [ + bucket_arn, + ], + "Condition": { + "StringLike": { + "s3:prefix": allowed_prefixes, + } + }, + }, + ], + } + + session_name = f"rtd-{build.id}-{project.slug}-{version.slug}" + credentials = _get_scoped_credentials( + session_name=session_name, + policy=policy, + duration=duration, + ) + return AWSS3TemporaryCredentials( + access_key_id=credentials.access_key_id, + secret_access_key=credentials.secret_access_key, + session_token=credentials.session_token, + region_name=settings.AWS_S3_REGION_NAME, + bucket_name=settings.S3_MEDIA_STORAGE_BUCKET, + ) + + +def get_s3_build_tools_scoped_credentials( + *, + build, + duration=60 * 15, +) -> AWSS3TemporaryCredentials: + """ + Get temporary credentials with read-only access to the build-tools bucket. + + :param build: The build to get the credentials for. + :param duration: The duration of the credentials in seconds. Default is 15 minutes. + Note that the minimum duration time is 15 minutes and the maximum is given by the role (defaults to 1 hour). 
+ """ + project = build.project + version = build.version + bucket = settings.S3_BUILD_TOOLS_STORAGE_BUCKET + bucket_arn = f"arn:aws:s3:::{bucket}" + + # Inline policy to limit the permissions of the temporary credentials. + # The build-tools bucket is publicly readable, so we don't need to limit the permissions to a specific path. + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:ListBucket", + ], + "Resource": [ + bucket_arn, + f"{bucket_arn}/*", + ], + }, + ], + } + session_name = f"rtd-{build.id}-{project.slug}-{version.slug}" + credentials = _get_scoped_credentials( + session_name=session_name, + policy=policy, + duration=duration, + ) + return AWSS3TemporaryCredentials( + access_key_id=credentials.access_key_id, + secret_access_key=credentials.secret_access_key, + session_token=credentials.session_token, + region_name=settings.AWS_S3_REGION_NAME, + bucket_name=bucket, + ) diff --git a/readthedocs/aws/tests/__init__.py b/readthedocs/aws/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/readthedocs/aws/tests/test_security_token_service.py b/readthedocs/aws/tests/test_security_token_service.py new file mode 100644 index 00000000000..b0676658d02 --- /dev/null +++ b/readthedocs/aws/tests/test_security_token_service.py @@ -0,0 +1,283 @@ +import json +from unittest import mock + +from django.contrib.auth.models import User +from django.test import TestCase, override_settings +from django_dynamic_fixture import get + +from readthedocs.builds.constants import EXTERNAL +from readthedocs.builds.models import Build +from readthedocs.projects.models import Project +from readthedocs.aws.security_token_service import ( + AWSS3TemporaryCredentials, + get_s3_build_media_scoped_credentials, + get_s3_build_tools_scoped_credentials, +) + + +@override_settings( + AWS_ACCESS_KEY_ID="global_access_key_id", + AWS_SECRET_ACCESS_KEY="global_secret_access_key", + AWS_S3_REGION_NAME="us-east-1", + AWS_STS_ASSUME_ROLE_ARN="arn:aws:iam::1234:role/RoleName", + S3_MEDIA_STORAGE_BUCKET="readthedocs-media", + S3_BUILD_TOOLS_STORAGE_BUCKET="readthedocs-build-tools", +) +class TestSecurityTokenService(TestCase): + def setUp(self): + self.user = get(User) + self.project = get( + Project, + slug="project", + users=[self.user], + ) + self.version = self.project.versions.first() + self.build = get( + Build, + version=self.version, + project=self.project, + ) + + @override_settings(USING_AWS=False, DEBUG=True) + def test_get_s3_build_media_global_credentials(self): + credentials = get_s3_build_media_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="global_access_key_id", + secret_access_key="global_secret_access_key", + session_token=None, + region_name="us-east-1", + bucket_name="readthedocs-media", + ) + + @mock.patch("readthedocs.aws.security_token_service.boto3.client") + def test_get_s3_build_media_scoped_credentials(self, boto3_client): + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_build_media_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-media", + ) + + policy = { + "Version": "2012-10-17", + "Statement": 
[ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-media/html/project/latest/*", + "arn:aws:s3:::readthedocs-media/pdf/project/latest/*", + "arn:aws:s3:::readthedocs-media/epub/project/latest/*", + "arn:aws:s3:::readthedocs-media/htmlzip/project/latest/*", + "arn:aws:s3:::readthedocs-media/json/project/latest/*", + "arn:aws:s3:::readthedocs-media/diff/project/latest/*", + ], + }, + { + "Effect": "Allow", + "Action": ["s3:ListBucket"], + "Resource": [ + "arn:aws:s3:::readthedocs-media", + ], + "Condition": { + "StringLike": { + "s3:prefix": [ + "html/project/latest/*", + "pdf/project/latest/*", + "epub/project/latest/*", + "htmlzip/project/latest/*", + "json/project/latest/*", + "diff/project/latest/*", + ] + } + }, + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName=f"rtd-{self.build.id}-project-latest", + Policy=json.dumps(policy), + DurationSeconds=15 * 60, + ) + + @mock.patch("readthedocs.aws.security_token_service.boto3.client") + def test_get_s3_build_media_scoped_credentials_external_version(self, boto3_client): + self.version.type = EXTERNAL + self.version.save() + + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_build_media_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-media", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-media/external/html/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/pdf/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/epub/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/htmlzip/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/json/project/latest/*", + "arn:aws:s3:::readthedocs-media/external/diff/project/latest/*", + ], + }, + { + "Effect": "Allow", + "Action": ["s3:ListBucket"], + "Resource": [ + "arn:aws:s3:::readthedocs-media", + ], + "Condition": { + "StringLike": { + "s3:prefix": [ + "external/html/project/latest/*", + "external/pdf/project/latest/*", + "external/epub/project/latest/*", + "external/htmlzip/project/latest/*", + "external/json/project/latest/*", + "external/diff/project/latest/*", + ] + } + }, + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName=f"rtd-{self.build.id}-project-latest", + Policy=json.dumps(policy), + DurationSeconds=15 * 60, + ) + + @override_settings(USING_AWS=False, DEBUG=True) + def test_get_s3_build_tools_global_credentials(self): + credentials = get_s3_build_tools_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="global_access_key_id", + secret_access_key="global_secret_access_key", + session_token=None, + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + + @mock.patch("readthedocs.aws.security_token_service.boto3.client") + def test_get_s3_build_tools_scoped_credentials(self, boto3_client): + 
boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_build_tools_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:ListBucket", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-build-tools", + "arn:aws:s3:::readthedocs-build-tools/*", + ], + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName=f"rtd-{self.build.id}-project-latest", + Policy=json.dumps(policy), + DurationSeconds=15 * 60, + ) + + @mock.patch("readthedocs.aws.security_token_service.boto3.client") + def test_get_s3_build_tools_scoped_credentials_external_version(self, boto3_client): + self.version.type = EXTERNAL + self.version.save() + + boto3_client().assume_role.return_value = { + "Credentials": { + "AccessKeyId": "access_key_id", + "SecretAccessKey": "secret_access_key", + "SessionToken": "session_token", + } + } + credentials = get_s3_build_tools_scoped_credentials(build=self.build) + assert credentials == AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + + policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:ListBucket", + ], + "Resource": [ + "arn:aws:s3:::readthedocs-build-tools", + "arn:aws:s3:::readthedocs-build-tools/*", + ], + }, + ], + } + + boto3_client().assume_role.assert_called_once_with( + RoleArn="arn:aws:iam::1234:role/RoleName", + RoleSessionName=f"rtd-{self.build.id}-project-latest", + Policy=json.dumps(policy), + DurationSeconds=15 * 60, + ) diff --git a/readthedocs/doc_builder/director.py b/readthedocs/doc_builder/director.py index 963c18b4901..972f287100b 100644 --- a/readthedocs/doc_builder/director.py +++ b/readthedocs/doc_builder/director.py @@ -31,7 +31,8 @@ from readthedocs.projects.signals import after_build from readthedocs.projects.signals import before_build from readthedocs.projects.signals import before_vcs -from readthedocs.storage import build_tools_storage +from readthedocs.projects.tasks.storage import StorageType +from readthedocs.projects.tasks.storage import get_storage log = structlog.get_logger(__name__) @@ -518,6 +519,13 @@ def install_build_tools(self): record=False, ) + build_tools_storage = get_storage( + project=self.data.project, + build_id=self.data.build["id"], + api_client=self.data.api_client, + storage_type=StorageType.build_tools, + ) + for tool, version in self.data.config.build.tools.items(): full_version = version.full_version # e.g. 
3.9 -> 3.9.7 diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py index 3fcad60270a..fdf30058cd0 100644 --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -1918,6 +1918,7 @@ def add_features(sender, **kwargs): # Build related features SCALE_IN_PROTECTION = "scale_in_prtection" + USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS = "use_s3_scoped_credentials_on_builders" FEATURES = ( ( @@ -1992,6 +1993,10 @@ def add_features(sender, **kwargs): SCALE_IN_PROTECTION, _("Build: Set scale-in protection before/after building."), ), + ( + USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS, + _("Build: Use S3 scoped credentials for uploading build artifacts."), + ), ) FEATURES = sorted(FEATURES, key=lambda x: x[1]) diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index 563c36581de..87adcb9b998 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -53,7 +53,8 @@ from readthedocs.doc_builder.exceptions import BuildUserError from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError from readthedocs.projects.models import Feature -from readthedocs.storage import build_media_storage +from readthedocs.projects.tasks.storage import StorageType +from readthedocs.projects.tasks.storage import get_storage from readthedocs.telemetry.collectors import BuildDataCollector from readthedocs.telemetry.tasks import save_build_data from readthedocs.worker import app @@ -902,6 +903,13 @@ def store_build_artifacts(self): types_to_copy = [] types_to_delete = [] + build_media_storage = get_storage( + project=self.data.project, + build_id=self.data.build["id"], + api_client=self.data.api_client, + storage_type=StorageType.build_media, + ) + for artifact_type in ARTIFACT_TYPES: if artifact_type in valid_artifacts: types_to_copy.append(artifact_type) diff --git a/readthedocs/projects/tasks/storage.py b/readthedocs/projects/tasks/storage.py new file mode 100644 index 00000000000..4613cb71aba --- /dev/null +++ b/readthedocs/projects/tasks/storage.py @@ -0,0 +1,97 @@ +from enum import StrEnum +from enum import auto + +import structlog +from django.conf import settings +from django.utils.module_loading import import_string + +from readthedocs.doc_builder.exceptions import BuildAppError +from readthedocs.projects.models import Feature + + +log = structlog.get_logger(__name__) + + +class StorageType(StrEnum): + build_media = auto() + build_tools = auto() + + +def get_storage(*, project, build_id, api_client, storage_type: StorageType): + """ + Get a storage class instance to interact with storage from the build. + + .. note:: + + We no longer use readthedocs.storage.build_media_storage/build_tools_storage directly, + as we are now using per-build credentials for S3 storage, + so we need to dynamically create the storage class instance + """ + storage_class = _get_storage_class(storage_type) + extra_kwargs = {} + if settings.USING_AWS: + extra_kwargs = _get_s3_scoped_credentials( + project=project, build_id=build_id, api_client=api_client, storage_type=storage_type + ) + return storage_class(**extra_kwargs) + + +def _get_storage_class(storage_type: StorageType): + """ + Get a storage class to use for interacting with storage. + + .. 
note:: + + We no longer use readthedocs.storage.build_media_storage/build_tools_storage directly, + as we are now using per-build credentials for S3 storage, + so we need to dynamically create the storage class instance + """ + if storage_type == StorageType.build_media: + return _get_build_media_storage_class() + if storage_type == StorageType.build_tools: + return _get_build_tools_storage_class() + raise ValueError("Invalid storage type") + + +def _get_build_media_storage_class(): + """ + Get a storage class to use for syncing build artifacts. + + This is done in a separate method to make it easier to mock in tests. + """ + return import_string(settings.RTD_BUILD_MEDIA_STORAGE) + + +def _get_build_tools_storage_class(): + """ + Get a storage class to use for downloading build tools. + + This is done in a separate method to make it easier to mock in tests. + """ + return import_string(settings.RTD_BUILD_TOOLS_STORAGE) + + +def _get_s3_scoped_credentials(*, project, build_id, api_client, storage_type: StorageType): + """Get the scoped credentials for the build using our internal API.""" + if not project.has_feature(Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS): + # Use the default credentials defined in the settings. + return {} + + try: + credentials = api_client.build(f"{build_id}/credentials/storage").post( + {"type": storage_type.value} + ) + except Exception: + raise BuildAppError( + BuildAppError.GENERIC_WITH_BUILD_ID, + exception_message="Error getting scoped credentials.", + ) + + s3_credentials = credentials["s3"] + return { + "access_key": s3_credentials["access_key_id"], + "secret_key": s3_credentials["secret_access_key"], + "security_token": s3_credentials["session_token"], + "bucket_name": s3_credentials["bucket_name"], + "region_name": s3_credentials["region_name"], + } diff --git a/readthedocs/projects/tests/mockers.py b/readthedocs/projects/tests/mockers.py index f2b9604fc05..c1d2930a302 100644 --- a/readthedocs/projects/tests/mockers.py +++ b/readthedocs/projects/tests/mockers.py @@ -7,6 +7,7 @@ from readthedocs.api.v2.serializers import VersionAdminSerializer from readthedocs.builds.constants import BUILD_STATE_TRIGGERED from readthedocs.projects.constants import MKDOCS +from readthedocs.projects.tasks.builds import UpdateDocsTask class BuildEnvironmentMocker: @@ -159,9 +160,7 @@ def _mock_environment(self): # ) def _mock_storage(self): - self.patches["build_media_storage"] = mock.patch( - "readthedocs.projects.tasks.builds.build_media_storage", - ) + self.patches["get_build_media_storage_class"] = mock.patch("readthedocs.projects.tasks.storage._get_build_media_storage_class") def _mock_api(self): headers = {"Content-Type": "application/json"} @@ -225,6 +224,21 @@ def _mock_api(self): headers=headers, ) + self.requestsmock.post( + f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/credentials/storage/", + status_code=201, + json={ + "s3": { + "access_key_id": "some-access-key", + "secret_access_key": "some-secret-key", + "session_token": "some-session-token", + "bucket_name": "some-bucket-name", + "region_name": "us-east-1", + } + }, + headers=headers, + ) + self.requestsmock.patch( f"{settings.SLUMBER_API_HOST}/api/v2/project/{self.project.pk}/", status_code=201, diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py index b9380048fc6..cb0cbb7dd00 100644 --- a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -6,6 +6,7 @@ from unittest import mock import 
django_dynamic_fixture as fixture +from django_dynamic_fixture import get import pytest from django.conf import settings from django.test.utils import override_settings @@ -22,7 +23,7 @@ from readthedocs.config.tests.test_config import get_build_config from readthedocs.doc_builder.exceptions import BuildCancelled, BuildUserError from readthedocs.projects.exceptions import RepositoryError -from readthedocs.projects.models import EnvironmentVariable, Project, WebHookEvent +from readthedocs.projects.models import EnvironmentVariable, Feature, Project, WebHookEvent from readthedocs.projects.tasks.builds import sync_repository_task, update_docs_task from readthedocs.telemetry.models import BuildData @@ -660,7 +661,7 @@ def test_successful_build( assert BuildData.objects.all().exists() - self.mocker.mocks["build_media_storage"].rclone_sync_directory.assert_has_calls( + self.mocker.mocks["get_build_media_storage_class"]()().rclone_sync_directory.assert_has_calls( [ mock.call(mock.ANY, "html/project/latest"), mock.call(mock.ANY, "json/project/latest"), @@ -672,6 +673,294 @@ def test_successful_build( # TODO: find a directory to remove here :) # build_media_storage.delete_directory + @override_settings( + DOCROOT="/tmp/readthedocs-tests/git-repository/", + RTD_BUILD_MEDIA_STORAGE = "readthedocs.storage.s3_storage.S3BuildMediaStorage", + S3_MEDIA_STORAGE_BUCKET="readthedocs-test", + ) + @mock.patch("readthedocs.projects.tasks.builds.shutil") + @mock.patch("readthedocs.projects.tasks.builds.index_build") + @mock.patch("readthedocs.projects.tasks.builds.build_complete") + @mock.patch("readthedocs.projects.tasks.builds.send_external_build_status") + @mock.patch("readthedocs.projects.tasks.builds.UpdateDocsTask.send_notifications") + @mock.patch("readthedocs.projects.tasks.builds.clean_build") + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") + def test_successful_build_with_temporary_s3_credentials( + self, + load_yaml_config, + clean_build, + send_notifications, + send_external_build_status, + build_complete, + index_build, + shutilmock, + ): + get( + Feature, + feature_id=Feature.USE_S3_SCOPED_CREDENTIALS_ON_BUILDERS, + projects=[self.project], + ) + load_yaml_config.return_value = get_build_config( + { + "formats": "all", + "sphinx": { + "configuration": "docs/conf.py", + }, + }, + validate=True, + ) + + assert not BuildData.objects.all().exists() + + # Create the artifact paths, so it's detected by the builder + os.makedirs(self.project.artifact_path(version=self.version.slug, type_="html")) + os.makedirs(self.project.artifact_path(version=self.version.slug, type_="json")) + filename = str(uuid.uuid4()) + for f in ("htmlzip", "epub", "pdf"): + extension = "zip" if f == "htmlzip" else f + os.makedirs(self.project.artifact_path(version=self.version.slug, type_=f)) + pathlib.Path( + os.path.join( + self.project.artifact_path(version=self.version.slug, type_=f), + # Use a random name for the offline format. + # We will automatically rename this file to filename El Proxito expects. + f"{filename}.{extension}", + ) + ).touch() + + # Create an "index.html" at root to avoid failing the builds + pathlib.Path( + os.path.join( + self.project.artifact_path(version=self.version.slug, type_="html"), + "index.html", + ) + ).touch() + + self._trigger_update_docs_task() + + # Offline formats were renamed to the correct filename. 
+ shutilmock.move.assert_has_calls( + [ + mock.call( + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/htmlzip/{filename}.zip" + ), + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/htmlzip/{self.project.slug}.zip" + ), + ), + mock.call( + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/pdf/{filename}.pdf" + ), + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/pdf/{self.project.slug}.pdf" + ), + ), + mock.call( + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/epub/{filename}.epub" + ), + Path( + f"/tmp/readthedocs-tests/git-repository/_readthedocs/epub/{self.project.slug}.epub" + ), + ), + ] + ) + + # It has to be called twice, ``before_start`` and ``after_return`` + clean_build.assert_has_calls( + [mock.call(mock.ANY), mock.call(mock.ANY)] # the argument is an APIVersion + ) + + # TODO: mock `build_tasks.send_build_notifications` instead and add + # another tests to check that they are not sent for EXTERNAL versions + send_notifications.assert_called_once_with( + self.version.pk, + self.build.pk, + event=WebHookEvent.BUILD_PASSED, + ) + + send_external_build_status.assert_called_once_with( + version_type=self.version.type, + build_pk=self.build.pk, + commit=self.build.commit, + status=BUILD_STATUS_SUCCESS, + ) + + build_complete.send.assert_called_once_with( + sender=Build, + build=mock.ANY, + ) + + index_build.delay.assert_called_once_with(build_id=self.build.pk) + + # TODO: assert the verb and the path for each API call as well + + # Build reset + assert self.requests_mock.request_history[3]._request.method == "POST" + assert self.requests_mock.request_history[3].path == "/api/v2/build/1/reset/" + + # Update build state: cloning + assert self.requests_mock.request_history[4].json() == { + "id": 1, + "state": "cloning", + "commit": "a1b2c3", + "error": "", + "builder": mock.ANY, + } + + # Update build state: installing + assert self.requests_mock.request_history[5].json() == { + "id": 1, + "state": "installing", + "commit": "a1b2c3", + "builder": mock.ANY, + "readthedocs_yaml_path": None, + "error": "", + # We update the `config` field at the same time we send the + # `installing` state, to reduce one API call + "config": { + "version": "2", + "formats": ["htmlzip", "pdf", "epub"], + "python": { + "install": [], + }, + "conda": None, + "build": { + "os": "ubuntu-22.04", + "commands": [], + "jobs": { + "pre_checkout": [], + "post_checkout": [], + "pre_system_dependencies": [], + "post_system_dependencies": [], + "pre_create_environment": [], + "create_environment": None, + "post_create_environment": [], + "pre_install": [], + "install": None, + "post_install": [], + "pre_build": [], + "build": { + "html": None, + "pdf": None, + "epub": None, + "htmlzip": None, + }, + "post_build": [], + }, + "tools": { + "python": { + "full_version": "3.13.0", + "version": "3", + } + }, + "apt_packages": [], + }, + "doctype": "sphinx", + "sphinx": { + "builder": "sphinx", + "configuration": "docs/conf.py", + "fail_on_warning": False, + }, + "mkdocs": None, + "submodules": { + "include": [], + "exclude": "all", + "recursive": False, + }, + "search": { + "ranking": {}, + "ignore": [ + "search.html", + "search/index.html", + "404.html", + "404/index.html", + ], + }, + }, + } + + # Get temporary credentials + assert self.requests_mock.request_history[6]._request.method == "POST" + assert self.requests_mock.request_history[6].path == "/api/v2/build/1/credentials/storage/" + assert self.requests_mock.request_history[6].json() == { + "type": 
"build_tools", + } + + # Update build state: building + assert self.requests_mock.request_history[7].json() == { + "id": 1, + "state": "building", + "commit": "a1b2c3", + "readthedocs_yaml_path": None, + "config": mock.ANY, + "builder": mock.ANY, + "error": "", + } + # Update build state: uploading + assert self.requests_mock.request_history[8].json() == { + "id": 1, + "state": "uploading", + "commit": "a1b2c3", + "readthedocs_yaml_path": None, + "config": mock.ANY, + "builder": mock.ANY, + "error": "", + } + + # Get temporary credentials + assert self.requests_mock.request_history[9]._request.method == "POST" + assert self.requests_mock.request_history[9].path == "/api/v2/build/1/credentials/storage/" + assert self.requests_mock.request_history[9].json() == { + "type": "build_media", + } + + # Update version state + assert self.requests_mock.request_history[10]._request.method == "PATCH" + assert self.requests_mock.request_history[10].path == "/api/v2/version/1/" + assert self.requests_mock.request_history[10].json() == { + "addons": False, + "build_data": None, + "built": True, + "documentation_type": "sphinx", + "has_pdf": True, + "has_epub": True, + "has_htmlzip": True, + } + # Set project has valid clone + assert self.requests_mock.request_history[11]._request.method == "PATCH" + assert self.requests_mock.request_history[11].path == "/api/v2/project/1/" + assert self.requests_mock.request_history[11].json() == {"has_valid_clone": True} + # Update build state: finished, success and builder + assert self.requests_mock.request_history[12].json() == { + "id": 1, + "state": "finished", + "commit": "a1b2c3", + "readthedocs_yaml_path": None, + "config": mock.ANY, + "builder": mock.ANY, + "length": mock.ANY, + "success": True, + "error": "", + } + + assert self.requests_mock.request_history[13]._request.method == "POST" + assert self.requests_mock.request_history[13].path == "/api/v2/revoke/" + + assert BuildData.objects.all().exists() + + self.mocker.mocks["get_build_media_storage_class"]()().rclone_sync_directory.assert_has_calls( + [ + mock.call(mock.ANY, "html/project/latest"), + mock.call(mock.ANY, "json/project/latest"), + mock.call(mock.ANY, "htmlzip/project/latest"), + mock.call(mock.ANY, "pdf/project/latest"), + mock.call(mock.ANY, "epub/project/latest"), + ] + ) + @mock.patch("readthedocs.projects.tasks.builds.build_complete") @mock.patch("readthedocs.projects.tasks.builds.send_external_build_status") @mock.patch("readthedocs.projects.tasks.builds.UpdateDocsTask.execute") @@ -1550,9 +1839,9 @@ def test_build_jobs_partial_build_override_empty_commands(self, load_yaml_config ) @mock.patch("readthedocs.doc_builder.director.tarfile") - @mock.patch("readthedocs.doc_builder.director.build_tools_storage") + @mock.patch("readthedocs.projects.tasks.storage._get_build_tools_storage_class") @mock.patch("readthedocs.doc_builder.director.load_yaml_config") - def test_build_tools_cached(self, load_yaml_config, build_tools_storage, tarfile): + def test_build_tools_cached(self, load_yaml_config, get_build_tools_storage_class, tarfile): config = BuildConfigV2( { "version": 2, @@ -1571,8 +1860,8 @@ def test_build_tools_cached(self, load_yaml_config, build_tools_storage, tarfile config.validate() load_yaml_config.return_value = config - build_tools_storage.open.return_value = b"" - build_tools_storage.exists.return_value = True + get_build_tools_storage_class()().open.return_value = b"" + get_build_tools_storage_class()().exists.return_value = True 
tarfile.open.return_value.__enter__.return_value.extract_all.return_value = None self._trigger_update_docs_task() diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py index cd6ca0da9de..1a2f545b04f 100644 --- a/readthedocs/rtd_tests/tests/test_api.py +++ b/readthedocs/rtd_tests/tests/test_api.py @@ -45,6 +45,7 @@ BUILD_STATE_CLONING, BUILD_STATE_FINISHED, BUILD_STATE_TRIGGERED, + BUILD_STATE_UPLOADING, EXTERNAL, EXTERNAL_VERSION_STATE_CLOSED, LATEST, @@ -69,6 +70,7 @@ Feature, Project, ) +from readthedocs.aws.security_token_service import AWSS3TemporaryCredentials from readthedocs.subscriptions.constants import TYPE_CONCURRENT_BUILDS from readthedocs.subscriptions.products import RTDProductFeature from readthedocs.vcs_support.backends.git import parse_version_from_ref @@ -143,6 +145,73 @@ def test_reset_build(self): self.assertEqual(build.commands.count(), 0) self.assertEqual(build.notifications.count(), 0) + @mock.patch("readthedocs.api.v2.views.model_views.get_s3_build_tools_scoped_credentials") + @mock.patch("readthedocs.api.v2.views.model_views.get_s3_build_media_scoped_credentials") + def test_get_temporary_credentials_for_build(self, get_s3_build_media_scoped_credentials, get_s3_build_tools_scoped_credentials): + build = get( + Build, + project=self.project, + version=self.version, + state=BUILD_STATE_UPLOADING, + success=False, + output="Output", + error="Error", + exit_code=0, + builder="Builder", + cold_storage=True, + ) + + client = APIClient() + _, build_api_key = BuildAPIKey.objects.create_key(self.project) + client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}") + get_s3_build_media_scoped_credentials.return_value = AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-media", + ) + r = client.post(reverse("build-credentials-for-storage", args=(build.pk,)), {"type": "build_media"}) + assert r.status_code == 200 + assert r.data == { + "s3": { + "access_key_id": "access_key_id", + "secret_access_key": "secret_access_key", + "session_token": "session_token", + "region_name": "us-east-1", + "bucket_name": "readthedocs-media", + } + } + + get_s3_build_media_scoped_credentials.assert_called_once_with( + build=build, + duration=60 * 30, + ) + + get_s3_build_tools_scoped_credentials.return_value = AWSS3TemporaryCredentials( + access_key_id="access_key_id", + secret_access_key="secret_access_key", + session_token="session_token", + region_name="us-east-1", + bucket_name="readthedocs-build-tools", + ) + r = client.post(reverse("build-credentials-for-storage", args=(build.pk,)), {"type": "build_tools"}) + assert r.status_code == 200 + assert r.data == { + "s3": { + "access_key_id": "access_key_id", + "secret_access_key": "secret_access_key", + "session_token": "session_token", + "region_name": "us-east-1", + "bucket_name": "readthedocs-build-tools", + } + } + + get_s3_build_tools_scoped_credentials.assert_called_once_with( + build=build, + duration=60 * 30, + ) + def test_api_does_not_have_private_config_key_superuser(self): client = APIClient() client.login(username="super", password="test") diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py index 566d3ec8d4e..799fedf311b 100644 --- a/readthedocs/settings/base.py +++ b/readthedocs/settings/base.py @@ -1054,3 +1054,10 @@ def GITHUB_APP_CLIENT_ID(self): RTD_SPAM_MAX_SCORE = 9999 S3_PROVIDER = "AWS" + # Used by 
readthedocs.aws.security_token_service. + AWS_STS_ASSUME_ROLE_ARN = "arn:aws:iam::1234:role/SomeRole" + + @property + def USING_AWS(self): + """Return True if we are using AWS as our storage/cloud provider.""" + return self.S3_PROVIDER == "AWS" diff --git a/readthedocs/settings/docker_compose.py b/readthedocs/settings/docker_compose.py index e6007c60573..ef84bee93d6 100644 --- a/readthedocs/settings/docker_compose.py +++ b/readthedocs/settings/docker_compose.py @@ -182,20 +182,28 @@ def DATABASES(self): # noqa STATICFILES_STORAGE = "readthedocs.storage.s3_storage.S3StaticStorage" RTD_STATICFILES_STORAGE = "readthedocs.storage.s3_storage.NoManifestS3StaticStorage" - AWS_ACCESS_KEY_ID = "admin" - AWS_SECRET_ACCESS_KEY = "password" - S3_MEDIA_STORAGE_BUCKET = "media" - S3_BUILD_COMMANDS_STORAGE_BUCKET = "builds" - S3_BUILD_TOOLS_STORAGE_BUCKET = "build-tools" - S3_STATIC_STORAGE_BUCKET = "static" + AWS_ACCESS_KEY_ID = os.environ.get("RTD_AWS_ACCESS_KEY_ID", "admin") + AWS_SECRET_ACCESS_KEY = os.environ.get("RTD_AWS_SECRET_ACCESS_KEY", "password") + S3_MEDIA_STORAGE_BUCKET = os.environ.get("RTD_S3_MEDIA_STORAGE_BUCKET", "media") + S3_BUILD_COMMANDS_STORAGE_BUCKET = os.environ.get("RTD_S3_BUILD_COMMANDS_STORAGE_BUCKET", "builds") + S3_BUILD_TOOLS_STORAGE_BUCKET = os.environ.get("RTD_S3_BUILD_TOOLS_STORAGE_BUCKET", "build-tools") + S3_STATIC_STORAGE_BUCKET = os.environ.get("RTD_S3_STATIC_STORAGE_BUCKET", "static") S3_STATIC_STORAGE_OVERRIDE_HOSTNAME = PRODUCTION_DOMAIN S3_MEDIA_STORAGE_OVERRIDE_HOSTNAME = PRODUCTION_DOMAIN - S3_PROVIDER = "minio" + S3_PROVIDER = os.environ.get("RTD_S3_PROVIDER", "minio") AWS_S3_ENCRYPTION = False AWS_S3_SECURE_URLS = False AWS_S3_USE_SSL = False - AWS_S3_ENDPOINT_URL = "http://storage:9000/" + AWS_STS_ASSUME_ROLE_ARN = os.environ.get("RTD_AWS_STS_ASSUME_ROLE_ARN", None) + AWS_S3_REGION_NAME = os.environ.get("RTD_AWS_S3_REGION_NAME", None) + + @property + def AWS_S3_ENDPOINT_URL(self): + if self.S3_PROVIDER == "minio": + return "http://storage:9000/" + return None + AWS_QUERYSTRING_AUTH = False STRIPE_SECRET = os.environ.get("RTD_STRIPE_SECRET", "sk_test_x") diff --git a/readthedocs/storage/rclone.py b/readthedocs/storage/rclone.py index b869a2e5f7a..ae3139892f4 100644 --- a/readthedocs/storage/rclone.py +++ b/readthedocs/storage/rclone.py @@ -150,11 +150,14 @@ class RCloneS3Remote(BaseRClone): All secrets will be passed as environ variables to the rclone command. - See https://rclone.org/s3/. + See https://rclone.org/s3/ and https://rclone.org/s3/#configuration. :params bucket_name: Name of the S3 bucket. :params access_key_id: AWS access key id. - :params secret_acces_key: AWS secret access key. + :params secret_access_key: AWS secret access key. + :params session_token: AWS session token, + useful for temporary credentials from AWS STS. + See https://docs.readthedocs.com/dev/latest/aws-temporary-credentials.html. :params region: AWS region. :params provider: S3 provider, defaults to ``AWS``. Useful to use Minio during development. 
@@ -170,9 +173,10 @@ def __init__( self, bucket_name, access_key_id, - secret_acces_key, + secret_access_key, region, provider="AWS", + session_token=None, acl=None, endpoint=None, ): @@ -181,10 +185,12 @@ def __init__( self.env_vars = { "RCLONE_S3_PROVIDER": provider, "RCLONE_S3_ACCESS_KEY_ID": access_key_id, - "RCLONE_S3_SECRET_ACCESS_KEY": secret_acces_key, + "RCLONE_S3_SECRET_ACCESS_KEY": secret_access_key, "RCLONE_S3_REGION": region, "RCLONE_S3_LOCATION_CONSTRAINT": region, } + if session_token: + self.env_vars["RCLONE_S3_SESSION_TOKEN"] = session_token if acl: self.env_vars["RCLONE_S3_ACL"] = acl if endpoint: diff --git a/readthedocs/storage/s3_storage.py b/readthedocs/storage/s3_storage.py index e81914c6744..dd110d597df 100644 --- a/readthedocs/storage/s3_storage.py +++ b/readthedocs/storage/s3_storage.py @@ -44,7 +44,8 @@ def _rclone(self): return RCloneS3Remote( bucket_name=self.bucket_name, access_key_id=self.access_key, - secret_acces_key=self.secret_key, + secret_access_key=self.secret_key, + session_token=self.security_token, region=self.region_name or "", acl=self.default_acl, endpoint=self.endpoint_url, diff --git a/readthedocs/storage/tests/__init__.py b/readthedocs/storage/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d
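As a quick sanity check for the IAM setup described in ``docs/dev/aws-temporary-credentials.rst``, the role and the ``sts:AssumeRole`` permission can be exercised directly with boto3 before running a build. This is an illustrative sketch only, not part of the patch: the access keys, account ID, region, and bucket are placeholders (the role name matches the ``RTDSTSAssumeRoleDev`` role mentioned in the docs), and the inline policy is a reduced version of the one built by ``get_s3_build_media_scoped_credentials``.

.. code-block:: python

    import json

    import boto3

    # Placeholders: use the values you set in RTD_AWS_ACCESS_KEY_ID,
    # RTD_AWS_SECRET_ACCESS_KEY, RTD_AWS_S3_REGION_NAME and RTD_AWS_STS_ASSUME_ROLE_ARN.
    sts = boto3.client(
        "sts",
        aws_access_key_id="your-access-key-id",
        aws_secret_access_key="your-secret-access-key",
        region_name="us-east-1",
    )

    # Reduced inline policy: the effective permissions are the intersection of
    # this policy and the role's own policy.
    policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": ["s3:ListBucket"],
                "Resource": ["arn:aws:s3:::readthedocs-media"],
            }
        ],
    }

    response = sts.assume_role(
        RoleArn="arn:aws:iam::123456789012:role/RTDSTSAssumeRoleDev",
        RoleSessionName="rtd-local-sanity-check",
        Policy=json.dumps(policy),
        DurationSeconds=15 * 60,  # STS minimum duration
    )
    print(response["Credentials"]["AccessKeyId"])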
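On the builder side, the credentials arrive through the new ``/api/v2/build/<id>/credentials/storage/`` endpoint and are handed to the storage class by ``_get_s3_scoped_credentials``. The following sketch, also illustrative and not part of the patch, assumes a ``build_media`` response shaped like the one mocked in ``mockers.py`` and shows how the ``s3`` payload maps onto a plain boto3 client, which is a convenient way to confirm that access really is limited to the build's prefixes.

.. code-block:: python

    import boto3
    from botocore.exceptions import ClientError

    # Example payload shape returned by the endpoint (values are placeholders).
    s3_credentials = {
        "access_key_id": "some-access-key",
        "secret_access_key": "some-secret-key",
        "session_token": "some-session-token",
        "bucket_name": "readthedocs-media",
        "region_name": "us-east-1",
    }

    s3 = boto3.client(
        "s3",
        aws_access_key_id=s3_credentials["access_key_id"],
        aws_secret_access_key=s3_credentials["secret_access_key"],
        aws_session_token=s3_credentials["session_token"],
        region_name=s3_credentials["region_name"],
    )

    # Allowed: a prefix granted by the scoped policy for this build.
    s3.list_objects_v2(Bucket=s3_credentials["bucket_name"], Prefix="html/project/latest/")

    # Denied: anything outside the scoped prefixes raises AccessDenied.
    try:
        s3.list_objects_v2(
            Bucket=s3_credentials["bucket_name"], Prefix="html/another-project/latest/"
        )
    except ClientError as exc:
        print(exc.response["Error"]["Code"])  # "AccessDenied"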