diff --git a/readthedocs/builds/storage.py b/readthedocs/builds/storage.py
index 05f53eb3332..da57c40d244 100644
--- a/readthedocs/builds/storage.py
+++ b/readthedocs/builds/storage.py
@@ -128,60 +128,6 @@ def _check_suspicious_path(self, path):
         log.error(msg, path=str(path), resolved_path=str(resolved_path))
         raise SuspiciousFileOperation(msg)
 
-    def sync_directory(self, source, destination):
-        """
-        Sync a directory recursively to storage.
-
-        Overwrites files in remote storage with files from ``source`` (no timstamp/hash checking).
-        Removes files and folders in remote storage that are not present in ``source``.
-
-        :param source: the source path on the local disk
-        :param destination: the destination path in storage
-        """
-        if destination in ('', '/'):
-            raise SuspiciousFileOperation('Syncing all storage cannot be right')
-
-        source = Path(source)
-        self._check_suspicious_path(source)
-
-        log.debug(
-            'Syncing to media storage.',
-            source=str(source),
-            destination=destination,
-        )
-
-        copied_files = set()
-        copied_dirs = set()
-        for filepath in source.iterdir():
-            sub_destination = self.join(destination, filepath.name)
-            # Don't follow symlinks when uploading to storage.
-            if filepath.is_symlink():
-                log.info(
-                    "Skipping symlink upload.",
-                    path_resolved=str(filepath.resolve()),
-                )
-                continue
-
-            if filepath.is_dir():
-                # Recursively sync the subdirectory
-                self.sync_directory(filepath, sub_destination)
-                copied_dirs.add(filepath.name)
-            elif filepath.is_file():
-                with safe_open(filepath, "rb") as fd:
-                    self.save(sub_destination, fd)
-                copied_files.add(filepath.name)
-
-        # Remove files that are not present in ``source``
-        dest_folders, dest_files = self.listdir(self._dirpath(destination))
-        for folder in dest_folders:
-            if folder not in copied_dirs:
-                self.delete_directory(self.join(destination, folder))
-        for filename in dest_files:
-            if filename not in copied_files:
-                filepath = self.join(destination, filename)
-                log.debug('Deleting file from media storage.', filepath=filepath)
-                self.delete(filepath)
-
     @cached_property
     def _rclone(self):
         raise NotImplementedError
diff --git a/readthedocs/builds/tests/test_trigger_build.py b/readthedocs/builds/tests/test_trigger_build.py
index 1e2361de828..b2c39ae3a4b 100644
--- a/readthedocs/builds/tests/test_trigger_build.py
+++ b/readthedocs/builds/tests/test_trigger_build.py
@@ -4,7 +4,6 @@
 import pytest
 
 from readthedocs.builds.constants import (
-    BUILD_FINAL_STATES,
     BUILD_STATE_BUILDING,
     BUILD_STATE_CANCELLED,
     BUILD_STATE_CLONING,
@@ -15,7 +14,7 @@
 )
 from readthedocs.builds.models import Build, Version
 from readthedocs.core.utils import trigger_build
-from readthedocs.projects.models import Feature, Project
+from readthedocs.projects.models import Project
 
 
 @pytest.mark.django_db
@@ -24,8 +23,6 @@ class TestCancelOldBuilds:
     def setup(self):
         self.project = fixture.get(Project)
         self.version = fixture.get(Version, project=self.project)
-        self.feature = fixture.get(Feature, feature_id=Feature.CANCEL_OLD_BUILDS)
-        self.feature.projects.add(self.project)
 
     @pytest.mark.parametrize(
         "state",
diff --git a/readthedocs/core/utils/__init__.py b/readthedocs/core/utils/__init__.py
index 2b4f1fd9ebb..f37d1a4083a 100644
--- a/readthedocs/core/utils/__init__.py
+++ b/readthedocs/core/utils/__init__.py
@@ -45,7 +45,7 @@ def prepare_build(
     # Avoid circular import
     from readthedocs.builds.models import Build
     from readthedocs.builds.tasks import send_build_notifications
-    from readthedocs.projects.models import Feature, Project, WebHookEvent
+    from readthedocs.projects.models import Project, WebHookEvent
     from readthedocs.projects.tasks.builds import update_docs_task
     from readthedocs.projects.tasks.utils import send_external_build_status
 
@@ -118,45 +118,44 @@ def prepare_build(
     )
 
     # Reduce overhead when doing multiple push on the same version.
-    if project.has_feature(Feature.CANCEL_OLD_BUILDS):
-        running_builds = (
-            Build.objects
-            .filter(
-                project=project,
-                version=version,
-            ).exclude(
-                state__in=BUILD_FINAL_STATES,
-            ).exclude(
-                pk=build.pk,
-            )
+    running_builds = (
+        Build.objects.filter(
+            project=project,
+            version=version,
+        )
+        .exclude(
+            state__in=BUILD_FINAL_STATES,
+        )
+        .exclude(
+            pk=build.pk,
+        )
+    )
+    if running_builds.count() > 0:
+        log.warning(
+            "Canceling running builds automatically due a new one arrived.",
+            running_builds=running_builds.count(),
         )
-        if running_builds.count() > 0:
-            log.warning(
-                "Canceling running builds automatically due a new one arrived.",
-                running_builds=running_builds.count(),
-            )
-            # If there are builds triggered/running for this particular project and version,
-            # we cancel all of them and trigger a new one for the latest commit received.
-            for running_build in running_builds:
-                cancel_build(running_build)
+        # If there are builds triggered/running for this particular project and version,
+        # we cancel all of them and trigger a new one for the latest commit received.
+        for running_build in running_builds:
+            cancel_build(running_build)
 
     # Start the build in X minutes and mark it as limited
-    if project.has_feature(Feature.LIMIT_CONCURRENT_BUILDS):
-        limit_reached, _, max_concurrent_builds = Build.objects.concurrent(project)
-        if limit_reached:
-            log.warning(
-                'Delaying tasks at trigger step due to concurrency limit.',
-            )
-            # Delay the start of the build for the build retry delay.
-            # We're still triggering the task, but it won't run immediately,
-            # and the user will be alerted in the UI from the Error below.
-            options['countdown'] = settings.RTD_BUILDS_RETRY_DELAY
-            options['max_retries'] = settings.RTD_BUILDS_MAX_RETRIES
-            build.error = BuildMaxConcurrencyError.message.format(
-                limit=max_concurrent_builds,
-            )
-            build.save()
+    limit_reached, _, max_concurrent_builds = Build.objects.concurrent(project)
+    if limit_reached:
+        log.warning(
+            "Delaying tasks at trigger step due to concurrency limit.",
+        )
+        # Delay the start of the build for the build retry delay.
+        # We're still triggering the task, but it won't run immediately,
+        # and the user will be alerted in the UI from the Error below.
+        options["countdown"] = settings.RTD_BUILDS_RETRY_DELAY
+        options["max_retries"] = settings.RTD_BUILDS_MAX_RETRIES
+        build.error = BuildMaxConcurrencyError.message.format(
+            limit=max_concurrent_builds,
+        )
+        build.save()
 
     return (
         update_docs_task.signature(
@@ -263,7 +262,7 @@ def cancel_build(build):
 
 def send_email(
     recipient, subject, template, template_html, context=None, request=None,
     from_email=None, **kwargs
-):  # pylint: disable=unused-argument
+):
     """
     Alter context passed in and call email send task.
diff --git a/readthedocs/doc_builder/backends/sphinx.py b/readthedocs/doc_builder/backends/sphinx.py
index 053412d119a..afa1c253421 100644
--- a/readthedocs/doc_builder/backends/sphinx.py
+++ b/readthedocs/doc_builder/backends/sphinx.py
@@ -292,8 +292,10 @@ def append_conf(self):
             raise UserFileNotFound(
                 UserFileNotFound.FILE_NOT_FOUND.format(self.config_file)
             )
-        except IOError:
-            raise ProjectConfigurationError(ProjectConfigurationError.NOT_FOUND)
+        except IOError as exc:
+            raise ProjectConfigurationError(
+                ProjectConfigurationError.NOT_FOUND
+            ) from exc
 
         # Append config to project conf file
         tmpl = template_loader.get_template('doc_builder/conf.py.tmpl')
@@ -320,7 +322,6 @@ def build(self):
             *self.get_sphinx_cmd(),
             '-T',
             '-E',
-            *self.sphinx_parallel_arg(),
         ]
         if self.config.sphinx.fail_on_warning:
             build_command.extend(["-W", "--keep-going"])
@@ -359,38 +360,27 @@ def get_sphinx_cmd(self):
             'sphinx',
         )
 
-    def sphinx_parallel_arg(self):
-        if self.project.has_feature(Feature.SPHINX_PARALLEL):
-            return ['-j', 'auto']
-        return []
-
 
 class HtmlBuilder(BaseSphinx):
     relative_output_dir = "html"
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.sphinx_builder = 'readthedocs'
-        if self.project.has_feature(Feature.USE_SPHINX_BUILDERS):
-            self.sphinx_builder = 'html'
+        self.sphinx_builder = "html"
 
 
 class HtmlDirBuilder(HtmlBuilder):
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.sphinx_builder = 'readthedocsdirhtml'
-        if self.project.has_feature(Feature.USE_SPHINX_BUILDERS):
-            self.sphinx_builder = 'dirhtml'
+        self.sphinx_builder = "dirhtml"
 
 
 class SingleHtmlBuilder(HtmlBuilder):
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.sphinx_builder = 'readthedocssinglehtml'
-        if self.project.has_feature(Feature.USE_SPHINX_BUILDERS):
-            self.sphinx_builder = 'singlehtml'
+        self.sphinx_builder = "singlehtml"
 
 
 class LocalMediaBuilder(BaseSphinx):
@@ -518,7 +508,6 @@ def build(self):
             *self.get_sphinx_cmd(),
             "-T",
             "-E",
-            *self.sphinx_parallel_arg(),
             "-b",
             self.sphinx_builder,
             "-d",
diff --git a/readthedocs/doc_builder/director.py b/readthedocs/doc_builder/director.py
index 11bfebb65f9..197b2be4dcf 100644
--- a/readthedocs/doc_builder/director.py
+++ b/readthedocs/doc_builder/director.py
@@ -180,10 +180,6 @@ def setup_environment(self):
         self.install()
         self.run_build_job("post_install")
 
-        # TODO: remove this and document how to do it on `build.jobs.post_install`
-        if self.data.project.has_feature(Feature.LIST_PACKAGES_INSTALLED_ENV):
-            self.language_environment.list_packages_installed()
-
     def build(self):
         """
         Build all the formats specified by the user.
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py
index e442971f105..e761b5222b5 100644
--- a/readthedocs/doc_builder/environments.py
+++ b/readthedocs/doc_builder/environments.py
@@ -156,7 +156,7 @@ def run(self):
         command = self.get_command()
 
         stderr = subprocess.PIPE if self.demux else subprocess.STDOUT
-        proc = subprocess.Popen(
+        proc = subprocess.Popen(  # pylint: disable=consider-using-with
             command,
             shell=self.shell,
             cwd=self.cwd,
@@ -574,10 +574,7 @@ def __init__(self, *args, **kwargs):
             self.use_gvisor = True
 
         # Decide what Docker image to use, based on priorities:
-        # Use the Docker image set by our feature flag: ``testing`` or,
-        if self.project.has_feature(Feature.USE_TESTING_BUILD_IMAGE):
-            self.container_image = 'readthedocs/build:testing'
-        # the image set by user or,
+        # The image set by user or,
         if self.config and self.config.docker_image:
             self.container_image = self.config.docker_image
         # the image overridden by the project (manually set by an admin).
@@ -633,8 +630,8 @@ def __enter__(self):
                 )
                 client = self.get_client()
                 client.remove_container(self.container_id)
-        except (DockerAPIError, ConnectionError) as e:
-            raise BuildAppError(e.explanation)
+        except (DockerAPIError, ConnectionError) as exc:
+            raise BuildAppError(exc.explanation) from exc
 
         # Create the checkout path if it doesn't exist to avoid Docker creation
         if not os.path.exists(self.project.doc_path):
@@ -707,8 +704,8 @@ def get_client(self):
                 version=DOCKER_VERSION,
             )
             return self.client
-        except DockerException as e:
-            raise BuildAppError(e.explanation)
+        except DockerException as exc:
+            raise BuildAppError(exc.explanation) from exc
 
     def _get_binds(self):
         """
@@ -825,5 +822,5 @@ def create_container(self):
                 runtime=docker_runtime,
             )
             client.start(container=self.container_id)
-        except (DockerAPIError, ConnectionError) as e:
-            raise BuildAppError(e.explanation)
+        except (DockerAPIError, ConnectionError) as exc:
+            raise BuildAppError(exc.explanation) from exc
diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py
index 77cae1705ed..0b85408400f 100644
--- a/readthedocs/doc_builder/python_environments.py
+++ b/readthedocs/doc_builder/python_environments.py
@@ -307,22 +307,6 @@ def install_requirements_file(self, install):
             bin_path=self.venv_bin(),
         )
 
-    def list_packages_installed(self):
-        """List packages installed in pip."""
-        args = [
-            self.venv_bin(filename='python'),
-            '-m',
-            'pip',
-            'list',
-            # Include pre-release versions.
-            '--pre',
-        ]
-        self.build_env.run(
-            *args,
-            cwd=self.checkout_path,
-            bin_path=self.venv_bin(),
-        )
-
 
 class Conda(PythonEnvironment):
 
@@ -542,17 +526,3 @@ def install_requirements_file(self, install):
         # as the conda environment was created by using the ``environment.yml``
         # defined by the user, there is nothing to update at this point
         pass
-
-    def list_packages_installed(self):
-        """List packages installed in conda."""
-        args = [
-            self.conda_bin_name(),
-            'list',
-            '--name',
-            self.version.slug,
-        ]
-        self.build_env.run(
-            *args,
-            cwd=self.checkout_path,
-            bin_path=self.venv_bin(),
-        )
diff --git a/readthedocs/projects/admin.py b/readthedocs/projects/admin.py
index 1dbe8311fd7..eef90848f8f 100644
--- a/readthedocs/projects/admin.py
+++ b/readthedocs/projects/admin.py
@@ -28,11 +28,7 @@
     WebHook,
     WebHookEvent,
 )
-from .notifications import (
-    DeprecatedBuildWebhookNotification,
-    DeprecatedGitHubWebhookNotification,
-    ResourceUsageNotification,
-)
+from .notifications import ResourceUsageNotification
 from .tag_utils import import_tags
 from .tasks.utils import clean_project_resources
 
@@ -56,8 +52,6 @@ def has_delete_permission(self, request, obj=None):
 class ProjectSendNotificationView(SendNotificationView):
     notification_classes = [
         ResourceUsageNotification,
-        DeprecatedBuildWebhookNotification,
-        DeprecatedGitHubWebhookNotification,
     ]
 
     def get_object_recipients(self, obj):
diff --git a/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py b/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py
index f75e8eeba3f..49a84dab891 100644
--- a/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py
+++ b/readthedocs/projects/migrations/0021_add-webhook-deprecation-feature.py
@@ -3,21 +3,6 @@
 from django.db import migrations
 
-FEATURE_ID = 'allow_deprecated_webhooks'
-
-
-def forward_add_feature(apps, schema_editor):
-    Feature = apps.get_model('projects', 'Feature')
-    Feature.objects.create(
-        feature_id=FEATURE_ID,
-        default_true=True,
-    )
-
-
-def reverse_add_feature(apps, schema_editor):
-    Feature = apps.get_model('projects', 'Feature')
-    Feature.objects.filter(feature_id=FEATURE_ID).delete()
-
 
 class Migration(migrations.Migration):
 
@@ -26,5 +11,4 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.RunPython(forward_add_feature, reverse_add_feature),
     ]
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py
index 4d74f6b45ec..377b1e24034 100644
--- a/readthedocs/projects/models.py
+++ b/readthedocs/projects/models.py
@@ -1901,18 +1901,13 @@ def add_features(sender, **kwargs):
     # Feature constants - this is not a exhaustive list of features, features
     # may be added by other packages
-    ALLOW_DEPRECATED_WEBHOOKS = "allow_deprecated_webhooks"
     SKIP_SPHINX_HTML_THEME_PATH = "skip_sphinx_html_theme_path"
     MKDOCS_THEME_RTD = "mkdocs_theme_rtd"
     API_LARGE_DATA = "api_large_data"
     DONT_SHALLOW_CLONE = "dont_shallow_clone"
-    USE_TESTING_BUILD_IMAGE = "use_testing_build_image"
-    CLEAN_AFTER_BUILD = "clean_after_build"
     UPDATE_CONDA_STARTUP = "update_conda_startup"
     CONDA_APPEND_CORE_REQUIREMENTS = "conda_append_core_requirements"
     ALL_VERSIONS_IN_HTML_CONTEXT = "all_versions_in_html_context"
-    CACHED_ENVIRONMENT = "cached_environment"
-    LIMIT_CONCURRENT_BUILDS = "limit_concurrent_builds"
     CDN_ENABLED = "cdn_enabled"
     DOCKER_GVISOR_RUNTIME = "gvisor_runtime"
     RECORD_404_PAGE_VIEWS = "record_404_page_views"
@@ -1943,18 +1938,11 @@ def add_features(sender, **kwargs):
     INDEX_FROM_HTML_FILES = 'index_from_html_files'
 
     # Build related features
-    LIST_PACKAGES_INSTALLED_ENV = "list_packages_installed_env"
-    VCS_REMOTE_LISTING = "vcs_remote_listing"
-    SPHINX_PARALLEL = "sphinx_parallel"
-    USE_SPHINX_BUILDERS = "use_sphinx_builders"
-    CANCEL_OLD_BUILDS = "cancel_old_builds"
     DONT_CREATE_INDEX = "dont_create_index"
-    USE_RCLONE = "use_rclone"
     HOSTING_INTEGRATIONS = "hosting_integrations"
     NO_CONFIG_FILE_DEPRECATED = "no_config_file"
 
     FEATURES = (
-        (ALLOW_DEPRECATED_WEBHOOKS, _("Webhook: Allow deprecated webhook views")),
         (
             SKIP_SPHINX_HTML_THEME_PATH,
             _(
@@ -1969,18 +1957,10 @@ def add_features(sender, **kwargs):
             DONT_SHALLOW_CLONE,
             _("Build: Do not shallow clone when cloning git repos"),
         ),
-        (
-            USE_TESTING_BUILD_IMAGE,
-            _("Build: Use Docker image labelled as `testing` to build the docs"),
-        ),
         (
             API_LARGE_DATA,
             _("Build: Try alternative method of posting large data"),
         ),
-        (
-            CLEAN_AFTER_BUILD,
-            _("Build: Clean all files used in the build process"),
-        ),
         (
             UPDATE_CONDA_STARTUP,
             _("Conda: Upgrade conda before creating the environment"),
@@ -1996,16 +1976,6 @@ def add_features(sender, **kwargs):
                 "when building with Sphinx"
             ),
         ),
-        (
-            CACHED_ENVIRONMENT,
-            _(
-                "Build: Cache the environment (virtualenv, conda, pip cache, repository) in storage"
-            ),
-        ),
-        (
-            LIMIT_CONCURRENT_BUILDS,
-            _("Build: Limit the amount of concurrent builds"),
-        ),
         (
             CDN_ENABLED,
             _(
@@ -2097,45 +2067,12 @@ def add_features(sender, **kwargs):
             ),
         ),
-        (
-            LIST_PACKAGES_INSTALLED_ENV,
-            _(
-                'Build: List packages installed in the environment ("pip list" or "conda list") '
-                'on build\'s output',
-            ),
-        ),
-        (
-            VCS_REMOTE_LISTING,
-            _(
-                "Build: Use remote listing in VCS (e.g. git ls-remote) if supported for sync "
-                "versions"
-            ),
-        ),
-        (
-            SPHINX_PARALLEL,
-            _('Sphinx: Use "-j auto" when calling sphinx-build'),
-        ),
-        (
-            USE_SPHINX_BUILDERS,
-            _("Sphinx: Use regular sphinx builders instead of custom RTD builders"),
-        ),
-        (
-            CANCEL_OLD_BUILDS,
-            _(
-                "Build: Cancel triggered/running builds when a new one with same project/version "
-                "arrives"
-            ),
-        ),
         (
             DONT_CREATE_INDEX,
             _(
                 "Sphinx: Do not create index.md or README.rst if the project does not have one."
             ),
         ),
-        (
-            USE_RCLONE,
-            _("Build: Use rclone for syncing files to the media storage."),
-        ),
         (
             HOSTING_INTEGRATIONS,
             _(
diff --git a/readthedocs/projects/notifications.py b/readthedocs/projects/notifications.py
index 67432a9fbe5..16e4f40e94e 100644
--- a/readthedocs/projects/notifications.py
+++ b/readthedocs/projects/notifications.py
@@ -1,12 +1,10 @@
 """Project notifications."""
 
-from django.http import HttpRequest
 from django.urls import reverse
 from django.utils.translation import gettext_lazy as _
 from messages_extends.constants import ERROR_PERSISTENT
 
-from readthedocs.core.permissions import AdminPermission
 from readthedocs.notifications import Notification, SiteNotification
 from readthedocs.notifications.constants import REQUIREMENT
 
@@ -28,43 +26,6 @@ class EmailConfirmNotification(SiteNotification):
     )
 
     def get_context_data(self):
-        context = super(EmailConfirmNotification, self).get_context_data()
+        context = super().get_context_data()
         context.update({'account_email_url': reverse('account_email')})
         return context
-
-
-class DeprecatedViewNotification(Notification):
-
-    """Notification to alert user of a view that is going away."""
-
-    context_object_name = 'project'
-    subject = '{{ project.name }} project webhook needs to be updated'
-    level = REQUIREMENT
-
-    @classmethod
-    def notify_project_users(cls, projects):
-        """
-        Notify project users of deprecated view.
-
-        :param projects: List of project instances
-        :type projects: [:py:class:`Project`]
-        """
-        for project in projects:
-            # Send one notification to each admin of the project
-            for user in AdminPermission.admins(project):
-                notification = cls(
-                    context_object=project,
-                    request=HttpRequest(),
-                    user=user,
-                )
-                notification.send()
-
-
-class DeprecatedGitHubWebhookNotification(DeprecatedViewNotification):
-
-    name = 'deprecated_github_webhook'
-
-
-class DeprecatedBuildWebhookNotification(DeprecatedViewNotification):
-
-    name = 'deprecated_build_webhook'
diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py
index 3eba0a9d24d..d8328aeaa84 100644
--- a/readthedocs/projects/tasks/builds.py
+++ b/readthedocs/projects/tasks/builds.py
@@ -63,7 +63,7 @@
     RepositoryError,
     SyncRepositoryLocked,
 )
-from ..models import APIProject, Feature, WebHookEvent
+from ..models import APIProject, WebHookEvent
 from ..signals import before_vcs
 from .mixins import SyncRepositoryMixin
 from .search import fileify
@@ -225,12 +225,7 @@ def execute(self):
             verbose_name=self.data.version.verbose_name,
             version_type=self.data.version.type,
         )
-        if any(
-            [
-                not vcs_repository.supports_lsremote,
-                not self.data.project.has_feature(Feature.VCS_REMOTE_LISTING),
-            ]
-        ):
+        if not vcs_repository.supports_lsremote:
             log.info("Syncing repository via full clone.")
             vcs_repository.update()
         else:
@@ -881,10 +876,7 @@ def store_build_artifacts(self):
                 version_type=self.data.version.type,
             )
             try:
-                if self.data.project.has_feature(Feature.USE_RCLONE):
-                    build_media_storage.rclone_sync_directory(from_path, to_path)
-                else:
-                    build_media_storage.sync_directory(from_path, to_path)
+                build_media_storage.rclone_sync_directory(from_path, to_path)
             except Exception as exc:
                 # NOTE: the exceptions reported so far are:
                 # - botocore.exceptions:HTTPClientError
diff --git a/readthedocs/projects/tasks/mixins.py b/readthedocs/projects/tasks/mixins.py
index 66ea91e8873..830e13ca458 100644
--- a/readthedocs/projects/tasks/mixins.py
+++ b/readthedocs/projects/tasks/mixins.py
@@ -48,7 +48,6 @@ def sync_versions(self, vcs_repository):
         use_lsremote = (
             vcs_repository.supports_lsremote
             and not vcs_repository.repo_exists()
-            and self.data.project.has_feature(Feature.VCS_REMOTE_LISTING)
         )
         sync_tags = vcs_repository.supports_tags and not self.data.project.has_feature(
             Feature.SKIP_SYNC_TAGS
diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py
index 3646d3f8652..e57dc18d200 100644
--- a/readthedocs/projects/tests/test_build_tasks.py
+++ b/readthedocs/projects/tests/test_build_tasks.py
@@ -197,7 +197,7 @@ def test_build_sphinx_formats(self, load_yaml_config, formats, builders):
                     "-T",
                     "-E",
                     "-b",
-                    "readthedocs",
+                    "html",
                     "-d",
                     "_build/doctrees",
                     "-D",
@@ -580,7 +580,7 @@ def test_successful_build(
 
         assert BuildData.objects.all().exists()
 
-        self.mocker.mocks["build_media_storage"].sync_directory.assert_has_calls(
+        self.mocker.mocks["build_media_storage"].rclone_sync_directory.assert_has_calls(
            [
                mock.call(mock.ANY, "html/project/latest"),
                mock.call(mock.ANY, "json/project/latest"),
@@ -749,7 +749,7 @@ def test_build_commands_executed(
                     "-T",
                     "-E",
                     "-b",
-                    "readthedocs",
+                    "html",
                     "-d",
                     "_build/doctrees",
                     "-D",
@@ -1335,7 +1335,7 @@ def test_sphinx_fail_on_warning(self, load_yaml_config):
                     "-W",  # fail on warning flag
                     "--keep-going",  # fail on warning flag
                     "-b",
-                    "readthedocs",
+                    "html",
                     "-d",
                     "_build/doctrees",
                     "-D",
@@ -1685,10 +1685,10 @@ def test_submodules_exclude_all(self, load_yaml_config):
     @pytest.mark.parametrize(
         "value,command",
         [
-            ("html", "readthedocs"),
-            ("htmldir", "readthedocsdirhtml"),
-            ("dirhtml", "readthedocsdirhtml"),
-            ("singlehtml", "readthedocssinglehtml"),
+            ("html", "html"),
+            ("htmldir", "dirhtml"),
+            ("dirhtml", "dirhtml"),
+            ("singlehtml", "singlehtml"),
         ],
     )
     @mock.patch("readthedocs.doc_builder.director.load_yaml_config")
@@ -1780,20 +1780,13 @@ def test_clean_build_after_failure_in_sync_repository(self, clean_build, execute
     def test_check_duplicate_reserved_version_latest(self, on_failure, verbose_name):
         # `repository.tags` and `repository.branch` both will return a tag/branch named `latest/stable`
         with mock.patch(
-            'readthedocs.vcs_support.backends.git.Backend.branches',
-            new_callable=mock.PropertyMock,
-            return_value=[
-                mock.MagicMock(identifier='a1b2c3', verbose_name=verbose_name),
-            ],
+            "readthedocs.vcs_support.backends.git.Backend.lsremote",
+            return_value=[
+                [mock.MagicMock(identifier="branch/a1b2c3", verbose_name=verbose_name)],
+                [mock.MagicMock(identifier="tag/a1b2c3", verbose_name=verbose_name)],
+            ],
         ):
-            with mock.patch(
-                'readthedocs.vcs_support.backends.git.Backend.tags',
-                new_callable=mock.PropertyMock,
-                return_value=[
-                    mock.MagicMock(identifier='a1b2c3', verbose_name=verbose_name),
-                ],
-            ):
-                self._trigger_sync_repository_task()
+            self._trigger_sync_repository_task()
 
         on_failure.assert_called_once_with(
             # This argument is the exception we are intereste, but I don't know
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py
index f19355c981c..8f56d209fc6 100644
--- a/readthedocs/rtd_tests/tests/test_api.py
+++ b/readthedocs/rtd_tests/tests/test_api.py
@@ -2790,7 +2790,7 @@ def test_get_version_by_id(self):
                 "environment_variables": {},
                 "enable_epub_build": True,
                 "enable_pdf_build": True,
-                "features": ["allow_deprecated_webhooks"],
+                "features": [],
                 "has_valid_clone": False,
                 "has_valid_webhook": False,
                 "id": 6,
diff --git a/readthedocs/rtd_tests/tests/test_build_storage.py b/readthedocs/rtd_tests/tests/test_build_storage.py
index 95b4bed50ec..78e138e7b7e 100644
--- a/readthedocs/rtd_tests/tests/test_build_storage.py
+++ b/readthedocs/rtd_tests/tests/test_build_storage.py
@@ -63,7 +63,7 @@ def test_sync_directory(self):
             'test.html',
         ]
         with override_settings(DOCROOT=tmp_files_dir):
-            self.storage.sync_directory(tmp_files_dir, storage_dir)
+            self.storage.rclone_sync_directory(tmp_files_dir, storage_dir)
         self.assertFileTree(storage_dir, tree)
 
         tree = [
@@ -73,18 +73,16 @@ def test_sync_directory(self):
         ]
         os.remove(os.path.join(tmp_files_dir, 'api.fjson'))
         with override_settings(DOCROOT=tmp_files_dir):
-            self.storage.sync_directory(tmp_files_dir, storage_dir)
+            self.storage.rclone_sync_directory(tmp_files_dir, storage_dir)
         self.assertFileTree(storage_dir, tree)
 
         tree = [
-            # Cloud storage generally doesn't consider empty directories to exist
-            ('api', []),
             'conf.py',
             'test.html',
         ]
         shutil.rmtree(os.path.join(tmp_files_dir, 'api'))
         with override_settings(DOCROOT=tmp_files_dir):
-            self.storage.sync_directory(tmp_files_dir, storage_dir)
+            self.storage.rclone_sync_directory(tmp_files_dir, storage_dir)
         self.assertFileTree(storage_dir, tree)
 
     def test_sync_directory_source_symlink(self):
@@ -94,7 +92,7 @@ def test_sync_directory_source_symlink(self):
 
         with override_settings(DOCROOT=tmp_dir):
             with pytest.raises(SuspiciousFileOperation, match="symbolic link"):
-                self.storage.sync_directory(tmp_symlink_dir, "files")
+                self.storage.rclone_sync_directory(tmp_symlink_dir, "files")
 
     def test_copy_directory_source_symlink(self):
         tmp_dir = Path(tempfile.mkdtemp())
@@ -112,7 +110,7 @@ def test_sync_directory_source_outside_docroot(self):
 
         with override_settings(DOCROOT=tmp_docroot):
             with pytest.raises(SuspiciousFileOperation, match="outside the docroot"):
-                self.storage.sync_directory(tmp_dir, "files")
+                self.storage.rclone_sync_directory(tmp_dir, "files")
 
     def test_copy_directory_source_outside_docroot(self):
         tmp_dir = Path(tempfile.mkdtemp())
diff --git a/readthedocs/rtd_tests/tests/test_core_utils.py b/readthedocs/rtd_tests/tests/test_core_utils.py
index bfd3c4420cc..fa57b0895cb 100644
--- a/readthedocs/rtd_tests/tests/test_core_utils.py
+++ b/readthedocs/rtd_tests/tests/test_core_utils.py
@@ -10,7 +10,7 @@
 from readthedocs.builds.models import Build, Version
 from readthedocs.core.utils import slugify, trigger_build
 from readthedocs.doc_builder.exceptions import BuildMaxConcurrencyError
-from readthedocs.projects.models import Feature, Project
+from readthedocs.projects.models import Project
 
 
 class CoreUtilTests(TestCase):
@@ -168,11 +168,6 @@ def test_trigger_build_rounded_time_limit(self, update_docs):
     @pytest.mark.xfail(reason='Fails while we work out Docker time limits', strict=True)
     @mock.patch('readthedocs.projects.tasks.builds.update_docs_task')
     def test_trigger_max_concurrency_reached(self, update_docs):
-        get(
-            Feature,
-            feature_id=Feature.LIMIT_CONCURRENT_BUILDS,
-            projects=[self.project],
-        )
         max_concurrent_builds = 2
         for _ in range(max_concurrent_builds):
             get(
diff --git a/readthedocs/rtd_tests/tests/test_doc_builder.py b/readthedocs/rtd_tests/tests/test_doc_builder.py
index 97576cd4862..2fa92682511 100644
--- a/readthedocs/rtd_tests/tests/test_doc_builder.py
+++ b/readthedocs/rtd_tests/tests/test_doc_builder.py
@@ -263,11 +263,6 @@ def test_multiple_conf_py(
 
     @mock.patch('readthedocs.doc_builder.config.load_config')
     def test_use_sphinx_builders(self, load_config):
-        feature = get(
-            Feature,
-            feature_id=Feature.USE_SPHINX_BUILDERS,
-        )
-
         config_data = {'version': 2, 'sphinx': {'configuration': 'docs/conf.py'}}
         load_config.side_effect = create_load(config_data)
         config = load_yaml_config(self.version)
@@ -277,27 +272,6 @@ def test_use_sphinx_builders(self, load_config):
             build_env=self.build_env,
             config=config,
         )
-        builder = HtmlBuilder(
-            build_env=self.build_env,
-            python_env=python_env,
-        )
-        self.assertEqual(builder.sphinx_builder, 'readthedocs')
-
-        builder = HtmlDirBuilder(
-            build_env=self.build_env,
-            python_env=python_env,
-        )
-        self.assertEqual(builder.sphinx_builder, 'readthedocsdirhtml')
-
-        builder = SingleHtmlBuilder(
-            build_env=self.build_env,
-            python_env=python_env,
-        )
-        self.assertEqual(builder.sphinx_builder, 'readthedocssinglehtml')
-
-        # Add the feature to use the regular builders
-        feature.projects.add(self.project)
-
         builder = HtmlBuilder(
             build_env=self.build_env,
             python_env=python_env,
diff --git a/readthedocs/rtd_tests/tests/test_notifications.py b/readthedocs/rtd_tests/tests/test_notifications.py
index 0bd33ba37f4..94b1112cf99 100644
--- a/readthedocs/rtd_tests/tests/test_notifications.py
+++ b/readthedocs/rtd_tests/tests/test_notifications.py
@@ -6,7 +6,6 @@
 import django_dynamic_fixture as fixture
 from allauth.account.models import EmailAddress
 from django.contrib.auth.models import AnonymousUser, User
-from django.http import HttpRequest
 from django.test import TestCase
 from django.test.utils import override_settings
 from messages_extends.models import Message as PersistentMessage
@@ -19,11 +18,6 @@
     INFO_NON_PERSISTENT,
     WARNING_NON_PERSISTENT,
 )
-from readthedocs.projects.models import Project
-from readthedocs.projects.notifications import (
-    DeprecatedBuildWebhookNotification,
-    DeprecatedGitHubWebhookNotification,
-)
 
 
 @override_settings(
@@ -272,43 +266,3 @@ def test_message(self):
         with mock.patch('readthedocs.notifications.notification.log') as mock_log:
             self.assertEqual(self.n.get_message(False), '')
             mock_log.error.assert_called_once()
-
-
-class DeprecatedWebhookEndpointNotificationTests(TestCase):
-
-    def setUp(self):
-        PersistentMessage.objects.all().delete()
-
-        self.user = fixture.get(User)
-        self.project = fixture.get(Project, users=[self.user])
-        self.request = HttpRequest()
-
-        self.notification = DeprecatedBuildWebhookNotification(
-            self.project,
-            self.request,
-            self.user,
-        )
-
-    @mock.patch('readthedocs.notifications.backends.send_email')
-    def test_dedupliation(self, send_email):
-        user = fixture.get(User)
-        project = fixture.get(Project, main_language_project=None)
-        project.users.add(user)
-        project.refresh_from_db()
-        self.assertEqual(project.users.count(), 1)
-
-        self.assertEqual(PersistentMessage.objects.filter(user=user).count(), 0)
-        DeprecatedGitHubWebhookNotification.notify_project_users([project])
-
-        # Site and email notification will go out, site message doesn't have
-        # any reason to deduplicate yet
-        self.assertEqual(PersistentMessage.objects.filter(user=user).count(), 1)
-        self.assertTrue(send_email.called)
-        send_email.reset_mock()
-        self.assertFalse(send_email.called)
-
-        # Expect the site message to deduplicate, the email won't
-        DeprecatedGitHubWebhookNotification.notify_project_users([project])
-        self.assertEqual(PersistentMessage.objects.filter(user=user).count(), 1)
-        self.assertTrue(send_email.called)
-        send_email.reset_mock()
diff --git a/readthedocs/rtd_tests/tests/test_views.py b/readthedocs/rtd_tests/tests/test_views.py
index 097e217a4db..3a4980562a9 100644
--- a/readthedocs/rtd_tests/tests/test_views.py
+++ b/readthedocs/rtd_tests/tests/test_views.py
@@ -268,8 +268,9 @@ def test_build_list_includes_external_versions(self):
         self.assertEqual(response.status_code, 200)
         self.assertIn(external_version_build, response.context['build_qs'])
 
-    @mock.patch('readthedocs.projects.tasks.builds.update_docs_task')
-    def test_rebuild_specific_commit(self, mock):
+    @mock.patch("readthedocs.projects.tasks.builds.update_docs_task")
+    @mock.patch("readthedocs.core.utils.cancel_build")
+    def test_rebuild_specific_commit(self, mock_update_docs_task, mock_cancel_build):
         builds_count = Build.objects.count()
 
         version = self.pip.versions.first()