diff --git a/readthedocs/doc_builder/director.py b/readthedocs/doc_builder/director.py new file mode 100644 index 00000000000..77a5ade28ee --- /dev/null +++ b/readthedocs/doc_builder/director.py @@ -0,0 +1,457 @@ +import os +from collections import defaultdict + +import structlog +from django.conf import settings +from django.utils.translation import gettext_lazy as _ + +from readthedocs.builds.constants import EXTERNAL +from readthedocs.doc_builder.config import load_yaml_config +from readthedocs.doc_builder.loader import get_builder_class +from readthedocs.doc_builder.python_environments import Conda, Virtualenv +from readthedocs.projects.exceptions import RepositoryError +from readthedocs.projects.models import Feature +from readthedocs.projects.signals import after_build, before_build, before_vcs + +log = structlog.get_logger(__name__) + + +class BuildDirector: + + """ + Encapsulates all the logic to perform a build for user's documentation. + + This class handles all the VCS commands, setup OS and language (e.g. only + Python for now) environment (via virtualenv or conda), installs all the + required basic and user packages, and finally execute the build commands + (e.g. Sphinx or MkDocs) to generate the artifacts. + + Note that this class *is not* in charge of doing anything related to Read + the Docs, the platform, itself. These include not updating the `Build`'s + status, or uploading the artifacts to the storage, creating the search + index, among others. + """ + + def __init__(self, data): + """ + Initializer. + + :param data: object with all the data grabbed by Celery task in + ``before_start`` and used as a way to share data with this class + by-directionally. + + :type data: readthedocs.projects.tasks.builds.TaskData + + """ + self.data = data + + def setup_vcs(self): + """ + Perform all VCS related steps. + + 1. clone the repository + 2. checkout specific commit/identifier + 3. load the config file + 4. 
checkout submodules + """ + # Make dirs if it does not exist to clone the repository under it + if not os.path.exists(self.data.project.doc_path): + os.makedirs(self.data.project.doc_path) + + if not self.data.project.vcs_class(): + raise RepositoryError( + _('Repository type "{repo_type}" unknown').format( + repo_type=self.data.project.repo_type, + ), + ) + + environment = self.data.environment_class( + project=self.data.project, + version=self.data.version, + build=self.data.build, + environment=self.get_vcs_env_vars(), + # Force the ``container_image`` to use one that has the latest + # ca-certificate package which is compatible with Lets Encrypt + container_image=settings.RTD_DOCKER_BUILD_SETTINGS["os"]["ubuntu-20.04"], + ) + with environment: + before_vcs.send( + sender=self.data.version, + environment=environment, + ) + + # Create the VCS repository where all the commands are going to be + # executed for a particular VCS type + self.vcs_repository = self.data.project.vcs_repo( + version=self.data.version.slug, + environment=environment, + verbose_name=self.data.version.verbose_name, + version_type=self.data.version.type, + ) + + self.pre_checkout() + self.checkout() + self.post_checkout() + + commit = self.data.build_commit or self.vcs_repository.commit + if commit: + self.data.build["commit"] = commit + + def setup_environment(self): + """ + Create the environment and install required dependencies. + + 1. install OS dependencies (apt) + 2. create language (e.g. Python) environment + 3. 
install dependencies into the environment + """ + self.build_environment = self.data.environment_class( + project=self.data.project, + version=self.data.version, + config=self.data.config, + build=self.data.build, + environment=self.get_build_env_vars(), + ) + + # Environment used for building code, usually with Docker + with self.build_environment: + language_environment_cls = Virtualenv + if any( + [ + self.data.config.conda is not None, + self.data.config.python_interpreter in ("conda", "mamba"), + ] + ): + language_environment_cls = Conda + + self.language_environment = language_environment_cls( + version=self.data.version, + build_env=self.build_environment, + config=self.data.config, + ) + + # TODO: check if `before_build` and `after_build` are still useful + # (maybe in commercial?) + # + # I didn't find they are used anywhere, we should probably remove them + before_build.send( + sender=self.data.version, + environment=self.build_environment, + ) + + self.pre_system_dependencies() + self.system_dependencies() + self.post_system_dependencies() + + # Install all ``build.tools`` specified by the user + if self.data.config.using_build_tools: + self.language_environment.install_build_tools() + + self.pre_create_environment() + self.create_environment() + self.post_create_environment() + + self.pre_install() + self.install() + self.post_install() + + # TODO: remove this and document how to do it on `build.jobs.post_install` + if self.data.project.has_feature(Feature.LIST_PACKAGES_INSTALLED_ENV): + self.language_environment.list_packages_installed() + + def build(self): + """ + Build all the formats specified by the user. + + 1. build HTML + 2. build HTMLZzip + 3. build PDF + 4. 
build ePub + """ + with self.build_environment: + self.data.outcomes = defaultdict(lambda: False) + self.data.outcomes["html"] = self.build_html() + self.data.outcomes["search"] = self.is_type_sphinx() + self.data.outcomes["localmedia"] = self.build_htmlzip() + self.data.outcomes["pdf"] = self.build_pdf() + self.data.outcomes["epub"] = self.build_epub() + + after_build.send( + sender=self.data.version, + ) + + # VCS checkout + def pre_checkout(self): + # We can't do too much here because we haven't cloned the repository + # yet and we don't know what the user wrote in the `.readthedocs.yaml` + # yet. + # + # We could implement something different in the future if we download + # the `.readthedocs.yaml` file without cloning. + # See https://github.com/readthedocs/readthedocs.org/issues/8935 + pass + + def checkout(self): + log.info( + "Clonning repository.", + ) + self.vcs_repository.update() + + identifier = self.data.build_commit or self.data.version.identifier + log.info("Checking out.", identifier=identifier) + self.vcs_repository.checkout(identifier) + + self.data.config = load_yaml_config(version=self.data.version) + self.data.build["config"] = self.data.config.as_dict() + + if self.vcs_repository.supports_submodules: + self.vcs_repository.update_submodules(self.data.config) + + def post_checkout(self): + commands = [] # self.data.config.build.jobs.post_checkout + for command in commands: + self.build_environment.run(command) + + # System dependencies (``build.apt_packages``) + def pre_system_dependencies(self): + commands = [] # self.data.config.build.jobs.pre_system_dependencies + for command in commands: + self.build_environment.run(command) + + # NOTE: `system_dependencies` should not be possible to override by the + # user because it's executed as ``RTD_DOCKER_USER`` (e.g. ``root``) user. + def system_dependencies(self): + """ + Install apt packages from the config file. + + We don't allow to pass custom options or install from a path. 
+ The packages names are already validated when reading the config file. + + .. note:: + + ``--quiet`` won't suppress the output, + it would just remove the progress bar. + """ + packages = self.data.config.build.apt_packages + if packages: + self.build_environment.run( + "apt-get", + "update", + "--assume-yes", + "--quiet", + user=settings.RTD_DOCKER_SUPER_USER, + ) + + # put ``--`` to end all command arguments. + self.build_environment.run( + "apt-get", + "install", + "--assume-yes", + "--quiet", + "--", + *packages, + user=settings.RTD_DOCKER_SUPER_USER, + ) + + def post_system_dependencies(self): + pass + + # Language environment + def pre_create_environment(self): + commands = [] # self.data.config.build.jobs.pre_create_environment + for command in commands: + self.build_environment.run(command) + + def create_environment(self): + commands = [] # self.data.config.build.jobs.create_environment + for command in commands: + self.build_environment.run(command) + + if not commands: + self.language_environment.setup_base() + + def post_create_environment(self): + commands = [] # self.data.config.build.jobs.post_create_environment + for command in commands: + self.build_environment.run(command) + + # Install + def pre_install(self): + commands = [] # self.data.config.build.jobs.pre_install + for command in commands: + self.build_environment.run(command) + + def install(self): + commands = [] # self.data.config.build.jobs.install + for command in commands: + self.build_environment.run(command) + + if not commands: + self.language_environment.install_core_requirements() + self.language_environment.install_requirements() + + def post_install(self): + commands = [] # self.data.config.build.jobs.post_install + for command in commands: + self.build_environment.run(command) + + # Build + def pre_build(self): + commands = [] # self.data.config.build.jobs.pre_build + for command in commands: + self.build_environment.run(command) + + def build_html(self): + commands = [] # 
self.data.config.build.jobs.build.html + if commands: + for command in commands: + self.build_environment.run(command) + return True + + return self.build_docs_class(self.data.config.doctype) + + def build_pdf(self): + if "pdf" not in self.data.config.formats or self.data.version.type == EXTERNAL: + return False + + commands = [] # self.data.config.build.jobs.build.pdf + if commands: + for command in commands: + self.build_environment.run(command) + return True + + # Mkdocs has no pdf generation currently. + if self.is_type_sphinx(): + return self.build_docs_class("sphinx_pdf") + + return False + + def build_htmlzip(self): + if ( + "htmlzip" not in self.data.config.formats + or self.data.version.type == EXTERNAL + ): + return False + + commands = [] # self.data.config.build.jobs.build.htmlzip + if commands: + for command in commands: + self.build_environment.run(command) + return True + + # We don't generate a zip for mkdocs currently. + if self.is_type_sphinx(): + return self.build_docs_class("sphinx_singlehtmllocalmedia") + return False + + def build_epub(self): + if "epub" not in self.data.config.formats or self.data.version.type == EXTERNAL: + return False + + commands = [] # self.data.config.build.jobs.build.epub + if commands: + for command in commands: + self.build_environment.run(command) + return True + + # Mkdocs has no epub generation currently. + if self.is_type_sphinx(): + return self.build_docs_class("sphinx_epub") + return False + + def post_build(self): + commands = [] # self.data.config.build.jobs.post_build + for command in commands: + self.build_environment.run(command) + + # Helpers + # + # TODO: move somewhere or change names to make them private or something to + # easily differentiate them from the normal flow. + def build_docs_class(self, builder_class): + """ + Build docs with additional doc backends. + + These steps are not necessarily required for the build to halt, so we + only raise a warning exception here. 
A hard error will halt the build + process. + """ + builder = get_builder_class(builder_class)( + build_env=self.build_environment, + python_env=self.language_environment, + ) + + if builder_class == self.data.config.doctype: + builder.append_conf() + self.data.version.documentation_type = builder.get_final_doctype() + + success = builder.build() + builder.move() + + return success + + def get_vcs_env_vars(self): + """Get environment variables to be included in the VCS setup step.""" + env = self.get_rtd_env_vars() + # Don't prompt for username, this requires Git 2.3+ + env["GIT_TERMINAL_PROMPT"] = "0" + return env + + def get_rtd_env_vars(self): + """Get bash environment variables specific to Read the Docs.""" + env = { + "READTHEDOCS": "True", + "READTHEDOCS_VERSION": self.data.version.slug, + "READTHEDOCS_PROJECT": self.data.project.slug, + "READTHEDOCS_LANGUAGE": self.data.project.language, + } + return env + + def get_build_env_vars(self): + """Get bash environment variables used for all builder commands.""" + env = self.get_rtd_env_vars() + + # https://no-color.org/ + env["NO_COLOR"] = "1" + + if self.data.config.conda is not None: + env.update( + { + "CONDA_ENVS_PATH": os.path.join( + self.data.project.doc_path, "conda" + ), + "CONDA_DEFAULT_ENV": self.data.version.slug, + "BIN_PATH": os.path.join( + self.data.project.doc_path, + "conda", + self.data.version.slug, + "bin", + ), + } + ) + else: + env.update( + { + "BIN_PATH": os.path.join( + self.data.project.doc_path, + "envs", + self.data.version.slug, + "bin", + ), + } + ) + + # Update environment from Project's specific environment variables, + # avoiding to expose private environment variables + # if the version is external (i.e. a PR build). 
+ env.update( + self.data.project.environment_variables( + public_only=self.data.version.is_external + ) + ) + + return env + + def is_type_sphinx(self): + """Is documentation type Sphinx.""" + return "sphinx" in self.data.config.doctype diff --git a/readthedocs/projects/tasks/builds.py b/readthedocs/projects/tasks/builds.py index a52f6254737..573d162a732 100644 --- a/readthedocs/projects/tasks/builds.py +++ b/readthedocs/projects/tasks/builds.py @@ -5,24 +5,14 @@ rebuilding documentation. """ -import datetime -import json -import os import signal import socket -import tarfile -import tempfile -from collections import Counter, defaultdict +import structlog from celery import Task from django.conf import settings -from django.db.models import Q from django.utils import timezone -from django.utils.translation import ugettext_lazy as _ from slumber.exceptions import HttpClientError -from sphinx.ext import intersphinx - -import structlog from readthedocs.api.v2.client import api as api_v2 from readthedocs.builds import tasks as build_tasks @@ -39,44 +29,33 @@ LATEST_VERBOSE_NAME, STABLE_VERBOSE_NAME, ) -from readthedocs.builds.models import APIVersion, Build, Version +from readthedocs.builds.models import Build from readthedocs.builds.signals import build_complete from readthedocs.config import ConfigError -from readthedocs.doc_builder.config import load_yaml_config +from readthedocs.doc_builder.director import BuildDirector from readthedocs.doc_builder.environments import ( DockerBuildEnvironment, LocalBuildEnvironment, ) from readthedocs.doc_builder.exceptions import ( BuildAppError, - BuildUserError, + BuildCancelled, BuildMaxConcurrencyError, + BuildUserError, DuplicatedBuildError, - BuildCancelled, + MkDocsYAMLParseError, ProjectBuildsSkippedError, YAMLParseError, - MkDocsYAMLParseError, ) -from readthedocs.doc_builder.loader import get_builder_class -from readthedocs.doc_builder.python_environments import Conda, Virtualenv -from readthedocs.search.utils import 
index_new_files, remove_indexed_files -from readthedocs.sphinx_domains.models import SphinxDomain -from readthedocs.storage import build_environment_storage, build_media_storage +from readthedocs.storage import build_media_storage from readthedocs.worker import app - -from ..exceptions import RepositoryError, ProjectConfigurationError -from ..models import APIProject, Feature, WebHookEvent, HTMLFile, ImportedFile, Project -from ..signals import ( - after_build, - before_build, - before_vcs, - files_changed, -) - +from ..exceptions import ProjectConfigurationError, RepositoryError +from ..models import APIProject, Feature, WebHookEvent +from ..signals import before_vcs from .mixins import SyncRepositoryMixin -from .utils import clean_build, BuildRequest, send_external_build_status from .search import fileify +from .utils import BuildRequest, clean_build, send_external_build_status log = structlog.get_logger(__name__) @@ -99,8 +78,6 @@ class TaskData: See https://docs.celeryproject.org/en/master/userguide/tasks.html#instantiation """ - pass - class SyncRepositoryTask(SyncRepositoryMixin, Task): @@ -169,7 +146,9 @@ def execute(self): environment = self.data.environment_class( project=self.data.project, version=self.data.version, - environment=self.get_vcs_env_vars(), + environment={ + "GIT_TERMINAL_PROMPT": "0", + }, # Do not try to save commands on the db because they are just for # sync repository record=False, @@ -180,25 +159,25 @@ def execute(self): sender=self.data.version, environment=environment, ) - self.update_versions_from_repository(environment) - def update_versions_from_repository(self, environment): - """ - Update Read the Docs versions from VCS repository. 
+ vcs_repository = self.data.project.vcs_repo( + version=self.data.version.slug, + environment=environment, + verbose_name=self.data.version.verbose_name, + version_type=self.data.version.type, + ) + if any( + [ + not vcs_repository.supports_lsremote, + not self.data.project.has_feature(Feature.VCS_REMOTE_LISTING), + ] + ): + log.info("Syncing repository via full clone.") + vcs_repository.update() + else: + log.info("Syncing repository via remote listing.") - Depending if the VCS backend supports remote listing, we just list its branches/tags - remotely or we do a full clone and local listing of branches/tags. - """ - version_repo = self.get_vcs_repo(environment) - if any([ - not version_repo.supports_lsremote, - not self.data.project.has_feature(Feature.VCS_REMOTE_LISTING), - ]): - log.info('Syncing repository via full clone.') - self.sync_repo(environment) - else: - log.info('Syncing repository via remote listing.') - self.sync_versions(version_repo) + self.sync_versions(vcs_repository) @app.task( @@ -447,7 +426,6 @@ def on_success(self, retval, task_id, args, kwargs): # Store build artifacts to storage (local or cloud storage) self.store_build_artifacts( - self.data.build_env, html=html, search=search, localmedia=localmedia, @@ -459,13 +437,15 @@ def on_success(self, retval, task_id, args, kwargs): # HTML are built successfully. if html: try: - api_v2.version(self.data.version.pk).patch({ - 'built': True, - 'documentation_type': self.get_final_doctype(), - 'has_pdf': pdf, - 'has_epub': epub, - 'has_htmlzip': localmedia, - }) + api_v2.version(self.data.version.pk).patch( + { + "built": True, + "documentation_type": self.data.version.documentation_type, + "has_pdf": pdf, + "has_epub": epub, + "has_htmlzip": localmedia, + } + ) except HttpClientError: # NOTE: I think we should fail the build if we cannot update # the version at this point. 
Otherwise, we will have inconsistent data @@ -556,85 +536,25 @@ def update_build(self, state): log.exception('Unable to update build') def execute(self): - self.run_setup() - self.run_build() - - def run_setup(self): - """ - Run setup in a build environment. - - 1. Create a Docker container with the default image - 2. Clone the repository's code and submodules - 3. Save the `config` object into the database - 4. Update VCS submodules - """ - environment = self.data.environment_class( - project=self.data.project, - version=self.data.version, - build=self.data.build, - environment=self.get_vcs_env_vars(), - # Force the ``container_image`` to use one that has the latest - # ca-certificate package which is compatible with Lets Encrypt - container_image=settings.RTD_DOCKER_BUILD_SETTINGS['os']['ubuntu-20.04'] + self.data.build_director = BuildDirector( + data=self.data, ) - # Environment used for code checkout & initial configuration reading - with environment: - before_vcs.send( - sender=self.data.version, - environment=environment, - ) - - self.setup_vcs(environment) - self.data.config = load_yaml_config(version=self.data.version) - self.save_build_config() - self.update_vcs_submodules(environment) - - def update_vcs_submodules(self, environment): - version_repo = self.get_vcs_repo(environment) - if version_repo.supports_submodules: - version_repo.update_submodules(self.data.config) - - def run_build(self): - """Build the docs in an environment.""" - self.data.build_env = self.data.environment_class( - project=self.data.project, - version=self.data.version, - config=self.data.config, - build=self.data.build, - environment=self.get_build_env_vars(), - ) - - # Environment used for building code, usually with Docker - with self.data.build_env: - python_env_cls = Virtualenv - if any([ - self.data.config.conda is not None, - self.data.config.python_interpreter in ('conda', 'mamba'), - ]): - python_env_cls = Conda - - self.data.python_env = python_env_cls( - 
version=self.data.version, - build_env=self.data.build_env, - config=self.data.config, - ) + # Clonning + self.update_build(state=BUILD_STATE_CLONING) + self.data.build_director.setup_vcs() - # TODO: check if `before_build` and `after_build` are still useful - # (maybe in commercial?) - # - # I didn't find they are used anywhere, we should probably remove them - before_build.send( - sender=self.data.version, - environment=self.data.build_env, - ) + # Sync tags/branches from VCS repository into Read the Docs' `Version` + # objects in the database + self.sync_versions(self.data.build_director.vcs_repository) - self.setup_build() - self.build_docs() + # Installing + self.update_build(state=BUILD_STATE_INSTALLING) + self.data.build_director.setup_environment() - after_build.send( - sender=self.data.version, - ) + # Building + self.update_build(state=BUILD_STATE_BUILDING) + self.data.build_director.build() @staticmethod def get_project(project_pk): @@ -664,56 +584,6 @@ def get_build(build_pk): for key, val in build.items() if key not in private_keys } - def setup_vcs(self, environment): - """ - Update the checkout of the repo to make sure it's the latest. - - This also syncs versions in the DB. 
- """ - self.update_build(state=BUILD_STATE_CLONING) - self.sync_repo(environment) - - commit = self.data.build_commit or self.get_vcs_repo(environment).commit - if commit: - self.data.build['commit'] = commit - - def get_build_env_vars(self): - """Get bash environment variables used for all builder commands.""" - env = self.get_rtd_env_vars() - - # https://no-color.org/ - env['NO_COLOR'] = '1' - - if self.data.config.conda is not None: - env.update({ - 'CONDA_ENVS_PATH': os.path.join(self.data.project.doc_path, 'conda'), - 'CONDA_DEFAULT_ENV': self.data.version.slug, - 'BIN_PATH': os.path.join( - self.data.project.doc_path, - 'conda', - self.data.version.slug, - 'bin', - ), - }) - else: - env.update({ - 'BIN_PATH': os.path.join( - self.data.project.doc_path, - 'envs', - self.data.version.slug, - 'bin', - ), - }) - - # Update environment from Project's specific environment variables, - # avoiding to expose private environment variables - # if the version is external (i.e. a PR build). - env.update(self.data.project.environment_variables( - public_only=self.data.version.is_external - )) - - return env - # NOTE: this can be just updated on `self.data.build['']` and sent once the # build has finished to reduce API calls. def set_valid_clone(self): @@ -724,21 +594,8 @@ def set_valid_clone(self): self.data.project.has_valid_clone = True self.data.version.project.has_valid_clone = True - # TODO: think about reducing the amount of API calls. Can we just save the - # `config` in memory (`self.data.build['config']`) and update it later (e.g. - # together with the build status)? 
- def save_build_config(self): - """Save config in the build object.""" - pk = self.data.build['id'] - config = self.data.config.as_dict() - api_v2.build(pk).patch({ - 'config': config, - }) - self.data.build['config'] = config - def store_build_artifacts( self, - environment, html=False, localmedia=False, search=False, @@ -832,152 +689,6 @@ def store_build_artifacts( media_path=media_path, ) - def setup_build(self): - self.update_build(state=BUILD_STATE_INSTALLING) - - self.install_system_dependencies() - self.setup_python_environment() - - def setup_python_environment(self): - """ - Build the virtualenv and install the project into it. - - Always build projects with a virtualenv. - - :param build_env: Build environment to pass commands and execution through. - """ - # Install all ``build.tools`` specified by the user - if self.data.config.using_build_tools: - self.data.python_env.install_build_tools() - - self.data.python_env.setup_base() - self.data.python_env.install_core_requirements() - self.data.python_env.install_requirements() - if self.data.project.has_feature(Feature.LIST_PACKAGES_INSTALLED_ENV): - self.data.python_env.list_packages_installed() - - def install_system_dependencies(self): - """ - Install apt packages from the config file. - - We don't allow to pass custom options or install from a path. - The packages names are already validated when reading the config file. - - .. note:: - - ``--quiet`` won't suppress the output, - it would just remove the progress bar. - """ - packages = self.data.config.build.apt_packages - if packages: - self.data.build_env.run( - 'apt-get', 'update', '--assume-yes', '--quiet', - user=settings.RTD_DOCKER_SUPER_USER, - ) - # put ``--`` to end all command arguments. - self.data.build_env.run( - 'apt-get', 'install', '--assume-yes', '--quiet', '--', *packages, - user=settings.RTD_DOCKER_SUPER_USER, - ) - - def build_docs(self): - """ - Wrapper to all build functions. 
- - Executes the necessary builds for this task and returns whether the - build was successful or not. - - :returns: Build outcomes with keys for html, search, localmedia, pdf, - and epub - :rtype: dict - """ - self.update_build(state=BUILD_STATE_BUILDING) - - self.data.outcomes = defaultdict(lambda: False) - self.data.outcomes['html'] = self.build_docs_html() - self.data.outcomes['search'] = self.build_docs_search() - self.data.outcomes['localmedia'] = self.build_docs_localmedia() - self.data.outcomes['pdf'] = self.build_docs_pdf() - self.data.outcomes['epub'] = self.build_docs_epub() - - return self.data.outcomes - - def build_docs_html(self): - """Build HTML docs.""" - html_builder = get_builder_class(self.data.config.doctype)( - build_env=self.data.build_env, - python_env=self.data.python_env, - ) - html_builder.append_conf() - success = html_builder.build() - if success: - html_builder.move() - - return success - - def get_final_doctype(self): - html_builder = get_builder_class(self.data.config.doctype)( - build_env=self.data.build_env, - python_env=self.data.python_env, - ) - return html_builder.get_final_doctype() - - def build_docs_search(self): - """ - Build search data. - - .. note:: - For MkDocs search is indexed from its ``html`` artifacts. - And in sphinx is run using the rtd-sphinx-extension. - """ - return self.is_type_sphinx() - - def build_docs_localmedia(self): - """Get local media files with separate build.""" - if ( - 'htmlzip' not in self.data.config.formats or - self.data.version.type == EXTERNAL - ): - return False - # We don't generate a zip for mkdocs currently. - if self.is_type_sphinx(): - return self.build_docs_class('sphinx_singlehtmllocalmedia') - return False - - def build_docs_pdf(self): - """Build PDF docs.""" - if 'pdf' not in self.data.config.formats or self.data.version.type == EXTERNAL: - return False - # Mkdocs has no pdf generation currently. 
- if self.is_type_sphinx(): - return self.build_docs_class('sphinx_pdf') - return False - - def build_docs_epub(self): - """Build ePub docs.""" - if 'epub' not in self.data.config.formats or self.data.version.type == EXTERNAL: - return False - # Mkdocs has no epub generation currently. - if self.is_type_sphinx(): - return self.build_docs_class('sphinx_epub') - return False - - def build_docs_class(self, builder_class): - """ - Build docs with additional doc backends. - - These steps are not necessarily required for the build to halt, so we - only raise a warning exception here. A hard error will halt the build - process. - """ - builder = get_builder_class(builder_class)( - self.data.build_env, - python_env=self.data.python_env, - ) - success = builder.build() - builder.move() - return success - def send_notifications(self, version_pk, build_pk, event): """Send notifications to all subscribers of `event`.""" # Try to infer the version type if we can @@ -989,10 +700,6 @@ def send_notifications(self, version_pk, build_pk, event): event=event, ) - def is_type_sphinx(self): - """Is documentation type Sphinx.""" - return 'sphinx' in self.data.config.doctype - @app.task( base=UpdateDocsTask, diff --git a/readthedocs/projects/tasks/mixins.py b/readthedocs/projects/tasks/mixins.py index 68dc9760e22..1f7f2f5cf82 100644 --- a/readthedocs/projects/tasks/mixins.py +++ b/readthedocs/projects/tasks/mixins.py @@ -1,8 +1,6 @@ -import os from collections import Counter import structlog -from django.utils.translation import gettext_lazy as _ from readthedocs.api.v2.client import api as api_v2 from readthedocs.builds import tasks as build_tasks @@ -46,45 +44,7 @@ def get_version(version_pk): version_data = api_v2.version(version_pk).get() return APIVersion(**version_data) - def get_vcs_repo(self, environment): - """ - Get the VCS object of the current project. - - All VCS commands will be executed using `environment`. 
- """ - version_repo = self.data.project.vcs_repo( - version=self.data.version.slug, - environment=environment, - verbose_name=self.data.version.verbose_name, - version_type=self.data.version.type - ) - return version_repo - - def sync_repo(self, environment): - """Update the project's repository and hit ``sync_versions`` API.""" - # Make Dirs - if not os.path.exists(self.data.project.doc_path): - os.makedirs(self.data.project.doc_path) - - if not self.data.project.vcs_class(): - raise RepositoryError( - _('Repository type "{repo_type}" unknown').format( - repo_type=self.data.project.repo_type, - ), - ) - - # Get the actual code on disk - log.info( - 'Checking out version.', - version_identifier=self.data.version.identifier, - ) - version_repo = self.get_vcs_repo(environment) - version_repo.update() - self.sync_versions(version_repo) - identifier = self.data.build_commit or self.data.version.identifier - version_repo.checkout(identifier) - - def sync_versions(self, version_repo): + def sync_versions(self, vcs_repository): """ Update tags/branches via a Celery task. @@ -92,29 +52,33 @@ def sync_versions(self, version_repo): It may trigger a new build to the stable version. """ + + # NOTE: `sync_versions` should receive `tags` and `branches` already + # and just validate them trigger the task. All the other logic should + # be done by the BuildDirector or the VCS backend. We should not + # check this here and do not depend on ``vcs_repository``. + tags = None branches = None if ( - version_repo.supports_lsremote and - not version_repo.repo_exists() and - self.data.project.has_feature(Feature.VCS_REMOTE_LISTING) + vcs_repository.supports_lsremote + and not vcs_repository.repo_exists() + and self.data.project.has_feature(Feature.VCS_REMOTE_LISTING) ): # Do not use ``ls-remote`` if the VCS does not support it or if we # have already cloned the repository locally. The latter happens # when triggering a normal build. 
- branches, tags = version_repo.lsremote - log.info('Remote versions.', branches=branches, tags=tags) + branches, tags = vcs_repository.lsremote branches_data = [] tags_data = [] - if ( - version_repo.supports_tags and - not self.data.project.has_feature(Feature.SKIP_SYNC_TAGS) + if vcs_repository.supports_tags and not self.data.project.has_feature( + Feature.SKIP_SYNC_TAGS ): # Will be an empty list if we called lsremote and had no tags returned if tags is None: - tags = version_repo.tags + tags = vcs_repository.tags tags_data = [ { 'identifier': v.identifier, @@ -123,13 +87,12 @@ def sync_versions(self, version_repo): for v in tags ] - if ( - version_repo.supports_branches and - not self.data.project.has_feature(Feature.SKIP_SYNC_BRANCHES) + if vcs_repository.supports_branches and not self.data.project.has_feature( + Feature.SKIP_SYNC_BRANCHES ): # Will be an empty list if we called lsremote and had no branches returned if branches is None: - branches = version_repo.branches + branches = vcs_repository.branches branches_data = [ { 'identifier': v.identifier, @@ -138,6 +101,8 @@ def sync_versions(self, version_repo): for v in branches ] + log.debug("Synchronizing versions.", branches=branches, tags=tags) + self.validate_duplicate_reserved_versions( tags_data=tags_data, branches_data=branches_data, @@ -169,22 +134,3 @@ def validate_duplicate_reserved_versions(self, tags_data, branches_data): raise RepositoryError( RepositoryError.DUPLICATED_RESERVED_VERSIONS, ) - - def get_vcs_env_vars(self): - """Get environment variables to be included in the VCS setup step.""" - env = self.get_rtd_env_vars() - # Don't prompt for username, this requires Git 2.3+ - env['GIT_TERMINAL_PROMPT'] = '0' - return env - - def get_rtd_env_vars(self): - """Get bash environment variables specific to Read the Docs.""" - env = { - "READTHEDOCS": "True", - "READTHEDOCS_VERSION": self.data.version.slug, - "READTHEDOCS_VERSION_TYPE": self.data.version.type, - "READTHEDOCS_VERSION_NAME": 
self.data.version.verbose_name, - "READTHEDOCS_PROJECT": self.data.project.slug, - "READTHEDOCS_LANGUAGE": self.data.project.language, - } - return env diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py index 2b02fd4565d..81c9def784b 100644 --- a/readthedocs/projects/tests/test_build_tasks.py +++ b/readthedocs/projects/tests/test_build_tasks.py @@ -33,15 +33,15 @@ def setup(self, requests_mock): self.project = fixture.get( Project, - slug='project', + slug="project", enable_epub_build=True, enable_pdf_build=True, ) - self.version = self.project.versions.get(slug='latest') + self.version = self.project.versions.get(slug="latest") self.build = fixture.get( Build, version=self.version, - commit='a1b2c3', + commit="a1b2c3", ) self.mocker = BuildEnvironmentMocker( @@ -69,86 +69,94 @@ def _config_file(self, config): config = BuildConfigV2( {}, config, - source_file='readthedocs.yaml', + source_file="readthedocs.yaml", ) config.validate() return config class TestBuildTask(BuildEnvironmentBase): - @pytest.mark.parametrize( - 'formats,builders', + "formats,builders", ( - (['pdf'], ['latex']), - (['htmlzip'], ['readthedocssinglehtmllocalmedia']), - (['epub'], ['epub']), - (['pdf', 'htmlzip', 'epub'], ['latex', 'readthedocssinglehtmllocalmedia', 'epub']), - ('all', ['latex', 'readthedocssinglehtmllocalmedia', 'nepub']), - ) + (["pdf"], ["latex"]), + (["htmlzip"], ["readthedocssinglehtmllocalmedia"]), + (["epub"], ["epub"]), + ( + ["pdf", "htmlzip", "epub"], + ["latex", "readthedocssinglehtmllocalmedia", "epub"], + ), + ("all", ["latex", "readthedocssinglehtmllocalmedia", "nepub"]), + ), ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") @pytest.mark.skip def test_build_sphinx_formats(self, load_yaml_config, formats, builders): - load_yaml_config.return_value = self._config_file({ - 'version': 2, - 'formats': formats, - 'sphinx': { - 
'configuration': 'docs/conf.py', - }, - }) + load_yaml_config.return_value = self._config_file( + { + "version": 2, + "formats": formats, + "sphinx": { + "configuration": "docs/conf.py", + }, + } + ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_any_call( + self.mocker.mocks["environment.run"].assert_any_call( mock.call( mock.ANY, - '-m', - 'sphinx', - '-T', - '-E', - '-b', - 'readthedocs', - '-d', - '_build/doctrees', - '-D', - 'language=en', - '.', - '_build/html', + "-m", + "sphinx", + "-T", + "-E", + "-b", + "readthedocs", + "-d", + "_build/doctrees", + "-D", + "language=en", + ".", + "_build/html", cwd=mock.ANY, bin_path=mock.ANY, ) ) for builder in builders: - self.mocker.mocks['environment.run'].assert_any_call( + self.mocker.mocks["environment.run"].assert_any_call( mock.call( mock.ANY, - '-m', - 'sphinx', - '-T', - '-E', - '-b', + "-m", + "sphinx", + "-T", + "-E", + "-b", builder, - '-d', - '_build/doctrees', - '-D', - 'language=en', - '.', - '_build/html', + "-d", + "_build/doctrees", + "-D", + "language=en", + ".", + "_build/html", cwd=mock.ANY, bin_path=mock.ANY, ) ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.build_docs_class') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.build_docs_html') - def test_build_formats_only_html_for_external_versions(self, build_docs_html, build_docs_class, load_yaml_config): - load_yaml_config.return_value = self._config_file({ - 'version': 2, - 'formats': 'all', - }) + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") + @mock.patch("readthedocs.doc_builder.director.BuildDirector.build_docs_class") + def test_build_formats_only_html_for_external_versions( + self, build_docs_class, load_yaml_config + ): + load_yaml_config.return_value = self._config_file( + { + "version": 2, + "formats": "all", + } + ) + build_docs_class.return_value = True # Make the version external 
self.version.type = EXTERNAL @@ -156,38 +164,39 @@ def test_build_formats_only_html_for_external_versions(self, build_docs_html, bu self._trigger_update_docs_task() - build_docs_html.assert_called_once() # HTML builder - build_docs_class.assert_not_called() # all the other builders - - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.build_docs_class') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.build_docs_html') - def test_build_respects_formats_mkdocs(self, build_docs_html, build_docs_class, load_yaml_config): - load_yaml_config.return_value = self._config_file({ - 'version': 2, - 'mkdocs': { - 'configuration': 'mkdocs.yml', - }, - 'formats': ['epub', 'pdf'], - }) + build_docs_class.assert_called_once_with("sphinx") # HTML builder + + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") + @mock.patch("readthedocs.doc_builder.director.BuildDirector.build_docs_class") + def test_build_respects_formats_mkdocs(self, build_docs_class, load_yaml_config): + load_yaml_config.return_value = self._config_file( + { + "version": 2, + "mkdocs": { + "configuration": "mkdocs.yml", + }, + "formats": ["epub", "pdf"], + } + ) self._trigger_update_docs_task() - build_docs_html.assert_called_once() - build_docs_class.assert_not_called() + build_docs_class.assert_called_once_with("mkdocs") # HTML builder - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") @pytest.mark.skip() # NOTE: find a way to test we are passing all the environment variables to all the commands def test_get_env_vars_default(self, load_yaml_config): - load_yaml_config.return_value = self._config_file({ - 'version': 2, - }) + load_yaml_config.return_value = self._config_file( + { + "version": 2, + } + ) fixture.get( EnvironmentVariable, - name='TOKEN', - value='a1b2c3', + name="TOKEN", + value="a1b2c3", 
project=self.project, ) @@ -201,49 +210,60 @@ def test_get_env_vars_default(self, load_yaml_config): "READTHEDOCS_LANGUAGE": self.project.language, "BIN_PATH": os.path.join( self.project.doc_path, - 'envs', + "envs", self.version.slug, - 'bin', + "bin", ), - 'TOKEN': 'a1b2c3', + "TOKEN": "a1b2c3", } self._trigger_update_docs_task() # mock this object to make sure that we are in a conda env - env.update({ - 'CONDA_ENVS_PATH': os.path.join(self.project.doc_path, 'conda'), - 'CONDA_DEFAULT_ENV': self.version.slug, - 'BIN_PATH': os.path.join( - self.project.doc_path, - 'conda', - self.version.slug, - 'bin', - ), - }) + env.update( + { + "CONDA_ENVS_PATH": os.path.join(self.project.doc_path, "conda"), + "CONDA_DEFAULT_ENV": self.version.slug, + "BIN_PATH": os.path.join( + self.project.doc_path, + "conda", + self.version.slug, + "bin", + ), + } + ) - @mock.patch('readthedocs.projects.tasks.builds.fileify') - @mock.patch('readthedocs.projects.tasks.builds.build_complete') - @mock.patch('readthedocs.projects.tasks.builds.send_external_build_status') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.send_notifications') - @mock.patch('readthedocs.projects.tasks.builds.clean_build') - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') - def test_successful_build(self, load_yaml_config, clean_build, send_notifications, send_external_build_status, build_complete, fileify): - load_yaml_config.return_value = self._config_file({ - 'version': 2, - 'formats': 'all', - 'sphinx': { - 'configuration': 'docs/conf.py', - }, - }) + @mock.patch("readthedocs.projects.tasks.builds.fileify") + @mock.patch("readthedocs.projects.tasks.builds.build_complete") + @mock.patch("readthedocs.projects.tasks.builds.send_external_build_status") + @mock.patch("readthedocs.projects.tasks.builds.UpdateDocsTask.send_notifications") + @mock.patch("readthedocs.projects.tasks.builds.clean_build") + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") + def 
test_successful_build( + self, + load_yaml_config, + clean_build, + send_notifications, + send_external_build_status, + build_complete, + fileify, + ): + load_yaml_config.return_value = self._config_file( + { + "version": 2, + "formats": "all", + "sphinx": { + "configuration": "docs/conf.py", + }, + } + ) self._trigger_update_docs_task() # It has to be called twice, ``before_start`` and ``after_return`` - clean_build.assert_has_calls([ - mock.call(mock.ANY), # the argument is an APIVersion - mock.call(mock.ANY) - ]) + clean_build.assert_has_calls( + [mock.call(mock.ANY), mock.call(mock.ANY)] # the argument is an APIVersion + ) # TODO: mock `build_tasks.send_build_notifications` instead and add # another tests to check that they are not sent for EXTERNAL versions @@ -277,120 +297,127 @@ def test_successful_build(self, load_yaml_config, clean_build, send_notification # Update build state: clonning assert self.requests_mock.request_history[3].json() == { - 'id': 1, - 'state': 'cloning', - 'commit': 'a1b2c3', - 'error': '', - 'builder': mock.ANY, + "id": 1, + "state": "cloning", + "commit": "a1b2c3", + "error": "", + "builder": mock.ANY, } - # Save config object data (using default values) + # Update build state: installing assert self.requests_mock.request_history[4].json() == { - 'config': { - 'version': '2', - 'formats': ['htmlzip', 'pdf', 'epub'], - 'python': { - 'version': '3', - 'install': [], - 'use_system_site_packages': False, + "id": 1, + "state": "installing", + "commit": "a1b2c3", + "builder": mock.ANY, + "error": "", + # We update the `config` field at the same time we send the + # `installing` state, to reduce one API call + "config": { + "version": "2", + "formats": ["htmlzip", "pdf", "epub"], + "python": { + "version": "3", + "install": [], + "use_system_site_packages": False, }, - 'conda': None, - 'build': { - 'image': 'readthedocs/build:latest', - 'apt_packages': [], + "conda": None, + "build": { + "image": "readthedocs/build:latest", + 
"apt_packages": [], }, - 'doctype': 'sphinx', - 'sphinx': { - 'builder': 'sphinx', - 'configuration': 'docs/conf.py', - 'fail_on_warning': False, + "doctype": "sphinx", + "sphinx": { + "builder": "sphinx", + "configuration": "docs/conf.py", + "fail_on_warning": False, }, - 'mkdocs': None, - 'submodules': { - 'include': [], - 'exclude': 'all', - 'recursive': False, + "mkdocs": None, + "submodules": { + "include": [], + "exclude": "all", + "recursive": False, }, - 'search': { - 'ranking': {}, - 'ignore': [ - 'search.html', - 'search/index.html', - '404.html', - '404/index.html', + "search": { + "ranking": {}, + "ignore": [ + "search.html", + "search/index.html", + "404.html", + "404/index.html", ], }, }, } - # Update build state: installing - assert self.requests_mock.request_history[5].json() == { - 'id': 1, - 'state': 'installing', - 'commit': 'a1b2c3', - 'config': mock.ANY, - 'builder': mock.ANY, - 'error': '', - } # Update build state: building - assert self.requests_mock.request_history[6].json() == { - 'id': 1, - 'state': 'building', - 'commit': 'a1b2c3', - 'config': mock.ANY, - 'builder': mock.ANY, - 'error': '', + assert self.requests_mock.request_history[5].json() == { + "id": 1, + "state": "building", + "commit": "a1b2c3", + "config": mock.ANY, + "builder": mock.ANY, + "error": "", } # Update build state: uploading - assert self.requests_mock.request_history[7].json() == { - 'id': 1, - 'state': 'uploading', - 'commit': 'a1b2c3', - 'config': mock.ANY, - 'builder': mock.ANY, - 'error': '', + assert self.requests_mock.request_history[6].json() == { + "id": 1, + "state": "uploading", + "commit": "a1b2c3", + "config": mock.ANY, + "builder": mock.ANY, + "error": "", } # Update version state - assert self.requests_mock.request_history[8]._request.method == 'PATCH' - assert self.requests_mock.request_history[8].path == '/api/v2/version/1/' - assert self.requests_mock.request_history[8].json() == { - 'built': True, - 'documentation_type': 'sphinx', - 'has_pdf': 
True, - 'has_epub': True, - 'has_htmlzip': True, + assert self.requests_mock.request_history[7]._request.method == "PATCH" + assert self.requests_mock.request_history[7].path == "/api/v2/version/1/" + assert self.requests_mock.request_history[7].json() == { + "built": True, + "documentation_type": "sphinx", + "has_pdf": True, + "has_epub": True, + "has_htmlzip": True, } # Set project has valid clone - assert self.requests_mock.request_history[9]._request.method == 'PATCH' - assert self.requests_mock.request_history[9].path == '/api/v2/project/1/' - assert self.requests_mock.request_history[9].json() == {'has_valid_clone': True} + assert self.requests_mock.request_history[8]._request.method == "PATCH" + assert self.requests_mock.request_history[8].path == "/api/v2/project/1/" + assert self.requests_mock.request_history[8].json() == {"has_valid_clone": True} # Update build state: finished, success and builder - assert self.requests_mock.request_history[10].json() == { - 'id': 1, - 'state': 'finished', - 'commit': 'a1b2c3', - 'config': mock.ANY, - 'builder': mock.ANY, - 'length': mock.ANY, - 'success': True, - 'error': '', + assert self.requests_mock.request_history[9].json() == { + "id": 1, + "state": "finished", + "commit": "a1b2c3", + "config": mock.ANY, + "builder": mock.ANY, + "length": mock.ANY, + "success": True, + "error": "", } - self.mocker.mocks['build_media_storage'].sync_directory.assert_has_calls([ - mock.call(mock.ANY, 'html/project/latest'), - mock.call(mock.ANY, 'json/project/latest'), - mock.call(mock.ANY, 'htmlzip/project/latest'), - mock.call(mock.ANY, 'pdf/project/latest'), - mock.call(mock.ANY, 'epub/project/latest'), - ]) + self.mocker.mocks["build_media_storage"].sync_directory.assert_has_calls( + [ + mock.call(mock.ANY, "html/project/latest"), + mock.call(mock.ANY, "json/project/latest"), + mock.call(mock.ANY, "htmlzip/project/latest"), + mock.call(mock.ANY, "pdf/project/latest"), + mock.call(mock.ANY, "epub/project/latest"), + ] + ) # TODO: 
find a directory to remove here :) # build_media_storage.delete_directory - @mock.patch('readthedocs.projects.tasks.builds.build_complete') - @mock.patch('readthedocs.projects.tasks.builds.send_external_build_status') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.execute') - @mock.patch('readthedocs.projects.tasks.builds.UpdateDocsTask.send_notifications') - @mock.patch('readthedocs.projects.tasks.builds.clean_build') - def test_failed_build(self, clean_build, send_notifications, execute, send_external_build_status, build_complete): + @mock.patch("readthedocs.projects.tasks.builds.build_complete") + @mock.patch("readthedocs.projects.tasks.builds.send_external_build_status") + @mock.patch("readthedocs.projects.tasks.builds.UpdateDocsTask.execute") + @mock.patch("readthedocs.projects.tasks.builds.UpdateDocsTask.send_notifications") + @mock.patch("readthedocs.projects.tasks.builds.clean_build") + def test_failed_build( + self, + clean_build, + send_notifications, + execute, + send_external_build_status, + build_complete, + ): # Force an exception from the execution of the task. 
    @mock.patch("readthedocs.doc_builder.director.load_yaml_config")
    def test_build_commands_executed(self, load_yaml_config):
        """
        End-to-end check of every command a full build runs, in order.

        Covers: the git clone/checkout/clean sequence, virtualenv creation,
        the two pip installs (core tooling, then doc requirements), one
        Sphinx invocation per output format (html, localmedia, latex, epub),
        and the ``mv`` of the produced pdf/epub artifacts.
        """
        load_yaml_config.return_value = self._config_file(
            {
                "version": 2,
                "formats": "all",
                "sphinx": {
                    "configuration": "docs/conf.py",
                },
            }
        )

        self._trigger_update_docs_task()

        # VCS step: shallow clone, checkout the build commit, clean the tree.
        self.mocker.mocks["git.Backend.run"].assert_has_calls(
            [
                mock.call(
                    "git", "clone", "--no-single-branch", "--depth", "50", "", "."
                ),
                mock.call("git", "checkout", "--force", "a1b2c3"),
                mock.call("git", "clean", "-d", "-f", "-f"),
            ]
        )

        # Environment + builder steps, asserted as one ordered sequence.
        self.mocker.mocks["environment.run"].assert_has_calls(
            [
                # Create the virtualenv for the version.
                mock.call(
                    "python3.7",
                    "-mvirtualenv",
                    mock.ANY,
                    bin_path=None,
                    cwd=None,
                ),
                # Upgrade core tooling inside the virtualenv.
                mock.call(
                    mock.ANY,
                    "-m",
                    "pip",
                    "install",
                    "--upgrade",
                    "--no-cache-dir",
                    "pip",
                    "setuptools<58.3.0",
                    bin_path=mock.ANY,
                    cwd=mock.ANY,
                ),
                # Install the documentation requirements.
                mock.call(
                    mock.ANY,
                    "-m",
                    "pip",
                    "install",
                    "--upgrade",
                    "--no-cache-dir",
                    "mock==1.0.1",
                    "pillow==5.4.1",
                    "alabaster>=0.7,<0.8,!=0.7.5",
                    "commonmark==0.8.1",
                    "recommonmark==0.5.0",
                    "sphinx<2",
                    "sphinx-rtd-theme<0.5",
                    "readthedocs-sphinx-ext<2.2",
                    bin_path=mock.ANY,
                    cwd=mock.ANY,
                ),
                # FIXME: shouldn't this one be present here? It's not now because
                # we are mocking `append_conf` which is the one that triggers this
                # command.
                #
                # mock.call(
                #     'cat',
                #     'docs/conf.py',
                #     cwd=mock.ANY,
                # ),
                # HTML builder.
                mock.call(
                    mock.ANY,
                    "-m",
                    "sphinx",
                    "-T",
                    "-E",
                    "-b",
                    "readthedocs",
                    "-d",
                    "_build/doctrees",
                    "-D",
                    "language=en",
                    ".",
                    "_build/html",
                    cwd=mock.ANY,
                    bin_path=mock.ANY,
                ),
                # Local-media (htmlzip) builder.
                mock.call(
                    mock.ANY,
                    "-m",
                    "sphinx",
                    "-T",
                    "-E",
                    "-b",
                    "readthedocssinglehtmllocalmedia",
                    "-d",
                    "_build/doctrees",
                    "-D",
                    "language=en",
                    ".",
                    "_build/localmedia",
                    cwd=mock.ANY,
                    bin_path=mock.ANY,
                ),
                # LaTeX builder (PDF). Note: no -T/-E flags and different
                # argument order than the HTML invocations.
                mock.call(
                    mock.ANY,
                    "-m",
                    "sphinx",
                    "-b",
                    "latex",
                    "-D",
                    "language=en",
                    "-d",
                    "_build/doctrees",
                    ".",
                    "_build/latex",
                    cwd=mock.ANY,
                    bin_path=mock.ANY,
                ),
                # Probe the installed Sphinx version (>= 1.6.1).
                mock.call(
                    mock.ANY,
                    "-c",
                    '"import sys; import sphinx; sys.exit(0 if sphinx.version_info >= (1, 6, 1) else 1)"',
                    bin_path=mock.ANY,
                    cwd=mock.ANY,
                    escape_command=False,
                    shell=True,
                    record=False,
                ),
                # Move the generated PDF into place.
                mock.call(
                    "mv",
                    "-f",
                    "output.file",
                    # TODO: take a look at
                    # https://callee.readthedocs.io/en/latest/reference/strings.html#callee.strings.EndsWith
                    # to match `project.pdf`
                    mock.ANY,
                    cwd=mock.ANY,
                ),
                # Epub builder.
                mock.call(
                    mock.ANY,
                    "-m",
                    "sphinx",
                    "-T",
                    "-E",
                    "-b",
                    "epub",
                    "-d",
                    "_build/doctrees",
                    "-D",
                    "language=en",
                    ".",
                    "_build/epub",
                    cwd=mock.ANY,
                    bin_path=mock.ANY,
                ),
                # Move the generated epub into place.
                mock.call(
                    "mv",
                    "-f",
                    "output.file",
                    # TODO: take a look at
                    # https://callee.readthedocs.io/en/latest/reference/strings.html#callee.strings.EndsWith
                    # to match `project.epub`
                    mock.ANY,
                    cwd=mock.ANY,
                ),
                # FIXME: I think we are hitting this issue here:
                # https://github.com/pytest-dev/pytest-mock/issues/234
            ]
        )
"build": { + "apt_packages": [ + "clangd", + "cmatrix", ], }, }, - source_file='readthedocs.yml', + source_file="readthedocs.yml", ) config.validate() load_yaml_config.return_value = config self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - 'apt-get', - 'update', - '--assume-yes', - '--quiet', - user='root:root', - ), - mock.call( - 'apt-get', - 'install', - '--assume-yes', - '--quiet', - '--', - 'clangd', - 'cmatrix', - user='root:root', - ) - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + "apt-get", + "update", + "--assume-yes", + "--quiet", + user="root:root", + ), + mock.call( + "apt-get", + "install", + "--assume-yes", + "--quiet", + "--", + "clangd", + "cmatrix", + user="root:root", + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_build_tools(self, load_yaml_config): config = BuildConfigV2( {}, { - 'version': 2, - 'build': { - 'os': 'ubuntu-20.04', - 'tools': { - 'python': '3.10', - 'nodejs': '16', - 'rust': '1.55', - 'golang': '1.17', + "version": 2, + "build": { + "os": "ubuntu-20.04", + "tools": { + "python": "3.10", + "nodejs": "16", + "rust": "1.55", + "golang": "1.17", }, }, }, - source_file='readthedocs.yml', + source_file="readthedocs.yml", ) config.validate() load_yaml_config.return_value = config self._trigger_update_docs_task() - python_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['python']['3.10'] - nodejs_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['nodejs']['16'] - rust_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['rust']['1.55'] - golang_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['golang']['1.17'] - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call('asdf', 'install', 'python', python_version), - mock.call('asdf', 'global', 'python', python_version), - mock.call('asdf', 'reshim', 'python', 
record=False), - mock.call('python', '-mpip', 'install', '-U', 'virtualenv', 'setuptools<58.3.0'), - mock.call('asdf', 'install', 'nodejs', nodejs_version), - mock.call('asdf', 'global', 'nodejs', nodejs_version), - mock.call('asdf', 'reshim', 'nodejs', record=False), - mock.call('asdf', 'install', 'rust', rust_version), - mock.call('asdf', 'global', 'rust', rust_version), - mock.call('asdf', 'reshim', 'rust', record=False), - mock.call('asdf', 'install', 'golang', golang_version), - mock.call('asdf', 'global', 'golang', golang_version), - mock.call('asdf', 'reshim', 'golang', record=False), - mock.ANY, - ]) + python_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["python"]["3.10"] + nodejs_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["nodejs"]["16"] + rust_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["rust"]["1.55"] + golang_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["golang"]["1.17"] + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call("asdf", "install", "python", python_version), + mock.call("asdf", "global", "python", python_version), + mock.call("asdf", "reshim", "python", record=False), + mock.call( + "python", + "-mpip", + "install", + "-U", + "virtualenv", + "setuptools<58.3.0", + ), + mock.call("asdf", "install", "nodejs", nodejs_version), + mock.call("asdf", "global", "nodejs", nodejs_version), + mock.call("asdf", "reshim", "nodejs", record=False), + mock.call("asdf", "install", "rust", rust_version), + mock.call("asdf", "global", "rust", rust_version), + mock.call("asdf", "reshim", "rust", record=False), + mock.call("asdf", "install", "golang", golang_version), + mock.call("asdf", "global", "golang", golang_version), + mock.call("asdf", "reshim", "golang", record=False), + mock.ANY, + ] + ) - @mock.patch('readthedocs.doc_builder.python_environments.tarfile') - @mock.patch('readthedocs.doc_builder.python_environments.build_tools_storage') - 
@mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.python_environments.tarfile") + @mock.patch("readthedocs.doc_builder.python_environments.build_tools_storage") + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_build_tools_cached(self, load_yaml_config, build_tools_storage, tarfile): config = BuildConfigV2( {}, { - 'version': 2, - 'build': { - 'os': 'ubuntu-20.04', - 'tools': { - 'python': '3.10', - 'nodejs': '16', - 'rust': '1.55', - 'golang': '1.17', + "version": 2, + "build": { + "os": "ubuntu-20.04", + "tools": { + "python": "3.10", + "nodejs": "16", + "rust": "1.55", + "golang": "1.17", }, }, }, - source_file='readthedocs.yml', + source_file="readthedocs.yml", ) config.validate() load_yaml_config.return_value = config - build_tools_storage.open.return_value = b'' + build_tools_storage.open.return_value = b"" build_tools_storage.exists.return_value = True tarfile.open.return_value.__enter__.return_value.extract_all.return_value = None self._trigger_update_docs_task() - python_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['python']['3.10'] - nodejs_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['nodejs']['16'] - rust_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['rust']['1.55'] - golang_version = settings.RTD_DOCKER_BUILD_SETTINGS['tools']['golang']['1.17'] - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - 'mv', - # Use mock.ANY here because path differs when ran locally - # and on CircleCI - mock.ANY, - f'/home/docs/.asdf/installs/python/{python_version}', - record=False, - ), - mock.call('asdf', 'global', 'python', python_version), - mock.call('asdf', 'reshim', 'python', record=False), - mock.call( - 'mv', - mock.ANY, - f'/home/docs/.asdf/installs/nodejs/{nodejs_version}', - record=False, - ), - mock.call('asdf', 'global', 'nodejs', nodejs_version), - mock.call('asdf', 'reshim', 'nodejs', record=False), - mock.call( - 'mv', - mock.ANY, 
- f'/home/docs/.asdf/installs/rust/{rust_version}', - record=False, - ), - mock.call('asdf', 'global', 'rust', rust_version), - mock.call('asdf', 'reshim', 'rust', record=False), - mock.call( - 'mv', + python_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["python"]["3.10"] + nodejs_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["nodejs"]["16"] + rust_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["rust"]["1.55"] + golang_version = settings.RTD_DOCKER_BUILD_SETTINGS["tools"]["golang"]["1.17"] + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + "mv", + # Use mock.ANY here because path differs when ran locally + # and on CircleCI + mock.ANY, + f"/home/docs/.asdf/installs/python/{python_version}", + record=False, + ), + mock.call("asdf", "global", "python", python_version), + mock.call("asdf", "reshim", "python", record=False), + mock.call( + "mv", + mock.ANY, + f"/home/docs/.asdf/installs/nodejs/{nodejs_version}", + record=False, + ), + mock.call("asdf", "global", "nodejs", nodejs_version), + mock.call("asdf", "reshim", "nodejs", record=False), + mock.call( + "mv", + mock.ANY, + f"/home/docs/.asdf/installs/rust/{rust_version}", + record=False, + ), + mock.call("asdf", "global", "rust", rust_version), + mock.call("asdf", "reshim", "rust", record=False), + mock.call( + "mv", + mock.ANY, + f"/home/docs/.asdf/installs/golang/{golang_version}", + record=False, + ), + mock.call("asdf", "global", "golang", golang_version), + mock.call("asdf", "reshim", "golang", record=False), mock.ANY, - f'/home/docs/.asdf/installs/golang/{golang_version}', - record=False, - ), - mock.call('asdf', 'global', 'golang', golang_version), - mock.call('asdf', 'reshim', 'golang', record=False), - mock.ANY, - ]) + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_requirements_from_config_file_installed(self, load_yaml_config): load_yaml_config.return_value 
= self._config_file( { - 'version': 2, - 'python': { - 'install': [{ - 'requirements': 'requirements.txt', - }], + "version": 2, + "python": { + "install": [ + { + "requirements": "requirements.txt", + } + ], }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - '-m', - 'pip', - 'install', - '--exists-action=w', - '--no-cache-dir', - '-r', - 'requirements.txt', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "pip", + "install", + "--exists-action=w", + "--no-cache-dir", + "-r", + "requirements.txt", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_conda_config_calls_conda_command(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'conda': { - 'environment': 'environment.yaml', + "version": 2, + "conda": { + "environment": "environment.yaml", }, }, ) @@ -806,172 +855,204 @@ def test_conda_config_calls_conda_command(self, load_yaml_config): # TODO: check we are saving the `conda.environment` in the config file # via the API call - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - 'conda', - 'env', - 'create', - '--quiet', - '--name', - self.version.slug, - '--file', - 'environment.yaml', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - mock.call( - 'conda', - 'install', - '--yes', - '--quiet', - '--name', - self.version.slug, - 'mock', - 'pillow', - 'sphinx', - 'sphinx_rtd_theme', - cwd=mock.ANY, - ), - mock.call( - mock.ANY, - '-m', - 'pip', - 'install', - '-U', - '--no-cache-dir', - 'recommonmark', - 'readthedocs-sphinx-ext', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + "conda", + "env", + "create", + "--quiet", + 
"--name", + self.version.slug, + "--file", + "environment.yaml", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + mock.call( + "conda", + "install", + "--yes", + "--quiet", + "--name", + self.version.slug, + "mock", + "pillow", + "sphinx", + "sphinx_rtd_theme", + cwd=mock.ANY, + ), + mock.call( + mock.ANY, + "-m", + "pip", + "install", + "-U", + "--no-cache-dir", + "recommonmark", + "readthedocs-sphinx-ext", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_python_mamba_commands(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'build': { - 'os': 'ubuntu-20.04', - 'tools': { - 'python': 'mambaforge-4.10', + "version": 2, + "build": { + "os": "ubuntu-20.04", + "tools": { + "python": "mambaforge-4.10", }, }, - 'conda': { - 'environment': 'environment.yaml', + "conda": { + "environment": "environment.yaml", }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call('asdf', 'install', 'python', 'mambaforge-4.10.3-10'), - mock.call('asdf', 'global', 'python', 'mambaforge-4.10.3-10'), - mock.call('asdf', 'reshim', 'python', record=False), - mock.call('mamba', 'env', 'create', '--quiet', '--name', 'latest', '--file', 'environment.yaml', bin_path=None, cwd=mock.ANY), - mock.call('mamba', 'install', '--yes', '--quiet', '--name', 'latest', 'mock', 'pillow', 'sphinx', 'sphinx_rtd_theme', cwd=mock.ANY), - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call("asdf", "install", "python", "mambaforge-4.10.3-10"), + mock.call("asdf", "global", "python", "mambaforge-4.10.3-10"), + mock.call("asdf", "reshim", "python", record=False), + mock.call( + "mamba", + "env", + "create", + "--quiet", + "--name", + "latest", + "--file", + "environment.yaml", + bin_path=None, + cwd=mock.ANY, + ), + mock.call( + "mamba", + "install", + 
"--yes", + "--quiet", + "--name", + "latest", + "mock", + "pillow", + "sphinx", + "sphinx_rtd_theme", + cwd=mock.ANY, + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_sphinx_fail_on_warning(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'sphinx': { - 'configuration': 'docs/conf.py', - 'fail_on_warning': True, - }, + "version": 2, + "sphinx": { + "configuration": "docs/conf.py", + "fail_on_warning": True, + }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - '-m', - 'sphinx', - '-T', - '-E', - '-W', # fail on warning flag - '--keep-going', # fail on warning flag - '-b', - 'readthedocs', - '-d', - '_build/doctrees', - '-D', - 'language=en', - '.', - '_build/html', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - ]) - + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "sphinx", + "-T", + "-E", + "-W", # fail on warning flag + "--keep-going", # fail on warning flag + "-b", + "readthedocs", + "-d", + "_build/doctrees", + "-D", + "language=en", + ".", + "_build/html", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_mkdocs_fail_on_warning(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'mkdocs': { - 'configuration': 'docs/mkdocs.yaml', - 'fail_on_warning': True, - }, + "version": 2, + "mkdocs": { + "configuration": "docs/mkdocs.yaml", + "fail_on_warning": True, + }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - '-m', - 'mkdocs', - 'build', - '--clean', - '--site-dir', - '_build/html', - '--config-file', - 'docs/mkdocs.yaml', - '--strict', # 
fail on warning flag - cwd=mock.ANY, - bin_path=mock.ANY, - ) - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "mkdocs", + "build", + "--clean", + "--site-dir", + "_build/html", + "--config-file", + "docs/mkdocs.yaml", + "--strict", # fail on warning flag + cwd=mock.ANY, + bin_path=mock.ANY, + ) + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_system_site_packages(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'python': { - 'system_packages': True, + "version": 2, + "python": { + "system_packages": True, }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - 'python3.7', - '-mvirtualenv', - '--system-site-packages', # expected flag - mock.ANY, - bin_path=None, - cwd=None, - ), - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + "python3.7", + "-mvirtualenv", + "--system-site-packages", # expected flag + mock.ANY, + bin_path=None, + cwd=None, + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_system_site_packages_project_overrides(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, + "version": 2, # Do not define `system_packages: True` in the config file. 
- 'python': {}, + "python": {}, }, ) @@ -981,81 +1062,88 @@ def test_system_site_packages_project_overrides(self, load_yaml_config): self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - 'python3.7', - '-mvirtualenv', - # we don't expect this flag to be here - # '--system-site-packages' - mock.ANY, - bin_path=None, - cwd=None, - ), - ]) - + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + "python3.7", + "-mvirtualenv", + # we don't expect this flag to be here + # '--system-site-packages' + mock.ANY, + bin_path=None, + cwd=None, + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_python_install_setuptools(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'python': { - 'install': [{ - 'path': '.', - 'method': 'setuptools', - }], + "version": 2, + "python": { + "install": [ + { + "path": ".", + "method": "setuptools", + } + ], }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - './setup.py', - 'install', - '--force', - cwd=mock.ANY, - bin_path=mock.ANY, - ) - ]) - + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "./setup.py", + "install", + "--force", + cwd=mock.ANY, + bin_path=mock.ANY, + ) + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_python_install_pip(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'python': { - 'install': [{ - 'path': '.', - 'method': 'pip', - }], + "version": 2, + "python": { + "install": [ + { + "path": ".", + "method": "pip", + } + ], }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - 
'-m', - 'pip', - 'install', - '--upgrade', - '--upgrade-strategy', - 'eager', - '--no-cache-dir', - '.', - cwd=mock.ANY, - bin_path=mock.ANY, - ) - ]) - + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "pip", + "install", + "--upgrade", + "--upgrade-strategy", + "eager", + "--no-cache-dir", + ".", + cwd=mock.ANY, + bin_path=mock.ANY, + ) + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_python_install_pip_extras(self, load_yaml_config): # FIXME: the test passes but in the logs there is an error related to # `backends/sphinx.py` not finding a file. @@ -1063,54 +1151,57 @@ def test_python_install_pip_extras(self, load_yaml_config): # TypeError('expected str, bytes or os.PathLike object, not NoneType') load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'python': { - 'install': [{ - 'path': '.', - 'method': 'pip', - 'extra_requirements': ['docs'], - }], + "version": 2, + "python": { + "install": [ + { + "path": ".", + "method": "pip", + "extra_requirements": ["docs"], + } + ], }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - '-m', - 'pip', - 'install', - '--upgrade', - '--upgrade-strategy', - 'eager', - '--no-cache-dir', - '.[docs]', - cwd=mock.ANY, - bin_path=mock.ANY, - ) - ]) - + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "pip", + "install", + "--upgrade", + "--upgrade-strategy", + "eager", + "--no-cache-dir", + ".[docs]", + cwd=mock.ANY, + bin_path=mock.ANY, + ) + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_python_install_pip_several_options(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'python': { - 'install': [ + 
"version": 2, + "python": { + "install": [ { - 'path': '.', - 'method': 'pip', - 'extra_requirements': ['docs'], + "path": ".", + "method": "pip", + "extra_requirements": ["docs"], }, { - 'path': 'two', - 'method': 'setuptools', + "path": "two", + "method": "setuptools", }, { - 'requirements': 'three.txt', + "requirements": "three.txt", }, ], }, @@ -1119,94 +1210,104 @@ def test_python_install_pip_several_options(self, load_yaml_config): self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - '-m', - 'pip', - 'install', - '--upgrade', - '--upgrade-strategy', - 'eager', - '--no-cache-dir', - '.[docs]', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - mock.call( - mock.ANY, - 'two/setup.py', - 'install', - '--force', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - mock.call( - mock.ANY, - '-m', - 'pip', - 'install', - '--exists-action=w', - '--no-cache-dir', - '-r', - 'three.txt', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "pip", + "install", + "--upgrade", + "--upgrade-strategy", + "eager", + "--no-cache-dir", + ".[docs]", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + mock.call( + mock.ANY, + "two/setup.py", + "install", + "--force", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + mock.call( + mock.ANY, + "-m", + "pip", + "install", + "--exists-action=w", + "--no-cache-dir", + "-r", + "three.txt", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + ] + ) @pytest.mark.parametrize( - 'value,expected', [ - (ALL, ['one', 'two', 'three']), - (['one', 'two'], ['one', 'two']), + "value,expected", + [ + (ALL, ["one", "two", "three"]), + (["one", "two"], ["one", "two"]), ], ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_submodules_include(self, load_yaml_config, value, expected): load_yaml_config.return_value = self._config_file( { - 
'version': 2, - 'submodules': { - 'include': value, + "version": 2, + "submodules": { + "include": value, }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['git.Backend.run'].assert_has_calls([ - mock.call('git', 'submodule', 'sync'), - mock.call('git', 'submodule', 'update', '--init', '--force', *expected), - ]) + self.mocker.mocks["git.Backend.run"].assert_has_calls( + [ + mock.call("git", "submodule", "sync"), + mock.call("git", "submodule", "update", "--init", "--force", *expected), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_submodules_exclude(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'submodules': { - 'exclude': ['one'], - 'recursive': True - }, + "version": 2, + "submodules": {"exclude": ["one"], "recursive": True}, }, ) self._trigger_update_docs_task() - self.mocker.mocks['git.Backend.run'].assert_has_calls([ - mock.call('git', 'submodule', 'sync'), - mock.call('git', 'submodule', 'update', '--init', '--force', '--recursive', 'two', 'three'), - ]) + self.mocker.mocks["git.Backend.run"].assert_has_calls( + [ + mock.call("git", "submodule", "sync"), + mock.call( + "git", + "submodule", + "update", + "--init", + "--force", + "--recursive", + "two", + "three", + ), + ] + ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_submodules_exclude_all(self, load_yaml_config): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'submodules': { - 'exclude': ALL, - 'recursive': True - }, + "version": 2, + "submodules": {"exclude": ALL, "recursive": True}, }, ) @@ -1216,83 +1317,81 @@ def test_submodules_exclude_all(self, load_yaml_config): # mock.call('git', 'submodule', 'sync'), # mock.call('git', 'submodule', 'update', '--init', '--force', 'one', 'two', 'three'), - for call in 
self.mocker.mocks['git.Backend.run'].mock_calls: - if 'submodule' in call.args: - assert False, 'git submodule command found' - + for call in self.mocker.mocks["git.Backend.run"].mock_calls: + if "submodule" in call.args: + assert False, "git submodule command found" @pytest.mark.parametrize( - 'value,command', + "value,command", [ - ('html', 'readthedocs'), - ('htmldir', 'readthedocsdirhtml'), - ('dirhtml', 'readthedocsdirhtml'), - ('singlehtml', 'readthedocssinglehtml'), + ("html", "readthedocs"), + ("htmldir", "readthedocsdirhtml"), + ("dirhtml", "readthedocsdirhtml"), + ("singlehtml", "readthedocssinglehtml"), ], ) - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_sphinx_builder(self, load_yaml_config, value, command): load_yaml_config.return_value = self._config_file( { - 'version': 2, - 'sphinx': { - 'builder': value, - 'configuration': 'docs/conf.py', + "version": 2, + "sphinx": { + "builder": value, + "configuration": "docs/conf.py", }, }, ) self._trigger_update_docs_task() - self.mocker.mocks['environment.run'].assert_has_calls([ - mock.call( - mock.ANY, - '-m', - 'sphinx', - '-T', - '-E', - '-b', - command, - '-d', - '_build/doctrees', - '-D', - 'language=en', - '.', - '_build/html', - cwd=mock.ANY, - bin_path=mock.ANY, - ), - ]) + self.mocker.mocks["environment.run"].assert_has_calls( + [ + mock.call( + mock.ANY, + "-m", + "sphinx", + "-T", + "-E", + "-b", + command, + "-d", + "_build/doctrees", + "-D", + "language=en", + ".", + "_build/html", + cwd=mock.ANY, + bin_path=mock.ANY, + ), + ] + ) class TestBuildTaskExceptionHandler(BuildEnvironmentBase): - - @mock.patch('readthedocs.projects.tasks.builds.load_yaml_config') + @mock.patch("readthedocs.doc_builder.director.load_yaml_config") def test_config_file_exception(self, load_yaml_config): load_yaml_config.side_effect = ConfigError( - code='invalid', - message='Invalid version in config file.' 
+ code="invalid", message="Invalid version in config file." ) self._trigger_update_docs_task() # This is a known exceptions. We hit the API saving the correct error # in the Build object. In this case, the "error message" coming from # the exception will be shown to the user - assert self.requests_mock.request_history[-1]._request.method == 'PATCH' - assert self.requests_mock.request_history[-1].path == '/api/v2/build/1/' + assert self.requests_mock.request_history[-1]._request.method == "PATCH" + assert self.requests_mock.request_history[-1].path == "/api/v2/build/1/" assert self.requests_mock.request_history[-1].json() == { - 'id': 1, - 'state': 'finished', - 'commit': 'a1b2c3', - 'error': "Problem in your project's configuration. Invalid version in config file.", - 'success': False, - 'builder': mock.ANY, - 'length': 0, + "id": 1, + "state": "finished", + "commit": "a1b2c3", + "error": "Problem in your project's configuration. Invalid version in config file.", + "success": False, + "builder": mock.ANY, + "length": 0, } class TestSyncRepositoryTask(BuildEnvironmentBase): - def _trigger_sync_repository_task(self): sync_repository_task.delay(self.version.pk) diff --git a/readthedocs/rtd_tests/tests/test_imported_file.py b/readthedocs/rtd_tests/tests/test_imported_file.py index 72e464c6471..b47755ffb8a 100644 --- a/readthedocs/rtd_tests/tests/test_imported_file.py +++ b/readthedocs/rtd_tests/tests/test_imported_file.py @@ -10,8 +10,8 @@ from readthedocs.projects.tasks.search import ( _create_imported_files, _create_intersphinx_data, + _sync_imported_files, ) -from readthedocs.projects.tasks.search import _sync_imported_files from readthedocs.sphinx_domains.models import SphinxDomain base_dir = os.path.dirname(os.path.dirname(__file__)) @@ -174,9 +174,9 @@ def test_update_content(self): self._manage_imported_files(self.version, 'commit02', 2) self.assertEqual(ImportedFile.objects.count(), 2) - @override_settings(PRODUCTION_DOMAIN='readthedocs.org') - 
@override_settings(RTD_INTERSPHINX_URL='https://readthedocs.org') - @mock.patch('readthedocs.projects.tasks.builds.os.path.exists') + @override_settings(PRODUCTION_DOMAIN="readthedocs.org") + @override_settings(RTD_INTERSPHINX_URL="https://readthedocs.org") + @mock.patch("readthedocs.doc_builder.director.os.path.exists") def test_create_intersphinx_data(self, mock_exists): mock_exists.return_Value = True @@ -254,8 +254,8 @@ def test_create_intersphinx_data(self, mock_exists): ) self.assertEqual(ImportedFile.objects.count(), 2) - @override_settings(RTD_INTERSPHINX_URL='http://localhost:8080') - @mock.patch('readthedocs.projects.tasks.builds.os.path.exists') + @override_settings(RTD_INTERSPHINX_URL="http://localhost:8080") + @mock.patch("readthedocs.doc_builder.director.os.path.exists") def test_custom_intersphinx_url(self, mock_exists): mock_exists.return_Value = True