diff --git a/docs/builds.rst b/docs/builds.rst index ec36a3e32a4..b3f84c713e2 100644 --- a/docs/builds.rst +++ b/docs/builds.rst @@ -75,7 +75,7 @@ An example in code: .. code-block:: python - update_imported_docs(version) + update_docs_from_vcs(version) if exists('setup.py'): run('python setup.py install') if project.requirements_file: diff --git a/readthedocs/core/management/commands/pull.py b/readthedocs/core/management/commands/pull.py index fa6810e0b52..c6c8d783043 100644 --- a/readthedocs/core/management/commands/pull.py +++ b/readthedocs/core/management/commands/pull.py @@ -19,6 +19,7 @@ class Command(BaseCommand): def handle(self, *args, **options): if args: for slug in args: - tasks.update_imported_docs( - utils.version_from_slug(slug, LATEST).pk + version = utils.version_from_slug(slug, LATEST) + tasks.SyncRepositoryTask().run( + version.pk, ) diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py index 9140c45a59c..4564f7394d6 100644 --- a/readthedocs/core/views/hooks.py +++ b/readthedocs/core/views/hooks.py @@ -12,7 +12,7 @@ from readthedocs.builds.constants import LATEST from readthedocs.projects import constants from readthedocs.projects.models import Project, Feature -from readthedocs.projects.tasks import update_imported_docs +from readthedocs.projects.tasks import SyncRepositoryTask import logging @@ -123,8 +123,12 @@ def _build_url(url, projects, branches): for project in projects: (built, not_building) = build_branches(project, branches) if not built: - # Call update_imported_docs to update tag/branch info - update_imported_docs.delay(project.versions.get(slug=LATEST).pk) + # Call SyncRepositoryTask to update tag/branch info + version = project.versions.get(slug=LATEST) + sync_repository = SyncRepositoryTask() + sync_repository.apply_async( + args=(version.pk,), + ) msg = '(URL Build) Syncing versions for %s' % project.slug log.info(msg) all_built[project.slug] = built diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py index 7995de160de..83e784e742c 100644 --- a/readthedocs/doc_builder/environments.py +++ b/readthedocs/doc_builder/environments.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + """Documentation Builder Environments.""" from __future__ import absolute_import @@ -14,7 +16,7 @@ from readthedocs.core.utils import slugify from django.conf import settings -from django.utils.translation import ugettext_lazy as _, ugettext_noop +from django.utils.translation import ugettext_lazy as _ from docker import Client from docker.utils import create_host_config from docker.errors import APIError as DockerAPIError, DockerException @@ -40,7 +42,8 @@ __all__ = ( 'api_v2', 'BuildCommand', 'DockerBuildCommand', - 'BuildEnvironment', 'LocalEnvironment', 'DockerEnvironment', + 'LocalEnvironment', + 'LocalBuildEnvironment', 'DockerBuildEnvironment', ) @@ -70,7 +73,7 @@ class BuildCommand(BuildCommandResultMixin): def __init__(self, command, cwd=None, shell=False, environment=None, combine_output=True, input_data=None, build_env=None, - bin_path=None, description=None): + bin_path=None, description=None, record_as_success=False): self.command = command self.shell = shell if cwd is None: @@ -93,6 +96,7 @@ def __init__(self, command, cwd=None, shell=False, environment=None, self.description = '' if description is not None: self.description = description + self.record_as_success = record_as_success self.exit_code = None def __str__(self): @@ -180,12 +184,21 @@ def get_command(self): def save(self): """Save this command and 
result via the API.""" + exit_code = self.exit_code + + # Force record this command as success to avoid Build reporting errors + # on commands that are just for checking purposes and do not interferes + # in the Build + if self.record_as_success: + log.warning('Recording command exit_code as success') + exit_code = 0 + data = { 'build': self.build_env.build.get('id'), 'command': self.get_command(), 'description': self.description, 'output': self.output, - 'exit_code': self.exit_code, + 'exit_code': exit_code, 'start_time': self.start_time, 'end_time': self.end_time, } @@ -268,7 +281,100 @@ def get_wrapped_command(self): for part in self.command])))) -class BuildEnvironment(object): +class BaseEnvironment(object): + + """ + Base environment class. + + Used to run arbitrary commands outside a build. + """ + + def __init__(self, project, environment=None): + # TODO: maybe we can remove this Project dependency also + self.project = project + self.environment = environment or {} + self.commands = [] + + def record_command(self, command): + pass + + def _log_warning(self, msg): + log.warning(LOG_TEMPLATE.format( + project=self.project.slug, + version='latest', + msg=msg, + )) + + def run(self, *cmd, **kwargs): + """Shortcut to run command from environment.""" + return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs) + + def run_command_class( + self, cls, cmd, record=None, warn_only=False, + record_as_success=False, **kwargs): + """ + Run command from this environment. + + :param cls: command class to instantiate a command + :param cmd: command (as a list) to execute in this environment + :param record: whether or not to record this particular command + (``False`` implies ``warn_only=True``) + :param warn_only: don't raise an exception on command failure + :param record_as_success: force command ``exit_code`` to be saved as + ``0`` (``True`` implies ``warn_only=True`` and ``record=True``) + """ + if record is None: + # ``self.record`` only exists when called from ``*BuildEnvironment`` + record = getattr(self, 'record', False) + + if not record: + warn_only = True + + if record_as_success: + record = True + warn_only = True + # ``record_as_success`` is needed to instantiate the BuildCommand + kwargs.update({'record_as_success': record_as_success}) + + # Remove PATH from env, and set it to bin_path if it isn't passed in + env_path = self.environment.pop('BIN_PATH', None) + if 'bin_path' not in kwargs and env_path: + kwargs['bin_path'] = env_path + assert 'environment' not in kwargs, "environment can't be passed in via commands." + kwargs['environment'] = self.environment + + # ``build_env`` is passed as ``kwargs`` when it's called from a + # ``*BuildEnvironment`` + build_cmd = cls(cmd, **kwargs) + self.commands.append(build_cmd) + build_cmd.run() + + if record: + # TODO: I don't like how it's handled this entry point here since + # this class should know nothing about a BuildCommand (which are the + # only ones that can be saved/recorded) + self.record_command(build_cmd) + + if build_cmd.failed: + msg = u'Command {cmd} failed'.format(cmd=build_cmd.get_command()) + + if build_cmd.output: + msg += u':\n{out}'.format(out=build_cmd.output) + + if warn_only: + self._log_warning(msg) + else: + raise BuildEnvironmentWarning(msg) + return build_cmd + + +class LocalEnvironment(BaseEnvironment): + + # TODO: BuildCommand name doesn't make sense here, should be just Command + command_class = BuildCommand + + +class BuildEnvironment(BaseEnvironment): """ Base build environment. 
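The new ``BaseEnvironment.run_command_class`` centralizes the ``record`` / ``warn_only`` / ``record_as_success`` handling described in its docstring. A self-contained sketch of that flag resolution, for illustration only (the function name and the ``has_build`` stand-in are not part of the diff):

```python
def resolve_flags(record=None, warn_only=False, record_as_success=False, has_build=False):
    # ``record`` defaults to ``self.record`` on *BuildEnvironment, False otherwise
    if record is None:
        record = has_build
    # commands that are not recorded should never fail the build
    if not record:
        warn_only = True
    # ``record_as_success`` saves the command but forces its exit_code to 0
    if record_as_success:
        record = True
        warn_only = True
    return record, warn_only

assert resolve_flags() == (False, True)
assert resolve_flags(record_as_success=True) == (True, True)
assert resolve_flags(has_build=True) == (True, False)
```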
@@ -303,15 +409,13 @@ class BuildEnvironment(object): def __init__(self, project=None, version=None, build=None, config=None, record=True, environment=None, update_on_success=True): - self.project = project + super(BuildEnvironment, self).__init__(project, environment) self.version = version self.build = build self.config = config self.record = record - self.environment = environment or {} self.update_on_success = update_on_success - self.commands = [] self.failure = None self.start_time = datetime.utcnow() @@ -348,48 +452,28 @@ def handle_exception(self, exc_type, exc_value, _): self.failure = exc_value return True - def run(self, *cmd, **kwargs): - """Shortcut to run command from environment.""" - return self.run_command_class(cls=self.command_class, cmd=cmd, **kwargs) + def record_command(self, command): + command.save() - def run_command_class(self, cls, cmd, **kwargs): - """ - Run command from this environment. - - Use ``cls`` to instantiate a command - - :param warn_only: Don't raise an exception on command failure - """ - warn_only = kwargs.pop('warn_only', False) - # Remove PATH from env, and set it to bin_path if it isn't passed in - env_path = self.environment.pop('BIN_PATH', None) - if 'bin_path' not in kwargs and env_path: - kwargs['bin_path'] = env_path - assert 'environment' not in kwargs, "environment can't be passed in via commands." - kwargs['environment'] = self.environment - kwargs['build_env'] = self - build_cmd = cls(cmd, **kwargs) - self.commands.append(build_cmd) - build_cmd.run() - - # Save to database - if self.record: - build_cmd.save() - - if build_cmd.failed: - msg = u'Command {cmd} failed'.format(cmd=build_cmd.get_command()) + def _log_warning(self, msg): + # :'( + log.warning(LOG_TEMPLATE.format( + project=self.project.slug, + version=self.version.slug, + msg=msg, + )) - if build_cmd.output: - msg += u':\n{out}'.format(out=build_cmd.output) + def run(self, *cmd, **kwargs): + kwargs.update({ + 'build_env': self, + }) + return super(BuildEnvironment, self).run(*cmd, **kwargs) - if warn_only: - log.warning(LOG_TEMPLATE - .format(project=self.project.slug, - version=self.version.slug, - msg=msg)) - else: - raise BuildEnvironmentWarning(msg) - return build_cmd + def run_command_class(self, *cmd, **kwargs): # pylint: disable=arguments-differ + kwargs.update({ + 'build_env': self, + }) + return super(BuildEnvironment, self).run_command_class(*cmd, **kwargs) @property def successful(self): @@ -497,14 +581,14 @@ def update_build(self, state=None): log.exception("Unknown build exception") -class LocalEnvironment(BuildEnvironment): +class LocalBuildEnvironment(BuildEnvironment): - """Local execution environment.""" + """Local execution build environment.""" command_class = BuildCommand -class DockerEnvironment(BuildEnvironment): +class DockerBuildEnvironment(BuildEnvironment): """ Docker build environment, uses docker to contain builds. 
@@ -527,7 +611,7 @@ class DockerEnvironment(BuildEnvironment): def __init__(self, *args, **kwargs): self.docker_socket = kwargs.pop('docker_socket', DOCKER_SOCKET) - super(DockerEnvironment, self).__init__(*args, **kwargs) + super(DockerBuildEnvironment, self).__init__(*args, **kwargs) self.client = None self.container = None self.container_name = slugify( diff --git a/readthedocs/projects/apps.py b/readthedocs/projects/apps.py index 71111f4e8d0..c6c0bf29017 100644 --- a/readthedocs/projects/apps.py +++ b/readthedocs/projects/apps.py @@ -7,6 +7,7 @@ class ProjectsConfig(AppConfig): name = 'readthedocs.projects' def ready(self): - from readthedocs.projects.tasks import UpdateDocsTask + from readthedocs.projects import tasks from readthedocs.worker import app - app.tasks.register(UpdateDocsTask) + app.tasks.register(tasks.SyncRepositoryTask) + app.tasks.register(tasks.UpdateDocsTask) diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py index f346f2fc984..5ff628e899f 100644 --- a/readthedocs/projects/models.py +++ b/readthedocs/projects/models.py @@ -33,7 +33,6 @@ determine_stable_version, version_windows) from readthedocs.restapi.client import api from readthedocs.vcs_support.backends import backend_cls -from readthedocs.vcs_support.base import VCSProject from readthedocs.vcs_support.utils import Lock, NonBlockingLock log = logging.getLogger(__name__) @@ -605,14 +604,24 @@ def has_htmlzip(self, version_slug=LATEST): def sponsored(self): return False - def vcs_repo(self, version=LATEST): + def vcs_repo(self, version=LATEST, environment=None): + """ + Return a Backend object for this project able to handle VCS commands. + + :param environment: environment to run the commands + :type environment: doc_builder.environments.BuildEnvironment + :param version: version slug for the backend (``LATEST`` by default) + :type version: str + """ + # TODO: this seems to be the only method that receives a + # ``version.slug`` instead of a ``Version`` instance (I prefer an + # instance here) + backend = backend_cls.get(self.repo_type) if not backend: repo = None else: - proj = VCSProject( - self.name, self.default_branch, self.checkout_path(version), self.clean_repo) - repo = backend(proj, version) + repo = backend(self, version, environment) return repo def repo_nonblockinglock(self, version, max_lock_age=5): diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py index a670b2a3b3c..71ddb8b7c83 100644 --- a/readthedocs/projects/tasks.py +++ b/readthedocs/projects/tasks.py @@ -45,8 +45,8 @@ from readthedocs.core.utils import send_email, broadcast from readthedocs.doc_builder.config import load_yaml_config from readthedocs.doc_builder.constants import DOCKER_LIMITS -from readthedocs.doc_builder.environments import (LocalEnvironment, - DockerEnvironment) +from readthedocs.doc_builder.environments import (LocalBuildEnvironment, + DockerBuildEnvironment) from readthedocs.doc_builder.exceptions import BuildEnvironmentError from readthedocs.doc_builder.loader import get_builder_class from readthedocs.doc_builder.python_environments import Virtualenv, Conda @@ -63,27 +63,140 @@ HTML_ONLY = getattr(settings, 'HTML_ONLY_PROJECTS', ()) -class UpdateDocsTask(Task): +class SyncRepositoryMixin(object): - """ - The main entry point for updating documentation. + """Mixin that handles the VCS sync/update.""" + + @staticmethod + def get_version(project=None, version_pk=None): + """ + Retrieve version data from the API. 
+ + :param project: project object to sync + :type project: projects.models.Project + :param version_pk: version pk to sync + :type version_pk: int + :returns: a data-complete version object + :rtype: builds.models.APIVersion + """ + assert (project or version_pk), 'project or version_pk is needed' + if version_pk: + version_data = api_v2.version(version_pk).get() + else: + version_data = (api_v2 + .version(project.slug) + .get(slug=LATEST)['objects'][0]) + return APIVersion(**version_data) + + def sync_repo(self): + """Update the project's repository and hit ``sync_versions`` API.""" + # Make Dirs + if not os.path.exists(self.project.doc_path): + os.makedirs(self.project.doc_path) + + if not self.project.vcs_repo(): + raise RepositoryError( + _('Repository type "{repo_type}" unknown').format( + repo_type=self.project.repo_type, + ), + ) + + with self.project.repo_nonblockinglock( + version=self.version, + max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)): + + # Get the actual code on disk + try: + before_vcs.send(sender=self.version) + self._log( + 'Checking out version {slug}: {identifier}'.format( + slug=self.version.slug, + identifier=self.version.identifier, + ), + ) + version_repo = self.project.vcs_repo( + self.version.slug, + # When called from ``SyncRepositoryTask.run`` we don't have + # a ``setup_env`` so we use just ``None`` and commands won't + # be recorded + getattr(self, 'setup_env', None), + ) + version_repo.checkout(self.version.identifier) + finally: + after_vcs.send(sender=self.version) + + # Update tags/version + version_post_data = {'repo': version_repo.repo_url} + + if version_repo.supports_tags: + version_post_data['tags'] = [ + {'identifier': v.identifier, + 'verbose_name': v.verbose_name, + } for v in version_repo.tags + ] + + if version_repo.supports_branches: + version_post_data['branches'] = [ + {'identifier': v.identifier, + 'verbose_name': v.verbose_name, + } for v in version_repo.branches + ] + + try: + # Hit the API ``sync_versions`` which may trigger a new build + # for the stable version + api_v2.project(self.project.pk).sync_versions.post(version_post_data) + except HttpClientError: + log.exception('Sync Versions Exception') + except Exception: + log.exception('Unknown Sync Versions Exception') + + +class SyncRepositoryTask(SyncRepositoryMixin, Task): + + """Entry point to synchronize the VCS documentation.""" + + max_retries = 5 + default_retry_delay = (7 * 60) + name = __name__ + '.sync_repository' + + def run(self, version_pk): # pylint: disable=arguments-differ + """ + Run the VCS synchronization. - It handles all of the logic around whether a project is imported or we - created it. Then it will build the html docs and other requested parts. + :param version_pk: version pk to sync + :type version_pk: int + :returns: whether or not the task ended successfully + :rtype: bool + """ + try: + self.version = self.get_version(version_pk=version_pk) + self.project = self.version.project + self.sync_repo() + return True + # Catch unhandled errors when syncing + except Exception: + log.exception( + 'An unhandled exception was raised during VCS syncing', + ) + return False - `pk` - Primary key of the project to update - `record` - Whether or not to keep a record of the update in the database. Useful - for preventing changes visible to the end-user when running commands - from the shell, for example. +class UpdateDocsTask(SyncRepositoryMixin, Task): + + """ + The main entry point for updating documentation. 
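With ``update_imported_docs`` replaced by ``SyncRepositoryTask``, callers dispatch the sync either through Celery or synchronously, as the webhook views and the ``pull`` management command above do. A minimal usage sketch, assuming ``version`` is a ``Version`` instance with a valid ``pk``:

```python
from readthedocs.projects.tasks import SyncRepositoryTask

# Asynchronously, the way the webhook views queue it
SyncRepositoryTask().apply_async(args=(version.pk,))

# Synchronously, the way the ``pull`` management command calls it;
# returns True/False instead of raising on unhandled errors
succeeded = SyncRepositoryTask().run(version.pk)
```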
+ + It handles all of the logic around whether a project is imported, we created + it or a webhook is received. Then it will sync the repository and build the + html docs if needed. """ max_retries = 5 default_retry_delay = (7 * 60) name = __name__ + '.update_docs' + # TODO: the argument from the __init__ are used only in tests def __init__(self, build_env=None, python_env=None, config=None, force=False, search=True, localmedia=True, build=None, project=None, version=None): @@ -114,7 +227,7 @@ def _log(self, msg): def run(self, pk, version_pk=None, build_pk=None, record=True, docker=False, search=True, force=False, localmedia=True, **__): """ - Run a documentation build. + Run a documentation sync n' build. This is fully wrapped in exception handling to account for a number of failure cases. We first run a few commands in a local build environment, @@ -122,22 +235,24 @@ def run(self, pk, version_pk=None, build_pk=None, record=True, build output page where the build is marked as finished in between the local environment steps and the docker build steps. - If a failure is raised, or the build is not successful, return ``False``, - otherwise, ``True``. + If a failure is raised, or the build is not successful, return + ``False``, otherwise, ``True``. Unhandled exceptions raise a generic user facing error, which directs the user to bug us. It is therefore a benefit to have as few unhandled errors as possible. :param pk int: Project id - :param version_pk int: Project Version id - :param build_pk int: Build id + :param version_pk int: Project Version id (latest if None) + :param build_pk int: Build id (if None, commands are not recorded) :param record bool: record a build object in the database :param docker bool: use docker to build the project :param search bool: update search :param force bool: force Sphinx build - :param localmedia: update localmedia - :returns: if build was successful or not + :param localmedia bool: update localmedia + + :returns: whether build was successful or not + :rtype: bool """ try: @@ -152,6 +267,7 @@ def run(self, pk, version_pk=None, build_pk=None, record=True, setup_successful = self.run_setup(record=record) if not setup_successful: return False + # Catch unhandled errors in the setup step except Exception as e: # noqa log.exception( @@ -191,7 +307,7 @@ def run_setup(self, record=True): Return True if successful. 
""" - self.setup_env = LocalEnvironment( + self.setup_env = LocalBuildEnvironment( project=self.project, version=self.version, build=self.build, @@ -246,9 +362,9 @@ def run_build(self, docker=False, record=True): env_vars = self.get_env_vars() if docker or settings.DOCKER_ENABLE: - env_cls = DockerEnvironment + env_cls = DockerBuildEnvironment else: - env_cls = LocalEnvironment + env_cls = LocalBuildEnvironment self.build_env = env_cls(project=self.project, version=self.version, config=self.config, build=self.build, record=record, environment=env_vars) @@ -267,7 +383,7 @@ def run_build(self, docker=False, record=True): config=self.config) try: - self.setup_environment() + self.setup_python_environment() # TODO the build object should have an idea of these states, extend # the model to include an idea of these outcomes @@ -299,17 +415,6 @@ def get_project(project_pk): project_data = api_v2.project(project_pk).get() return APIProject(**project_data) - @staticmethod - def get_version(project, version_pk): - """Ensure we're using a sane version.""" - if version_pk: - version_data = api_v2.version(version_pk).get() - else: - version_data = (api_v2 - .version(project.slug) - .get(slug=LATEST)['objects'][0]) - return APIVersion(**version_data) - @staticmethod def get_build(build_pk): """ @@ -335,7 +440,7 @@ def setup_vcs(self): self.setup_env.update_build(state=BUILD_STATE_CLONING) self._log(msg='Updating docs from VCS') - update_imported_docs(self.version.pk) + self.sync_repo() commit = self.project.vcs_repo(self.version.slug).commit if commit: self.build['commit'] = commit @@ -420,7 +525,7 @@ def update_app_instances(self, html=False, localmedia=False, search=False, callback=sync_callback.s(version_pk=self.version.pk, commit=self.build['commit']), ) - def setup_environment(self): + def setup_python_environment(self): """ Build the virtualenv and install the project into it. @@ -549,88 +654,6 @@ def send_notifications(self): send_notifications.delay(self.version.pk, build_pk=self.build['id']) -@app.task() -def update_imported_docs(version_pk): - """ - Check out or update the given project's repository. - - :param version_pk: Version id to update - """ - version_data = api_v2.version(version_pk).get() - version = APIVersion(**version_data) - project = version.project - ret_dict = {} - - # Make Dirs - if not os.path.exists(project.doc_path): - os.makedirs(project.doc_path) - - if not project.vcs_repo(): - raise RepositoryError( - _('Repository type "{repo_type}" unknown').format( - repo_type=project.repo_type - ) - ) - - with project.repo_nonblockinglock( - version=version, - max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)): - - # Get the actual code on disk - try: - before_vcs.send(sender=version) - if version: - log.info( - LOG_TEMPLATE.format( - project=project.slug, - version=version.slug, - msg='Checking out version {slug}: {identifier}'.format( - slug=version.slug, - identifier=version.identifier - ) - ) - ) - version_slug = version.slug - version_repo = project.vcs_repo(version_slug) - - ret_dict['checkout'] = version_repo.checkout(version.identifier) - else: - # Does this ever get called? 
- log.info(LOG_TEMPLATE.format( - project=project.slug, version=version.slug, msg='Updating to latest revision')) - version_slug = LATEST - version_repo = project.vcs_repo(version_slug) - ret_dict['checkout'] = version_repo.update() - finally: - after_vcs.send(sender=version) - - # Update tags/version - - version_post_data = {'repo': version_repo.repo_url} - - if version_repo.supports_tags: - version_post_data['tags'] = [ - {'identifier': v.identifier, - 'verbose_name': v.verbose_name, - } for v in version_repo.tags - ] - - if version_repo.supports_branches: - version_post_data['branches'] = [ - {'identifier': v.identifier, - 'verbose_name': v.verbose_name, - } for v in version_repo.branches - ] - - try: - api_v2.project(project.pk).sync_versions.post(version_post_data) - except HttpClientError: - log.exception("Sync Versions Exception") - except Exception: - log.exception("Unknown Sync Versions Exception") - return ret_dict - - # Web tasks @app.task(queue='web') def sync_files(project_pk, version_pk, hostname=None, html=False, diff --git a/readthedocs/rtd_tests/tests/test_builds.py b/readthedocs/rtd_tests/tests/test_builds.py index 19f383aa3e8..b53cd80b7ad 100644 --- a/readthedocs/rtd_tests/tests/test_builds.py +++ b/readthedocs/rtd_tests/tests/test_builds.py @@ -9,7 +9,7 @@ from readthedocs.projects.models import Project from readthedocs.doc_builder.config import ConfigWrapper -from readthedocs.doc_builder.environments import LocalEnvironment +from readthedocs.doc_builder.environments import LocalBuildEnvironment from readthedocs.doc_builder.python_environments import Virtualenv from readthedocs.doc_builder.loader import get_builder_class from readthedocs.projects.tasks import UpdateDocsTask @@ -41,7 +41,7 @@ def test_build(self): }) self.mocks.patches['html_build'].stop() - build_env = LocalEnvironment(project=project, version=version, build={}) + build_env = LocalBuildEnvironment(project=project, version=version, build={}) python_env = Virtualenv(version=version, build_env=build_env) config = ConfigWrapper(version=version, yaml_config=create_load()()[0]) task = UpdateDocsTask(build_env=build_env, project=project, python_env=python_env, @@ -65,7 +65,7 @@ def test_build_respects_pdf_flag(self): versions=[fixture()]) version = project.versions.all()[0] - build_env = LocalEnvironment(project=project, version=version, build={}) + build_env = LocalBuildEnvironment(project=project, version=version, build={}) python_env = Virtualenv(version=version, build_env=build_env) config = ConfigWrapper(version=version, yaml_config=create_load()()[0]) task = UpdateDocsTask(build_env=build_env, project=project, python_env=python_env, @@ -90,7 +90,7 @@ def test_build_respects_epub_flag(self): versions=[fixture()]) version = project.versions.all()[0] - build_env = LocalEnvironment(project=project, version=version, build={}) + build_env = LocalBuildEnvironment(project=project, version=version, build={}) python_env = Virtualenv(version=version, build_env=build_env) config = ConfigWrapper(version=version, yaml_config=create_load()()[0]) task = UpdateDocsTask(build_env=build_env, project=project, python_env=python_env, @@ -114,7 +114,7 @@ def test_build_respects_yaml(self): versions=[fixture()]) version = project.versions.all()[0] - build_env = LocalEnvironment(project=project, version=version, build={}) + build_env = LocalBuildEnvironment(project=project, version=version, build={}) python_env = Virtualenv(version=version, build_env=build_env) config = ConfigWrapper(version=version, yaml_config=create_load({ 
'formats': ['epub'] @@ -136,7 +136,7 @@ def test_builder_comments(self): allow_comments=True, versions=[fixture()]) version = project.versions.all()[0] - build_env = LocalEnvironment(version=version, project=project, build={}) + build_env = LocalBuildEnvironment(version=version, project=project, build={}) python_env = Virtualenv(version=version, build_env=build_env) builder_class = get_builder_class(project.documentation_type) builder = builder_class(build_env, python_env) @@ -149,7 +149,7 @@ def test_builder_no_comments(self): allow_comments=False, versions=[fixture()]) version = project.versions.all()[0] - build_env = LocalEnvironment(version=version, project=project, build={}) + build_env = LocalBuildEnvironment(version=version, project=project, build={}) python_env = Virtualenv(version=version, build_env=build_env) builder_class = get_builder_class(project.documentation_type) builder = builder_class(build_env, python_env) @@ -171,7 +171,7 @@ def test_build_pdf_latex_failures(self): version = project.versions.all()[0] assert project.conf_dir() == '/tmp/rtd' - build_env = LocalEnvironment(project=project, version=version, build={}) + build_env = LocalBuildEnvironment(project=project, version=version, build={}) python_env = Virtualenv(version=version, build_env=build_env) config = ConfigWrapper(version=version, yaml_config=create_load()()[0]) task = UpdateDocsTask(build_env=build_env, project=project, python_env=python_env, @@ -213,7 +213,7 @@ def test_build_pdf_latex_not_failure(self): version = project.versions.all()[0] assert project.conf_dir() == '/tmp/rtd' - build_env = LocalEnvironment(project=project, version=version, build={}) + build_env = LocalBuildEnvironment(project=project, version=version, build={}) python_env = Virtualenv(version=version, build_env=build_env) config = ConfigWrapper(version=version, yaml_config=create_load()()[0]) task = UpdateDocsTask(build_env=build_env, project=project, python_env=python_env, diff --git a/readthedocs/rtd_tests/tests/test_celery.py b/readthedocs/rtd_tests/tests/test_celery.py index 49db6b96dd0..e9124baf104 100644 --- a/readthedocs/rtd_tests/tests/test_celery.py +++ b/readthedocs/rtd_tests/tests/test_celery.py @@ -8,7 +8,7 @@ from django_dynamic_fixture import get from mock import patch, MagicMock -from readthedocs.builds.constants import BUILD_STATE_INSTALLING, BUILD_STATE_FINISHED +from readthedocs.builds.constants import BUILD_STATE_INSTALLING, BUILD_STATE_FINISHED, LATEST from readthedocs.builds.models import Build from readthedocs.projects.models import Project from readthedocs.projects import tasks @@ -64,7 +64,7 @@ def test_clear_artifacts(self): self.assertTrue(result.successful()) self.assertFalse(exists(directory)) - @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_environment', new=MagicMock) + @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_python_environment', new=MagicMock) @patch('readthedocs.projects.tasks.UpdateDocsTask.build_docs', new=MagicMock) @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_vcs', new=MagicMock) def test_update_docs(self): @@ -79,7 +79,7 @@ def test_update_docs(self): intersphinx=False) self.assertTrue(result.successful()) - @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_environment', new=MagicMock) + @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_python_environment', new=MagicMock) @patch('readthedocs.projects.tasks.UpdateDocsTask.build_docs', new=MagicMock) @patch('readthedocs.doc_builder.environments.BuildEnvironment.update_build', new=MagicMock) 
@patch('readthedocs.projects.tasks.UpdateDocsTask.setup_vcs') @@ -97,7 +97,7 @@ def test_update_docs_unexpected_setup_exception(self, mock_setup_vcs): intersphinx=False) self.assertTrue(result.successful()) - @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_environment', new=MagicMock) + @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_python_environment', new=MagicMock) @patch('readthedocs.projects.tasks.UpdateDocsTask.setup_vcs', new=MagicMock) @patch('readthedocs.doc_builder.environments.BuildEnvironment.update_build', new=MagicMock) @patch('readthedocs.projects.tasks.UpdateDocsTask.build_docs') @@ -115,7 +115,11 @@ def test_update_docs_unexpected_build_exception(self, mock_build_docs): intersphinx=False) self.assertTrue(result.successful()) - def test_update_imported_doc(self): + def test_sync_repository(self): + version = self.project.versions.get(slug=LATEST) with mock_api(self.repo): - result = tasks.update_imported_docs.delay(self.project.pk) + sync_repository = tasks.SyncRepositoryTask() + result = sync_repository.apply_async( + args=(version.pk,), + ) self.assertTrue(result.successful()) diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py index 8e928d27dae..880eb5d4397 100644 --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -24,7 +24,7 @@ from readthedocs.builds.models import Version from readthedocs.doc_builder.config import ConfigWrapper from readthedocs.doc_builder.environments import ( - BuildCommand, DockerBuildCommand, DockerEnvironment, LocalEnvironment) + BuildCommand, DockerBuildCommand, DockerBuildEnvironment, LocalBuildEnvironment) from readthedocs.doc_builder.exceptions import BuildEnvironmentError from readthedocs.doc_builder.python_environments import Virtualenv from readthedocs.projects.models import Project @@ -36,7 +36,7 @@ SAMPLE_UTF8_BYTES = SAMPLE_UNICODE.encode('utf-8') -class TestLocalEnvironment(TestCase): +class TestLocalBuildEnvironment(TestCase): """Test execution and exception handling in environment.""" fixtures = ['test_data'] @@ -58,7 +58,7 @@ def test_normal_execution(self): }) type(self.mocks.process).returncode = PropertyMock(return_value=0) - build_env = LocalEnvironment( + build_env = LocalBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -92,12 +92,12 @@ def test_normal_execution(self): def test_incremental_state_update_with_no_update(self): """Build updates to a non-finished state when update_on_success=True.""" build_envs = [ - LocalEnvironment( + LocalBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, ), - LocalEnvironment( + LocalBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -130,7 +130,7 @@ def test_failing_execution(self): }) type(self.mocks.process).returncode = PropertyMock(return_value=1) - build_env = LocalEnvironment( + build_env = LocalBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -164,7 +164,7 @@ def test_failing_execution(self): def test_failing_execution_with_caught_exception(self): """Build in failing state with BuildEnvironmentError exception.""" - build_env = LocalEnvironment( + build_env = LocalBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -197,7 +197,7 @@ def test_failing_execution_with_caught_exception(self): def 
test_failing_execution_with_unexpected_exception(self): """Build in failing state with exception from code.""" - build_env = LocalEnvironment( + build_env = LocalBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -230,7 +230,7 @@ def test_failing_execution_with_unexpected_exception(self): }) -class TestDockerEnvironment(TestCase): +class TestDockerBuildEnvironment(TestCase): """Test docker build environment.""" @@ -248,7 +248,7 @@ def tearDown(self): def test_container_id(self): """Test docker build command.""" - docker = DockerEnvironment( + docker = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -257,7 +257,7 @@ def test_container_id(self): def test_environment_successful_build(self): """A successful build exits cleanly and reports the build output.""" - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -286,7 +286,7 @@ def test_environment_successful_build(self): def test_environment_successful_build_without_update(self): """A successful build exits cleanly and doesn't update build.""" - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -304,7 +304,7 @@ def test_environment_successful_build_without_update(self): def test_environment_failed_build_without_update_but_with_error(self): """A failed build exits cleanly and doesn't update build.""" - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -336,7 +336,7 @@ def test_environment_failed_build_without_update_but_with_error(self): def test_connection_failure(self): """Connection failure on to docker socket should raise exception.""" self.mocks.configure_mock('docker', {'side_effect': DockerException}) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -379,7 +379,7 @@ def test_api_failure(self): 'Failure creating container') }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -418,7 +418,7 @@ def test_api_failure_on_docker_memory_limit(self): 'Failure creating container'), }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -454,7 +454,7 @@ def test_api_failure_on_error_in_exit(self): 'kill.side_effect': BuildEnvironmentError('Failed') }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -492,7 +492,7 @@ def test_api_failure_returns_previous_error_on_error_in_exit(self): 'kill.side_effect': BuildEnvironmentError('Outer failed') }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -527,7 +527,7 @@ def test_command_execution(self): 'exec_inspect.return_value': {'ExitCode': 1}, }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -576,7 +576,7 @@ def test_command_execution_cleanup_exception(self): ) }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( 
version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -615,7 +615,7 @@ def test_container_already_exists(self): 'exec_inspect.return_value': {'ExitCode': 0}, }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, @@ -667,7 +667,7 @@ def test_container_timeout(self): 'exec_inspect.return_value': {'ExitCode': 0}, }) - build_env = DockerEnvironment( + build_env = DockerBuildEnvironment( version=self.version, project=self.project, build={'id': DUMMY_BUILD_ID}, diff --git a/readthedocs/vcs_support/backends/bzr.py b/readthedocs/vcs_support/backends/bzr.py index 6c58a667de1..7d7234b84f2 100644 --- a/readthedocs/vcs_support/backends/bzr.py +++ b/readthedocs/vcs_support/backends/bzr.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Bazaar-related utilities.""" from __future__ import absolute_import @@ -21,7 +22,7 @@ class Backend(BaseVCS): def update(self): super(Backend, self).update() - retcode = self.run('bzr', 'status')[0] + retcode = self.run('bzr', 'status', record=False)[0] if retcode == 0: self.up() else: @@ -44,7 +45,7 @@ def clone(self): @property def tags(self): - retcode, stdout = self.run('bzr', 'tags')[:2] + retcode, stdout = self.run('bzr', 'tags', record_as_success=True)[:2] # error (or no tags found) if retcode != 0: return [] diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py index a77628bb2f6..d2e623de1c6 100644 --- a/readthedocs/vcs_support/backends/git.py +++ b/readthedocs/vcs_support/backends/git.py @@ -52,7 +52,7 @@ def update(self): self.checkout() def repo_exists(self): - code, _, _ = self.run('git', 'status') + code, _, _ = self.run('git', 'status', record=False) return code == 0 def fetch(self): @@ -65,22 +65,22 @@ def checkout_revision(self, revision=None): branch = self.default_branch or self.fallback_branch revision = 'origin/%s' % branch - code, out, err = self.run('git', 'checkout', - '--force', '--quiet', revision) + code, out, err = self.run( + 'git', 'checkout', '--force', revision) if code != 0: log.warning("Failed to checkout revision '%s': %s", revision, code) return [code, out, err] def clone(self): - code, _, _ = self.run('git', 'clone', '--recursive', '--quiet', - self.repo_url, '.') + code, _, _ = self.run( + 'git', 'clone', '--recursive', self.repo_url, '.') if code != 0: raise RepositoryError @property def tags(self): - retcode, stdout, _ = self.run('git', 'show-ref', '--tags') + retcode, stdout, _ = self.run('git', 'show-ref', '--tags', record_as_success=True) # error (or no tags found) if retcode != 0: return [] @@ -205,7 +205,7 @@ def find_ref(self, ref): return ref def ref_exists(self, ref): - code, _, _ = self.run('git', 'show-ref', ref) + code, _, _ = self.run('git', 'show-ref', ref, record_as_success=True) return code == 0 @property diff --git a/readthedocs/vcs_support/backends/hg.py b/readthedocs/vcs_support/backends/hg.py index c1f60ef8f6e..6aebe57f259 100644 --- a/readthedocs/vcs_support/backends/hg.py +++ b/readthedocs/vcs_support/backends/hg.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Mercurial-related utilities.""" from __future__ import absolute_import from readthedocs.projects.exceptions import RepositoryError @@ -14,7 +15,7 @@ class Backend(BaseVCS): def update(self): super(Backend, self).update() - retcode = self.run('hg', 'status')[0] + retcode = self.run('hg', 'status', record=False)[0] if retcode == 0: return self.pull() return self.clone() @@ -23,7 +24,7 @@ def pull(self): 
(pull_retcode, _, _) = self.run('hg', 'pull') if pull_retcode != 0: raise RepositoryError - (update_retcode, stdout, stderr) = self.run('hg', 'update', '-C') + (update_retcode, stdout, stderr) = self.run('hg', 'update', '--clean') if update_retcode != 0: raise RepositoryError return (update_retcode, stdout, stderr) @@ -37,7 +38,7 @@ def clone(self): @property def branches(self): - retcode, stdout = self.run('hg', 'branches', '-q')[:2] + retcode, stdout = self.run('hg', 'branches', record_as_success=True)[:2] # error (or no tags found) if retcode != 0: return [] @@ -50,7 +51,7 @@ def parse_branches(self, data): @property def tags(self): - retcode, stdout = self.run('hg', 'tags')[:2] + retcode, stdout = self.run('hg', 'tags', record_as_success=True)[:2] # error (or no tags found) if retcode != 0: return [] @@ -86,16 +87,16 @@ def parse_tags(self, data): @property def commit(self): - _, stdout = self.run('hg', 'id', '-i')[:2] + _, stdout = self.run('hg', 'identify', '--id')[:2] return stdout.strip() def checkout(self, identifier=None): super(Backend, self).checkout() if not identifier: identifier = 'tip' - retcode = self.run('hg', 'status')[0] + retcode = self.run('hg', 'status', record=False)[0] if retcode == 0: self.run('hg', 'pull') - return self.run('hg', 'update', '-C', identifier) - self.clone() - return self.run('hg', 'update', '-C', identifier) + else: + self.clone() + return self.run('hg', 'update', '--clean', identifier) diff --git a/readthedocs/vcs_support/backends/svn.py b/readthedocs/vcs_support/backends/svn.py index 5c952e30882..9d456c86226 100644 --- a/readthedocs/vcs_support/backends/svn.py +++ b/readthedocs/vcs_support/backends/svn.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Subversion-related utilities.""" from __future__ import absolute_import @@ -33,7 +34,7 @@ def update(self): super(Backend, self).update() # For some reason `svn status` gives me retcode 0 in non-svn # directories that's why I use `svn info` here. 
- retcode = self.run('svn', 'info')[0] + retcode = self.run('svn', 'info', record_as_success=True)[0] if retcode == 0: self.up() else: @@ -56,7 +57,7 @@ def co(self, identifier=None): url = self.base_url + identifier else: url = self.repo_url - retcode, out, err = self.run('svn', 'checkout', '--quiet', url, '.') + retcode, out, err = self.run('svn', 'checkout', url, '.') if retcode != 0: raise RepositoryError return retcode, out, err @@ -64,7 +65,7 @@ def co(self, identifier=None): @property def tags(self): retcode, stdout = self.run('svn', 'list', '%s/tags/' - % self.base_url)[:2] + % self.base_url, record_as_success=True)[:2] # error (or no tags found) if retcode != 0: return [] @@ -98,7 +99,7 @@ def commit(self): def checkout(self, identifier=None): super(Backend, self).checkout() - retcode = self.run('svn', 'info')[0] + retcode = self.run('svn', 'info', record=False)[0] if retcode == 0: result = self.up() else: diff --git a/readthedocs/vcs_support/base.py b/readthedocs/vcs_support/base.py index 3416de0ff11..9a623fcc1a2 100644 --- a/readthedocs/vcs_support/base.py +++ b/readthedocs/vcs_support/base.py @@ -1,12 +1,11 @@ +# -*- coding: utf-8 -*- + """Base classes for VCS backends.""" from __future__ import absolute_import from builtins import object import logging import os import shutil -import subprocess -from collections import namedtuple -from os.path import basename log = logging.getLogger(__name__) @@ -33,60 +32,12 @@ def __repr__(self): self.verbose_name) -class VCSProject(namedtuple("VCSProject", - "name default_branch working_dir repo_url")): - - """Transient object to encapsulate a projects stuff""" - - pass - - -class BaseCLI(object): - - """Helper class for CLI-heavy classes.""" - - log_tmpl = u'VCS[{name}:{ident}]: {args}' - - def __call__(self, *args): - return self.run(args) - - def run(self, *args): - """:param bits: list of command and args. See `subprocess` docs""" - process = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=self.working_dir, shell=False, - env=self.env) - try: - log.info(self.log_tmpl.format(ident=basename(self.working_dir), - name=self.name, - args=' '.join(args))) - except UnicodeDecodeError: - # >:x - pass - stdout, stderr = process.communicate() - try: - log.info(self.log_tmpl.format(ident=basename(self.working_dir), - name=self.name, - args=stdout)) - except UnicodeDecodeError: - # >:x - pass - return ( - process.returncode, - stdout.decode('utf-8'), - stderr.decode('utf-8')) - - @property - def env(self): - return os.environ.copy() - - -class BaseVCS(BaseCLI): +class BaseVCS(object): """ Base for VCS Classes. - Built on top of the BaseCLI. + VCS commands are ran inside a ``LocalEnvironment``. """ supports_tags = False # Whether this VCS supports tags or not. 
@@ -98,11 +49,17 @@ class BaseVCS(BaseCLI): # Defining a base API, so we'll have unused args # pylint: disable=unused-argument - def __init__(self, project, version, **kwargs): + def __init__(self, project, version_slug, environment=None, **kwargs): self.default_branch = project.default_branch self.name = project.name - self.repo_url = project.repo_url - self.working_dir = project.working_dir + self.repo_url = project.clean_repo + self.working_dir = project.checkout_path(version_slug) + + from readthedocs.doc_builder.environments import LocalEnvironment + self.environment = environment or LocalEnvironment(project) + + # Update the env variables with the proper VCS env variables + self.environment.environment.update(self.env) def check_working_dir(self): if not os.path.exists(self.working_dir): @@ -113,6 +70,15 @@ def make_clean_working_dir(self): shutil.rmtree(self.working_dir, ignore_errors=True) self.check_working_dir() + @property + def env(self): + environment = os.environ.copy() + + # TODO: kind of a hack + del environment['PATH'] + + return environment + def update(self): """ Update a local copy of the repository in self.working_dir. @@ -122,6 +88,16 @@ def update(self): """ self.check_working_dir() + def run(self, *cmd, **kwargs): + kwargs.update({ + 'cwd': self.working_dir, + 'shell': False, + }) + + build_cmd = self.environment.run(*cmd, **kwargs) + # Return a tuple to keep compatibility + return (build_cmd.exit_code, build_cmd.output, build_cmd.error) + # ========================================================================= # Tag / Branch related methods # These methods only apply if supports_tags = True and/or
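Since ``BaseVCS`` no longer shells out through ``BaseCLI``, backends have their commands run by an environment wired in through ``Project.vcs_repo()``. A rough sketch of the call chain under these assumptions: an existing ``project`` and, optionally, a ``build_env`` (a ``*BuildEnvironment``) used to record the commands.

```python
# ``environment`` may be a *BuildEnvironment to record commands against a build,
# or None, in which case a plain LocalEnvironment(project) is created and
# nothing is recorded.
repo = project.vcs_repo(version='latest', environment=build_env)

# BaseVCS.run() proxies to environment.run() with cwd/shell fixed and keeps the
# old (exit_code, stdout, stderr) tuple for compatibility.
code, stdout, stderr = repo.run('git', 'status', record=False)
if code != 0:
    repo.clone()
```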