Skip to content

Feature flag: remove unused ones #10423

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 19 commits into from
Jun 14, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 0 additions & 54 deletions readthedocs/builds/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,60 +128,6 @@ def _check_suspicious_path(self, path):
log.error(msg, path=str(path), resolved_path=str(resolved_path))
raise SuspiciousFileOperation(msg)

def sync_directory(self, source, destination):
    """
    Sync a directory recursively to storage.

    Overwrites files in remote storage with files from ``source``
    (no timestamp/hash checking — every regular file is re-uploaded).
    Removes files and folders in remote storage that are not present
    in ``source``. Symlinks found under ``source`` are skipped, never
    uploaded.

    :param source: the source path on the local disk
    :param destination: the destination path in storage
    :raises SuspiciousFileOperation: if ``destination`` is the storage
        root (which would sync/delete everything), or if ``source``
        fails ``self._check_suspicious_path``.
    """
    # Refuse to operate on the storage root: the trailing delete pass
    # below would otherwise be able to wipe all remote storage.
    if destination in ('', '/'):
        raise SuspiciousFileOperation('Syncing all storage cannot be right')

    source = Path(source)
    self._check_suspicious_path(source)

    log.debug(
        'Syncing to media storage.',
        source=str(source),
        destination=destination,
    )

    # Track what was uploaded so the delete pass below knows what to keep.
    copied_files = set()
    copied_dirs = set()
    for filepath in source.iterdir():
        sub_destination = self.join(destination, filepath.name)
        # Don't follow symlinks when uploading to storage.
        if filepath.is_symlink():
            log.info(
                "Skipping symlink upload.",
                path_resolved=str(filepath.resolve()),
            )
            continue

        if filepath.is_dir():
            # Recursively sync the subdirectory
            self.sync_directory(filepath, sub_destination)
            copied_dirs.add(filepath.name)
        elif filepath.is_file():
            with safe_open(filepath, "rb") as fd:
                self.save(sub_destination, fd)
            copied_files.add(filepath.name)

    # Remove files that are not present in ``source``
    dest_folders, dest_files = self.listdir(self._dirpath(destination))
    for folder in dest_folders:
        if folder not in copied_dirs:
            self.delete_directory(self.join(destination, folder))
    for filename in dest_files:
        if filename not in copied_files:
            filepath = self.join(destination, filename)
            log.debug('Deleting file from media storage.', filepath=filepath)
            self.delete(filepath)

@cached_property
def _rclone(self):
    # Abstract hook: concrete storage backends must override this.
    # NOTE(review): presumably returns a configured rclone transfer
    # client (inferred from the name) — confirm against subclasses.
    raise NotImplementedError
Expand Down
5 changes: 1 addition & 4 deletions readthedocs/builds/tests/test_trigger_build.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import pytest

from readthedocs.builds.constants import (
BUILD_FINAL_STATES,
BUILD_STATE_BUILDING,
BUILD_STATE_CANCELLED,
BUILD_STATE_CLONING,
Expand All @@ -15,7 +14,7 @@
)
from readthedocs.builds.models import Build, Version
from readthedocs.core.utils import trigger_build
from readthedocs.projects.models import Feature, Project
from readthedocs.projects.models import Project


@pytest.mark.django_db
Expand All @@ -24,8 +23,6 @@ class TestCancelOldBuilds:
def setup(self):
self.project = fixture.get(Project)
self.version = fixture.get(Version, project=self.project)
self.feature = fixture.get(Feature, feature_id=Feature.CANCEL_OLD_BUILDS)
self.feature.projects.add(self.project)

@pytest.mark.parametrize(
"state",
Expand Down
73 changes: 36 additions & 37 deletions readthedocs/core/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def prepare_build(
# Avoid circular import
from readthedocs.builds.models import Build
from readthedocs.builds.tasks import send_build_notifications
from readthedocs.projects.models import Feature, Project, WebHookEvent
from readthedocs.projects.models import Project, WebHookEvent
from readthedocs.projects.tasks.builds import update_docs_task
from readthedocs.projects.tasks.utils import send_external_build_status

Expand Down Expand Up @@ -118,45 +118,44 @@ def prepare_build(
)

# Reduce overhead when doing multiple push on the same version.
if project.has_feature(Feature.CANCEL_OLD_BUILDS):
running_builds = (
Build.objects
.filter(
project=project,
version=version,
).exclude(
state__in=BUILD_FINAL_STATES,
).exclude(
pk=build.pk,
)
running_builds = (
Build.objects.filter(
project=project,
version=version,
)
.exclude(
state__in=BUILD_FINAL_STATES,
)
.exclude(
pk=build.pk,
)
)
if running_builds.count() > 0:
log.warning(
"Canceling running builds automatically due a new one arrived.",
running_builds=running_builds.count(),
)
if running_builds.count() > 0:
log.warning(
"Canceling running builds automatically due a new one arrived.",
running_builds=running_builds.count(),
)

# If there are builds triggered/running for this particular project and version,
# we cancel all of them and trigger a new one for the latest commit received.
for running_build in running_builds:
cancel_build(running_build)
# If there are builds triggered/running for this particular project and version,
# we cancel all of them and trigger a new one for the latest commit received.
for running_build in running_builds:
cancel_build(running_build)

# Start the build in X minutes and mark it as limited
if project.has_feature(Feature.LIMIT_CONCURRENT_BUILDS):
limit_reached, _, max_concurrent_builds = Build.objects.concurrent(project)
if limit_reached:
log.warning(
'Delaying tasks at trigger step due to concurrency limit.',
)
# Delay the start of the build for the build retry delay.
# We're still triggering the task, but it won't run immediately,
# and the user will be alerted in the UI from the Error below.
options['countdown'] = settings.RTD_BUILDS_RETRY_DELAY
options['max_retries'] = settings.RTD_BUILDS_MAX_RETRIES
build.error = BuildMaxConcurrencyError.message.format(
limit=max_concurrent_builds,
)
build.save()
limit_reached, _, max_concurrent_builds = Build.objects.concurrent(project)
if limit_reached:
log.warning(
"Delaying tasks at trigger step due to concurrency limit.",
)
# Delay the start of the build for the build retry delay.
# We're still triggering the task, but it won't run immediately,
# and the user will be alerted in the UI from the Error below.
options["countdown"] = settings.RTD_BUILDS_RETRY_DELAY
options["max_retries"] = settings.RTD_BUILDS_MAX_RETRIES
build.error = BuildMaxConcurrencyError.message.format(
limit=max_concurrent_builds,
)
build.save()

return (
update_docs_task.signature(
Expand Down Expand Up @@ -263,7 +262,7 @@ def cancel_build(build):
def send_email(
recipient, subject, template, template_html, context=None, request=None,
from_email=None, **kwargs
): # pylint: disable=unused-argument
):
"""
Alter context passed in and call email send task.

Expand Down
25 changes: 7 additions & 18 deletions readthedocs/doc_builder/backends/sphinx.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,8 +292,10 @@ def append_conf(self):
raise UserFileNotFound(
UserFileNotFound.FILE_NOT_FOUND.format(self.config_file)
)
except IOError:
raise ProjectConfigurationError(ProjectConfigurationError.NOT_FOUND)
except IOError as exc:
raise ProjectConfigurationError(
ProjectConfigurationError.NOT_FOUND
) from exc

# Append config to project conf file
tmpl = template_loader.get_template('doc_builder/conf.py.tmpl')
Expand All @@ -320,7 +322,6 @@ def build(self):
*self.get_sphinx_cmd(),
'-T',
'-E',
*self.sphinx_parallel_arg(),
]
if self.config.sphinx.fail_on_warning:
build_command.extend(["-W", "--keep-going"])
Expand Down Expand Up @@ -359,38 +360,27 @@ def get_sphinx_cmd(self):
'sphinx',
)

def sphinx_parallel_arg(self):
    """Return extra Sphinx CLI arguments enabling parallel builds.

    Yields ``['-j', 'auto']`` when the project has the
    ``SPHINX_PARALLEL`` feature flag enabled, otherwise an empty list.
    """
    parallel_enabled = self.project.has_feature(Feature.SPHINX_PARALLEL)
    return ["-j", "auto"] if parallel_enabled else []


class HtmlBuilder(BaseSphinx):
relative_output_dir = "html"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocs'
if self.project.has_feature(Feature.USE_SPHINX_BUILDERS):
self.sphinx_builder = 'html'
self.sphinx_builder = "html"


class HtmlDirBuilder(HtmlBuilder):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocsdirhtml'
if self.project.has_feature(Feature.USE_SPHINX_BUILDERS):
self.sphinx_builder = 'dirhtml'
self.sphinx_builder = "dirhtml"


class SingleHtmlBuilder(HtmlBuilder):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.sphinx_builder = 'readthedocssinglehtml'
if self.project.has_feature(Feature.USE_SPHINX_BUILDERS):
self.sphinx_builder = 'singlehtml'
self.sphinx_builder = "singlehtml"


class LocalMediaBuilder(BaseSphinx):
Expand Down Expand Up @@ -518,7 +508,6 @@ def build(self):
*self.get_sphinx_cmd(),
"-T",
"-E",
*self.sphinx_parallel_arg(),
"-b",
self.sphinx_builder,
"-d",
Expand Down
4 changes: 0 additions & 4 deletions readthedocs/doc_builder/director.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,10 +180,6 @@ def setup_environment(self):
self.install()
self.run_build_job("post_install")

# TODO: remove this and document how to do it on `build.jobs.post_install`
if self.data.project.has_feature(Feature.LIST_PACKAGES_INSTALLED_ENV):
self.language_environment.list_packages_installed()

def build(self):
"""
Build all the formats specified by the user.
Expand Down
19 changes: 8 additions & 11 deletions readthedocs/doc_builder/environments.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def run(self):
command = self.get_command()

stderr = subprocess.PIPE if self.demux else subprocess.STDOUT
proc = subprocess.Popen(
proc = subprocess.Popen( # pylint: disable=consider-using-with
command,
shell=self.shell,
cwd=self.cwd,
Expand Down Expand Up @@ -574,10 +574,7 @@ def __init__(self, *args, **kwargs):
self.use_gvisor = True

# Decide what Docker image to use, based on priorities:
# Use the Docker image set by our feature flag: ``testing`` or,
if self.project.has_feature(Feature.USE_TESTING_BUILD_IMAGE):
self.container_image = 'readthedocs/build:testing'
# the image set by user or,
# The image set by user or,
if self.config and self.config.docker_image:
self.container_image = self.config.docker_image
# the image overridden by the project (manually set by an admin).
Expand Down Expand Up @@ -633,8 +630,8 @@ def __enter__(self):
)
client = self.get_client()
client.remove_container(self.container_id)
except (DockerAPIError, ConnectionError) as e:
raise BuildAppError(e.explanation)
except (DockerAPIError, ConnectionError) as exc:
raise BuildAppError(exc.explanation) from exc

# Create the checkout path if it doesn't exist to avoid Docker creation
if not os.path.exists(self.project.doc_path):
Expand Down Expand Up @@ -707,8 +704,8 @@ def get_client(self):
version=DOCKER_VERSION,
)
return self.client
except DockerException as e:
raise BuildAppError(e.explanation)
except DockerException as exc:
raise BuildAppError(exc.explanation) from exc

def _get_binds(self):
"""
Expand Down Expand Up @@ -825,5 +822,5 @@ def create_container(self):
runtime=docker_runtime,
)
client.start(container=self.container_id)
except (DockerAPIError, ConnectionError) as e:
raise BuildAppError(e.explanation)
except (DockerAPIError, ConnectionError) as exc:
raise BuildAppError(exc.explanation) from exc
30 changes: 0 additions & 30 deletions readthedocs/doc_builder/python_environments.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,22 +307,6 @@ def install_requirements_file(self, install):
bin_path=self.venv_bin(),
)

def list_packages_installed(self):
    """List packages installed in pip."""
    # Run ``python -m pip list`` inside the build's virtualenv;
    # ``--pre`` makes pip also report pre-release versions.
    python_bin = self.venv_bin(filename='python')
    self.build_env.run(
        python_bin,
        '-m',
        'pip',
        'list',
        '--pre',
        cwd=self.checkout_path,
        bin_path=self.venv_bin(),
    )


class Conda(PythonEnvironment):

Expand Down Expand Up @@ -542,17 +526,3 @@ def install_requirements_file(self, install):
# as the conda environment was created by using the ``environment.yml``
# defined by the user, there is nothing to update at this point
pass

def list_packages_installed(self):
    """List packages installed in conda."""
    # ``conda list --name <slug>`` reports everything installed in the
    # conda environment named after this version's slug.
    self.build_env.run(
        self.conda_bin_name(),
        'list',
        '--name',
        self.version.slug,
        cwd=self.checkout_path,
        bin_path=self.venv_bin(),
    )
8 changes: 1 addition & 7 deletions readthedocs/projects/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,7 @@
WebHook,
WebHookEvent,
)
from .notifications import (
DeprecatedBuildWebhookNotification,
DeprecatedGitHubWebhookNotification,
ResourceUsageNotification,
)
from .notifications import ResourceUsageNotification
from .tag_utils import import_tags
from .tasks.utils import clean_project_resources

Expand All @@ -56,8 +52,6 @@ def has_delete_permission(self, request, obj=None):
class ProjectSendNotificationView(SendNotificationView):
notification_classes = [
ResourceUsageNotification,
DeprecatedBuildWebhookNotification,
DeprecatedGitHubWebhookNotification,
]

def get_object_recipients(self, obj):
Expand Down
Loading