diff --git a/readthedocs/api/v2/utils.py b/readthedocs/api/v2/utils.py
index 98d83f9de60..011a10cd057 100644
--- a/readthedocs/api/v2/utils.py
+++ b/readthedocs/api/v2/utils.py
@@ -37,8 +37,8 @@ def sync_versions_to_db(project, versions, type):
    :returns: set of the added versions' slugs
"""
old_version_values = project.versions.filter(type=type).values_list(
- 'verbose_name',
- 'identifier',
+ "verbose_name",
+ "identifier",
)
old_versions = dict(old_version_values)
@@ -48,8 +48,8 @@ def sync_versions_to_db(project, versions, type):
has_user_stable = False
has_user_latest = False
for version in versions:
- version_id = version['identifier']
- version_name = version['verbose_name']
+ version_id = version["identifier"]
+ version_name = version["verbose_name"]
if version_name == STABLE_VERBOSE_NAME:
has_user_stable = True
created_version, created = _set_or_create_version(
@@ -90,7 +90,7 @@ def sync_versions_to_db(project, versions, type):
)
log.info(
- 'Re-syncing versions: version updated.',
+ "Re-syncing versions: version updated.",
version_verbose_name=version_name,
version_id=version_id,
)
@@ -101,26 +101,22 @@ def sync_versions_to_db(project, versions, type):
added.update(_create_versions(project, type, versions_to_create))
if not has_user_stable:
- stable_version = (
- project.versions.filter(slug=STABLE, type=type).first()
- )
+ stable_version = project.versions.filter(slug=STABLE, type=type).first()
if stable_version:
            # Put back RTD's stable version
stable_version.machine = True
stable_version.save()
if not has_user_latest:
- latest_version = (
- project.versions.filter(slug=LATEST, type=type).first()
- )
+ latest_version = project.versions.filter(slug=LATEST, type=type).first()
if latest_version:
            # Put back RTD's latest version
latest_version.machine = True
latest_version.save()
if added:
log.info(
- 'Re-syncing versions: versions added.',
+ "Re-syncing versions: versions added.",
count=len(added),
- versions=','.join(itertools.islice(added, 100)),
+ versions=",".join(itertools.islice(added, 100)),
)
return added
@@ -174,14 +170,8 @@ def _set_or_create_version(project, slug, version_id, verbose_name, type_):
def _get_deleted_versions_qs(project, tags_data, branches_data):
# We use verbose_name for tags
# because several tags can point to the same identifier.
- versions_tags = [
- version['verbose_name']
- for version in tags_data
- ]
- versions_branches = [
- version['identifier']
- for version in branches_data
- ]
+ versions_tags = [version["verbose_name"] for version in tags_data]
+ versions_branches = [version["identifier"] for version in branches_data]
to_delete_qs = (
project.versions(manager=INTERNAL)
@@ -206,32 +196,28 @@ def delete_versions_from_db(project, tags_data, branches_data):
    :returns: The slugs of the versions deleted from the database.
"""
- to_delete_qs = (
- _get_deleted_versions_qs(
- project=project,
- tags_data=tags_data,
- branches_data=branches_data,
- )
- .exclude(active=True)
- )
+ to_delete_qs = _get_deleted_versions_qs(
+ project=project,
+ tags_data=tags_data,
+ branches_data=branches_data,
+ ).exclude(active=True)
_, deleted = to_delete_qs.delete()
- versions_count = deleted.get('builds.Version', 0)
+ versions_count = deleted.get("builds.Version", 0)
log.info(
- 'Re-syncing versions: versions deleted.', project_slug=project.slug, count=versions_count,
+ "Re-syncing versions: versions deleted.",
+ project_slug=project.slug,
+ count=versions_count,
)
def get_deleted_active_versions(project, tags_data, branches_data):
"""Return the slug of active versions that were deleted from the repository."""
- to_delete_qs = (
- _get_deleted_versions_qs(
- project=project,
- tags_data=tags_data,
- branches_data=branches_data,
- )
- .filter(active=True)
- )
- return set(to_delete_qs.values_list('slug', flat=True))
+ to_delete_qs = _get_deleted_versions_qs(
+ project=project,
+ tags_data=tags_data,
+ branches_data=branches_data,
+ ).filter(active=True)
+ return set(to_delete_qs.values_list("slug", flat=True))
def run_automation_rules(project, added_versions, deleted_active_versions):
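Aside on the deletion hunk above: removals are matched on verbose_name for tags but on identifier for branches, since several tags can point at one commit. A minimal standalone sketch of that distinction (the commit hash and tag names are hypothetical):

    tags_data = [
        {"identifier": "abc1234", "verbose_name": "v1.0.0"},
        {"identifier": "abc1234", "verbose_name": "release"},  # same commit as v1.0.0
    ]
    # Mirrors _get_deleted_versions_qs: tag slugs are taken from verbose_name,
    # because the identifier alone cannot tell the two tags apart.
    versions_tags = [version["verbose_name"] for version in tags_data]
    assert versions_tags == ["v1.0.0", "release"]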
diff --git a/readthedocs/api/v2/views/footer_views.py b/readthedocs/api/v2/views/footer_views.py
index 71fefb62147..856b6b00b01 100644
--- a/readthedocs/api/v2/views/footer_views.py
+++ b/readthedocs/api/v2/views/footer_views.py
@@ -34,11 +34,8 @@ def get_version_compare_data(project, base_version=None, user=None):
:param base_version: We assert whether or not the base_version is also the
highest version in the resulting "is_highest" value.
"""
- if (
- not project.show_version_warning or
- (base_version and base_version.is_external)
- ):
- return {'is_highest': False}
+ if not project.show_version_warning or (base_version and base_version.is_external):
+ return {"is_highest": False}
versions_qs = Version.internal.public(project=project, user=user).filter(
built=True, active=True
@@ -49,21 +46,21 @@ def get_version_compare_data(project, base_version=None, user=None):
versions_qs = versions_qs.filter(type=TAG)
# Optimization
- versions_qs = versions_qs.select_related('project')
+ versions_qs = versions_qs.select_related("project")
highest_version_obj, highest_version_comparable = highest_version(
versions_qs,
)
ret_val = {
- 'project': str(highest_version_obj),
- 'version': str(highest_version_comparable),
- 'is_highest': True,
+ "project": str(highest_version_obj),
+ "version": str(highest_version_comparable),
+ "is_highest": True,
}
if highest_version_obj:
# Never link to the dashboard,
        # users reading the docs may not have access to the dashboard.
- ret_val['url'] = highest_version_obj.get_absolute_url()
- ret_val['slug'] = highest_version_obj.slug
+ ret_val["url"] = highest_version_obj.get_absolute_url()
+ ret_val["slug"] = highest_version_obj.slug
if base_version and base_version.slug != LATEST:
try:
base_version_comparable = parse_version_failsafe(
@@ -72,13 +69,13 @@ def get_version_compare_data(project, base_version=None, user=None):
if base_version_comparable:
                # This is the only place where is_highest can get set. All error
                # cases will be set to True, for non-standard versions.
- ret_val['is_highest'] = (
+ ret_val["is_highest"] = (
base_version_comparable >= highest_version_comparable
)
else:
- ret_val['is_highest'] = True
+ ret_val["is_highest"] = True
except (Version.DoesNotExist, TypeError):
- ret_val['is_highest'] = True
+ ret_val["is_highest"] = True
return ret_val
@@ -105,24 +102,24 @@ class BaseFooterHTML(CDNCacheTagsMixin, APIView):
are called many times, so a basic cache is implemented.
"""
- http_method_names = ['get']
+ http_method_names = ["get"]
permission_classes = [IsAuthorizedToViewVersion]
renderer_classes = [JSONRenderer, JSONPRenderer]
- project_cache_tag = 'rtd-footer'
+ project_cache_tag = "rtd-footer"
@lru_cache(maxsize=1)
def _get_project(self):
- project_slug = self.request.GET.get('project', None)
+ project_slug = self.request.GET.get("project", None)
project = get_object_or_404(Project, slug=project_slug)
return project
@lru_cache(maxsize=1)
def _get_version(self):
- version_slug = self.request.GET.get('version', None)
+ version_slug = self.request.GET.get("version", None)
# Hack in a fix for missing version slug deploy
# that went out a while back
- if version_slug == '':
+ if version_slug == "":
version_slug = LATEST
project = self._get_project()
@@ -142,23 +139,23 @@ def _get_active_versions_sorted(self):
return versions
def _get_context(self):
- theme = self.request.GET.get('theme', False)
- docroot = self.request.GET.get('docroot', '')
- source_suffix = self.request.GET.get('source_suffix', '.rst')
+ theme = self.request.GET.get("theme", False)
+ docroot = self.request.GET.get("docroot", "")
+ source_suffix = self.request.GET.get("source_suffix", ".rst")
- new_theme = (theme == 'sphinx_rtd_theme')
+ new_theme = theme == "sphinx_rtd_theme"
project = self._get_project()
main_project = project.main_language_project or project
version = self._get_version()
- page_slug = self.request.GET.get('page', '')
- path = ''
- if page_slug and page_slug != 'index':
+ page_slug = self.request.GET.get("page", "")
+ path = ""
+ if page_slug and page_slug != "index":
if version.documentation_type in {SPHINX_HTMLDIR, MKDOCS}:
- path = re.sub('/index$', '', page_slug) + '/'
+ path = re.sub("/index$", "", page_slug) + "/"
else:
- path = page_slug + '.html'
+ path = page_slug + ".html"
context = {
"project": project,
@@ -176,27 +173,27 @@ def _get_context(self):
docroot,
page_slug,
source_suffix,
- 'edit',
+ "edit",
),
- 'github_view_url': version.get_github_url(
+ "github_view_url": version.get_github_url(
docroot,
page_slug,
source_suffix,
- 'view',
+ "view",
),
- 'gitlab_edit_url': version.get_gitlab_url(
+ "gitlab_edit_url": version.get_gitlab_url(
docroot,
page_slug,
source_suffix,
- 'edit',
+ "edit",
),
- 'gitlab_view_url': version.get_gitlab_url(
+ "gitlab_view_url": version.get_gitlab_url(
docroot,
page_slug,
source_suffix,
- 'view',
+ "view",
),
- 'bitbucket_url': version.get_bitbucket_url(
+ "bitbucket_url": version.get_bitbucket_url(
docroot,
page_slug,
source_suffix,
@@ -214,22 +211,19 @@ def get(self, request, format=None):
)
context = self._get_context()
- html = template_loader.get_template('restapi/footer.html').render(
+ html = template_loader.get_template("restapi/footer.html").render(
context,
request,
)
- show_version_warning = (
- project.show_version_warning and
- not version.is_external
- )
+ show_version_warning = project.show_version_warning and not version.is_external
resp_data = {
- 'html': html,
- 'show_version_warning': show_version_warning,
- 'version_active': version.active,
- 'version_compare': version_compare_data,
- 'version_supported': version.supported,
+ "html": html,
+ "show_version_warning": show_version_warning,
+ "version_active": version.active,
+ "version_compare": version_compare_data,
+ "version_supported": version.supported,
}
return Response(resp_data)
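For the "is_highest" logic above, a rough sketch of the comparison semantics, using packaging.version in place of RTD's parse_version_failsafe helper (the version numbers are made up):

    from packaging.version import Version

    highest_version_comparable = Version("2.1")

    # Mirrors: ret_val["is_highest"] = base_version_comparable >= highest_version_comparable
    assert Version("2.1") >= highest_version_comparable        # base is the highest
    assert not (Version("2.0") >= highest_version_comparable)  # an older base is not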
diff --git a/readthedocs/api/v2/views/integrations.py b/readthedocs/api/v2/views/integrations.py
index be9236c5df8..824e1536195 100644
--- a/readthedocs/api/v2/views/integrations.py
+++ b/readthedocs/api/v2/views/integrations.py
@@ -71,7 +71,7 @@ class WebhookMixin:
renderer_classes = (JSONRenderer,)
integration = None
integration_type = None
- invalid_payload_msg = 'Payload not valid'
+ invalid_payload_msg = "Payload not valid"
missing_secret_deprecated_msg = dedent(
"""
This webhook doesn't have a secret configured.
@@ -101,7 +101,7 @@ def post(self, request, project_slug):
try:
self.project = self.get_project(slug=project_slug)
if not Project.objects.is_active(self.project):
- resp = {'detail': 'This project is currently disabled'}
+ resp = {"detail": "This project is currently disabled"}
return Response(resp, status=status.HTTP_406_NOT_ACCEPTABLE)
except Project.DoesNotExist as exc:
raise NotFound("Project not found") from exc
@@ -115,15 +115,15 @@ def post(self, request, project_slug):
)
if not self.is_payload_valid():
- log.warning('Invalid payload for project and integration.')
+ log.warning("Invalid payload for project and integration.")
return Response(
- {'detail': self.invalid_payload_msg},
+ {"detail": self.invalid_payload_msg},
status=HTTP_400_BAD_REQUEST,
)
resp = self.handle_webhook()
if resp is None:
- log.info('Unhandled webhook event')
- resp = {'detail': 'Unhandled webhook event'}
+ log.info("Unhandled webhook event")
+ resp = {"detail": "Unhandled webhook event"}
        # The response can be a DRF Response with the status code already set.
# In that case, we just return it as is.
@@ -143,7 +143,7 @@ def get_project(self, **kwargs):
def finalize_response(self, req, *args, **kwargs):
"""If the project was set on POST, store an HTTP exchange."""
resp = super().finalize_response(req, *args, **kwargs)
- if hasattr(self, 'project') and self.project:
+ if hasattr(self, "project") and self.project:
HttpExchange.objects.from_exchange(
req,
resp,
@@ -222,14 +222,14 @@ def get_response_push(self, project, branches):
to_build, not_building = build_branches(project, branches)
if not_building:
log.info(
- 'Skipping project branches.',
+ "Skipping project branches.",
branches=branches,
)
triggered = bool(to_build)
return {
- 'build_triggered': triggered,
- 'project': project.slug,
- 'versions': list(to_build),
+ "build_triggered": triggered,
+ "project": project.slug,
+ "versions": list(to_build),
}
def sync_versions_response(self, project, sync=True):
@@ -242,10 +242,10 @@ def sync_versions_response(self, project, sync=True):
if sync:
version = trigger_sync_versions(project)
return {
- 'build_triggered': False,
- 'project': project.slug,
- 'versions': [version] if version else [],
- 'versions_synced': version is not None,
+ "build_triggered": False,
+ "project": project.slug,
+ "versions": [version] if version else [],
+ "versions_synced": version is not None,
}
def get_external_version_response(self, project):
@@ -372,12 +372,12 @@ class GitHubWebhookView(WebhookMixin, APIView):
"""
integration_type = Integration.GITHUB_WEBHOOK
- invalid_payload_msg = 'Payload not valid, invalid or missing signature'
+ invalid_payload_msg = "Payload not valid, invalid or missing signature"
def get_data(self):
- if self.request.content_type == 'application/x-www-form-urlencoded':
+ if self.request.content_type == "application/x-www-form-urlencoded":
try:
- return json.loads(self.request.data['payload'])
+ return json.loads(self.request.data["payload"])
except (ValueError, KeyError):
pass
return super().get_data()
@@ -446,9 +446,9 @@ def handle_webhook(self):
"""
# Get event and trigger other webhook events
- action = self.data.get('action', None)
- created = self.data.get('created', False)
- deleted = self.data.get('deleted', False)
+ action = self.data.get("action", None)
+ created = self.data.get("created", False)
+ deleted = self.data.get("deleted", False)
event = self.request.headers.get(GITHUB_EVENT_HEADER, GITHUB_PUSH)
log.bind(webhook_event=event)
webhook_github.send(
@@ -469,7 +469,7 @@ def handle_webhook(self):
# Sync versions when a branch/tag was created/deleted
if event in (GITHUB_CREATE, GITHUB_DELETE):
- log.debug('Triggered sync_versions.')
+ log.debug("Triggered sync_versions.")
return self.sync_versions_response(self.project)
integration = self.get_integration()
@@ -489,22 +489,30 @@ def handle_webhook(self):
return self.get_closed_external_version_response(self.project)
        # Sync versions when the push event is a created/deleted action
- if all([
+ if all(
+ [
event == GITHUB_PUSH,
(created or deleted),
- ]):
- events = integration.provider_data.get('events', []) if integration.provider_data else [] # noqa
- if any([
+ ]
+ ):
+ events = (
+ integration.provider_data.get("events", [])
+ if integration.provider_data
+ else []
+ ) # noqa
+ if any(
+ [
GITHUB_CREATE in events,
GITHUB_DELETE in events,
- ]):
+ ]
+ ):
# GitHub will send PUSH **and** CREATE/DELETE events on a creation/deletion in newer
# webhooks. If we receive a PUSH event we need to check if the webhook doesn't
# already have the CREATE/DELETE events. So we don't trigger the sync twice.
return self.sync_versions_response(self.project, sync=False)
log.debug(
- 'Triggered sync_versions.',
+ "Triggered sync_versions.",
integration_events=events,
)
return self.sync_versions_response(self.project)
@@ -521,8 +529,8 @@ def handle_webhook(self):
def _normalize_ref(self, ref):
"""Remove `ref/(heads|tags)/` from the reference to match a Version on the db."""
- pattern = re.compile(r'^refs/(heads|tags)/')
- return pattern.sub('', ref)
+ pattern = re.compile(r"^refs/(heads|tags)/")
+ return pattern.sub("", ref)
class GitLabWebhookView(WebhookMixin, APIView):
@@ -565,7 +573,7 @@ class GitLabWebhookView(WebhookMixin, APIView):
"""
integration_type = Integration.GITLAB_WEBHOOK
- invalid_payload_msg = 'Payload not valid, invalid or missing token'
+ invalid_payload_msg = "Payload not valid, invalid or missing token"
def is_payload_valid(self):
"""
@@ -602,8 +610,8 @@ def handle_webhook(self):
instead, it sets the before/after field to
0000000000000000000000000000000000000000 ('0' * 40)
"""
- event = self.request.data.get('object_kind', GITLAB_PUSH)
- action = self.data.get('object_attributes', {}).get('action', None)
+ event = self.request.data.get("object_kind", GITLAB_PUSH)
+ action = self.data.get("object_attributes", {}).get("action", None)
log.bind(webhook_event=event)
webhook_gitlab.send(
Project,
@@ -621,12 +629,12 @@ def handle_webhook(self):
# Handle push events and trigger builds
if event in (GITLAB_PUSH, GITLAB_TAG_PUSH):
data = self.request.data
- before = data.get('before')
- after = data.get('after')
+ before = data.get("before")
+ after = data.get("after")
# Tag/branch created/deleted
if GITLAB_NULL_HASH in (before, after):
log.debug(
- 'Triggered sync_versions.',
+ "Triggered sync_versions.",
before=before,
after=after,
)
@@ -653,8 +661,8 @@ def handle_webhook(self):
return None
def _normalize_ref(self, ref):
- pattern = re.compile(r'^refs/(heads|tags)/')
- return pattern.sub('', ref)
+ pattern = re.compile(r"^refs/(heads|tags)/")
+ return pattern.sub("", ref)
class BitbucketWebhookView(WebhookMixin, APIView):
@@ -715,14 +723,14 @@ def handle_webhook(self):
if event == BITBUCKET_PUSH:
try:
data = self.request.data
- changes = data['push']['changes']
+ changes = data["push"]["changes"]
branches = []
for change in changes:
- old = change['old']
- new = change['new']
+ old = change["old"]
+ new = change["new"]
# Normal push to master
if old is not None and new is not None:
- branches.append(new['name'])
+ branches.append(new["name"])
                # Bitbucket returns an array of changes rather than
# one webhook per change. If we have at least one normal push
# we don't trigger the sync versions, because that
@@ -770,7 +778,7 @@ class IsAuthenticatedOrHasToken(permissions.IsAuthenticated):
def has_permission(self, request, view):
has_perm = super().has_permission(request, view)
- return has_perm or 'token' in request.data
+ return has_perm or "token" in request.data
class APIWebhookView(WebhookMixin, APIView):
@@ -799,15 +807,13 @@ def get_project(self, **kwargs):
# If the user is not an admin of the project, fall back to token auth
if self.request.user.is_authenticated:
try:
- return (
- Project.objects.for_admin_user(
- self.request.user,
- ).get(**kwargs)
- )
+ return Project.objects.for_admin_user(
+ self.request.user,
+ ).get(**kwargs)
except Project.DoesNotExist:
pass
# Recheck project and integration relationship during token auth check
- token = self.request.data.get('token')
+ token = self.request.data.get("token")
if token:
integration = self.get_integration()
obj = Project.objects.get(**kwargs)
@@ -821,7 +827,7 @@ def get_project(self, **kwargs):
def handle_webhook(self):
try:
branches = self.request.data.get(
- 'branches',
+ "branches",
[self.project.get_default_branch()],
)
default_branch = self.request.data.get("default_branch", None)
diff --git a/readthedocs/builds/automation_actions.py b/readthedocs/builds/automation_actions.py
index 93de835f669..6223729cd5b 100644
--- a/readthedocs/builds/automation_actions.py
+++ b/readthedocs/builds/automation_actions.py
@@ -25,10 +25,7 @@ def activate_version(version, match_result, action_arg, *args, **kwargs):
version.active = True
version.save()
if not version.built:
- trigger_build(
- project=version.project,
- version=version
- )
+ trigger_build(project=version.project, version=version)
def set_default_version(version, match_result, action_arg, *args, **kwargs):
diff --git a/readthedocs/builds/constants.py b/readthedocs/builds/constants.py
index df1bd6bf7f3..39fc8edb4f8 100644
--- a/readthedocs/builds/constants.py
+++ b/readthedocs/builds/constants.py
@@ -30,30 +30,30 @@
)
BUILD_TYPES = (
- ('html', _('HTML')),
- ('pdf', _('PDF')),
- ('epub', _('Epub')),
+ ("html", _("HTML")),
+ ("pdf", _("PDF")),
+ ("epub", _("Epub")),
# There is currently no support for building man/dash formats, but we keep
# it there since the DB might still contain those values for legacy
# projects.
- ('man', _('Manpage')),
- ('dash', _('Dash')),
+ ("man", _("Manpage")),
+ ("dash", _("Dash")),
)
# Manager name for Internal Versions or Builds.
# i.e.: Versions and Builds excluding pull request/merge request Versions and Builds.
-INTERNAL = 'internal'
+INTERNAL = "internal"
# Manager name for External Versions or Builds.
# i.e.: Only pull request/merge request Versions and Builds.
-EXTERNAL = 'external'
-EXTERNAL_TEXT = _('External')
+EXTERNAL = "external"
+EXTERNAL_TEXT = _("External")
-BRANCH = 'branch'
-BRANCH_TEXT = _('Branch')
-TAG = 'tag'
-TAG_TEXT = _('Tag')
-UNKNOWN = 'unknown'
-UNKNOWN_TEXT = _('Unknown')
+BRANCH = "branch"
+BRANCH_TEXT = _("Branch")
+TAG = "tag"
+TAG_TEXT = _("Tag")
+UNKNOWN = "unknown"
+UNKNOWN_TEXT = _("Unknown")
VERSION_TYPES = (
(BRANCH, BRANCH_TEXT),
@@ -84,59 +84,59 @@
# General build statuses, i.e. the status that is reported back to the
# user on a Git Provider. This is not the same as BUILD_STATE, which is the
# internal representation.
-BUILD_STATUS_FAILURE = 'failed'
-BUILD_STATUS_PENDING = 'pending'
-BUILD_STATUS_SUCCESS = 'success'
+BUILD_STATUS_FAILURE = "failed"
+BUILD_STATUS_PENDING = "pending"
+BUILD_STATUS_SUCCESS = "success"
# GitHub Build Statuses
-GITHUB_BUILD_STATUS_FAILURE = 'failure'
-GITHUB_BUILD_STATUS_PENDING = 'pending'
-GITHUB_BUILD_STATUS_SUCCESS = 'success'
+GITHUB_BUILD_STATUS_FAILURE = "failure"
+GITHUB_BUILD_STATUS_PENDING = "pending"
+GITHUB_BUILD_STATUS_SUCCESS = "success"
# GitLab Build Statuses
-GITLAB_BUILD_STATUS_FAILURE = 'failed'
-GITLAB_BUILD_STATUS_PENDING = 'pending'
-GITLAB_BUILD_STATUS_SUCCESS = 'success'
+GITLAB_BUILD_STATUS_FAILURE = "failed"
+GITLAB_BUILD_STATUS_PENDING = "pending"
+GITLAB_BUILD_STATUS_SUCCESS = "success"
# Used to select correct Build status and description to be sent to each service API
SELECT_BUILD_STATUS = {
BUILD_STATUS_FAILURE: {
- 'github': GITHUB_BUILD_STATUS_FAILURE,
- 'gitlab': GITLAB_BUILD_STATUS_FAILURE,
- 'description': 'Read the Docs build failed!',
+ "github": GITHUB_BUILD_STATUS_FAILURE,
+ "gitlab": GITLAB_BUILD_STATUS_FAILURE,
+ "description": "Read the Docs build failed!",
},
BUILD_STATUS_PENDING: {
- 'github': GITHUB_BUILD_STATUS_PENDING,
- 'gitlab': GITLAB_BUILD_STATUS_PENDING,
- 'description': 'Read the Docs build is in progress!',
+ "github": GITHUB_BUILD_STATUS_PENDING,
+ "gitlab": GITLAB_BUILD_STATUS_PENDING,
+ "description": "Read the Docs build is in progress!",
},
BUILD_STATUS_SUCCESS: {
- 'github': GITHUB_BUILD_STATUS_SUCCESS,
- 'gitlab': GITLAB_BUILD_STATUS_SUCCESS,
- 'description': 'Read the Docs build succeeded!',
+ "github": GITHUB_BUILD_STATUS_SUCCESS,
+ "gitlab": GITLAB_BUILD_STATUS_SUCCESS,
+ "description": "Read the Docs build succeeded!",
},
}
-GITHUB_EXTERNAL_VERSION_NAME = 'Pull Request'
-GITLAB_EXTERNAL_VERSION_NAME = 'Merge Request'
-GENERIC_EXTERNAL_VERSION_NAME = 'External Version'
+GITHUB_EXTERNAL_VERSION_NAME = "Pull Request"
+GITLAB_EXTERNAL_VERSION_NAME = "Merge Request"
+GENERIC_EXTERNAL_VERSION_NAME = "External Version"
# Automation rules
-ALL_VERSIONS = 'all-versions'
-ALL_VERSIONS_REGEX = r'.*'
-SEMVER_VERSIONS = 'semver-versions'
+ALL_VERSIONS = "all-versions"
+ALL_VERSIONS_REGEX = r".*"
+SEMVER_VERSIONS = "semver-versions"
# Pattern referred from
# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
-SEMVER_VERSIONS_REGEX = r'^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$' # noqa
+SEMVER_VERSIONS_REGEX = r"^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$" # noqa
PREDEFINED_MATCH_ARGS = (
- (ALL_VERSIONS, _('Any version')),
- (SEMVER_VERSIONS, _('SemVer versions')),
- (None, _('Custom match')),
+ (ALL_VERSIONS, _("Any version")),
+ (SEMVER_VERSIONS, _("SemVer versions")),
+ (None, _("Custom match")),
)
PREDEFINED_MATCH_ARGS_VALUES = {
@@ -144,10 +144,8 @@
SEMVER_VERSIONS: SEMVER_VERSIONS_REGEX,
}
-BUILD_STATUS_NORMAL = 'normal'
-BUILD_STATUS_CHOICES = (
- (BUILD_STATUS_NORMAL, 'Normal'),
-)
+BUILD_STATUS_NORMAL = "normal"
+BUILD_STATUS_CHOICES = ((BUILD_STATUS_NORMAL, "Normal"),)
MAX_BUILD_COMMAND_SIZE = 1000000 # This keeps us under Azure's upload limit
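A quick sanity check of SEMVER_VERSIONS_REGEX against sample tag names (the tags are hypothetical; the pattern is copied verbatim from above):

    import re

    SEMVER_VERSIONS_REGEX = r"^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"

    assert re.match(SEMVER_VERSIONS_REGEX, "v1.2.3")
    assert re.match(SEMVER_VERSIONS_REGEX, "1.2.3-rc.1+build.5")
    assert not re.match(SEMVER_VERSIONS_REGEX, "1.2")  # not a full SemVer string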
diff --git a/readthedocs/builds/forms.py b/readthedocs/builds/forms.py
index f7e226e4e65..b5d3084f764 100644
--- a/readthedocs/builds/forms.py
+++ b/readthedocs/builds/forms.py
@@ -25,11 +25,10 @@
class VersionForm(forms.ModelForm):
-
class Meta:
model = Version
- states_fields = ['active', 'hidden']
- privacy_fields = ['privacy_level']
+ states_fields = ["active", "hidden"]
+ privacy_fields = ["privacy_level"]
fields = (
*states_fields,
*privacy_fields,
@@ -40,8 +39,10 @@ def __init__(self, *args, **kwargs):
field_sets = [
Fieldset(
- _('States'),
- HTML(render_to_string('projects/project_version_states_help_text.html')),
+ _("States"),
+ HTML(
+ render_to_string("projects/project_version_states_help_text.html")
+ ),
*self.Meta.states_fields,
),
]
@@ -49,18 +50,20 @@ def __init__(self, *args, **kwargs):
if settings.ALLOW_PRIVATE_REPOS:
field_sets.append(
Fieldset(
- _('Privacy'),
+ _("Privacy"),
*self.Meta.privacy_fields,
)
)
else:
- self.fields.pop('privacy_level')
+ self.fields.pop("privacy_level")
field_sets.append(
- HTML(render_to_string(
- 'projects/project_version_submit.html',
- context={'version': self.instance},
- ))
+ HTML(
+ render_to_string(
+ "projects/project_version_submit.html",
+ context={"version": self.instance},
+ )
+ )
)
self.helper = FormHelper()
@@ -70,11 +73,11 @@ def __init__(self, *args, **kwargs):
self._was_active = self.instance.active if self.instance else False
def clean_active(self):
- active = self.cleaned_data['active']
+ active = self.cleaned_data["active"]
if self._is_default_version() and not active:
msg = _(
- '{version} is the default version of the project, '
- 'it should be active.',
+ "{version} is the default version of the project, "
+ "it should be active.",
)
raise forms.ValidationError(
msg.format(version=self.instance.verbose_name),
@@ -92,18 +95,19 @@ def save(self, commit=True):
class RegexAutomationRuleForm(forms.ModelForm):
-
project = forms.CharField(widget=forms.HiddenInput(), required=False)
match_arg = forms.CharField(
- label='Custom match',
- help_text=_(textwrap.dedent(
- """
+ label="Custom match",
+ help_text=_(
+ textwrap.dedent(
+ """
A regular expression to match the version.
Check the documentation for valid patterns.
"""
- )),
+ )
+ ),
required=False,
)
@@ -119,20 +123,20 @@ class Meta:
]
# Don't pollute the UI with help texts
help_texts = {
- 'version_type': '',
- 'action': '',
+ "version_type": "",
+ "action": "",
}
labels = {
- 'predefined_match_arg': 'Match',
+ "predefined_match_arg": "Match",
}
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
# Only list supported types
- self.fields['version_type'].choices = [
- (None, '-' * 9),
+ self.fields["version_type"].choices = [
+ (None, "-" * 9),
(BRANCH, BRANCH_TEXT),
(TAG, TAG_TEXT),
]
@@ -143,36 +147,34 @@ def __init__(self, *args, **kwargs):
VersionAutomationRule.MAKE_VERSION_PUBLIC_ACTION,
VersionAutomationRule.MAKE_VERSION_PRIVATE_ACTION,
}
- action_choices = self.fields['action'].choices
- self.fields['action'].choices = [
- action
- for action in action_choices
- if action[0] not in invalid_actions
+ action_choices = self.fields["action"].choices
+ self.fields["action"].choices = [
+ action for action in action_choices if action[0] not in invalid_actions
]
if not self.instance.pk:
- self.initial['predefined_match_arg'] = ALL_VERSIONS
+ self.initial["predefined_match_arg"] = ALL_VERSIONS
# Allow users to start from the pattern of the predefined match
# if they want to use a custom one.
if self.instance.pk and self.instance.predefined_match_arg:
- self.initial['match_arg'] = self.instance.get_match_arg()
+ self.initial["match_arg"] = self.instance.get_match_arg()
def clean_match_arg(self):
"""Check that a custom match was given if a predefined match wasn't used."""
- match_arg = self.cleaned_data['match_arg']
- predefined_match = self.cleaned_data['predefined_match_arg']
+ match_arg = self.cleaned_data["match_arg"]
+ predefined_match = self.cleaned_data["predefined_match_arg"]
if predefined_match:
- match_arg = ''
+ match_arg = ""
if not predefined_match and not match_arg:
raise forms.ValidationError(
- _('Custom match should not be empty.'),
+ _("Custom match should not be empty."),
)
try:
re.compile(match_arg)
except Exception:
raise forms.ValidationError(
- _('Invalid Python regular expression.'),
+ _("Invalid Python regular expression."),
)
return match_arg
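The validation in clean_match_arg above boils down to: a predefined match blanks the custom pattern, and a custom pattern must be a compilable regex. A standalone sketch of the same rules (a plain ValueError stands in for forms.ValidationError):

    import re

    def clean_match_arg(match_arg, predefined_match):
        if predefined_match:
            match_arg = ""  # the predefined match wins over any custom pattern
        if not predefined_match and not match_arg:
            raise ValueError("Custom match should not be empty.")
        try:
            re.compile(match_arg)
        except re.error:
            raise ValueError("Invalid Python regular expression.")
        return match_arg

    assert clean_match_arg(r"^v[\d.]+$", predefined_match=None) == r"^v[\d.]+$"
    assert clean_match_arg("ignored", predefined_match="all-versions") == ""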
diff --git a/readthedocs/builds/managers.py b/readthedocs/builds/managers.py
index 4fc655eed66..7262bfd9144 100644
--- a/readthedocs/builds/managers.py
+++ b/readthedocs/builds/managers.py
@@ -19,7 +19,7 @@
log = structlog.get_logger(__name__)
-__all__ = ['VersionManager']
+__all__ = ["VersionManager"]
class VersionManager(models.Manager):
@@ -44,29 +44,29 @@ def from_queryset(cls, queryset_class, class_name=None):
def create_stable(self, **kwargs):
defaults = {
- 'slug': STABLE,
- 'verbose_name': STABLE_VERBOSE_NAME,
- 'machine': True,
- 'active': True,
+ "slug": STABLE,
+ "verbose_name": STABLE_VERBOSE_NAME,
+ "machine": True,
+ "active": True,
# TODO: double-check if we still require the `identifier: STABLE` field.
            # At the time of creation, we don't really know what the branch/tag identifier
            # for the STABLE version is. It makes sense for it to be `None`, probably.
#
# Note that we removed the `identifier: LATEST` from `create_latest` as a way to
# use the default branch.
- 'identifier': STABLE,
- 'type': TAG,
+ "identifier": STABLE,
+ "type": TAG,
}
defaults.update(kwargs)
return self.create(**defaults)
def create_latest(self, **kwargs):
defaults = {
- 'slug': LATEST,
- 'verbose_name': LATEST_VERBOSE_NAME,
- 'machine': True,
- 'active': True,
- 'type': BRANCH,
+ "slug": LATEST,
+ "verbose_name": LATEST_VERBOSE_NAME,
+ "machine": True,
+ "active": True,
+ "type": BRANCH,
}
defaults.update(kwargs)
return self.create(**defaults)
@@ -81,7 +81,7 @@ def get_object_or_log(self, **kwargs):
try:
return super().get(**kwargs)
except ObjectDoesNotExist:
- log.warning('Version not found for given kwargs.', kwargs=kwargs)
+ log.warning("Version not found for given kwargs.", kwargs=kwargs)
class InternalVersionManager(VersionManager):
@@ -135,7 +135,6 @@ def get_queryset(self):
class AutomationRuleMatchManager(models.Manager):
-
def register_match(self, rule, version, max_registers=15):
created = self.create(
rule=rule,
diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py
index 5947c9d7a0d..c6cef2b9587 100644
--- a/readthedocs/builds/models.py
+++ b/readthedocs/builds/models.py
@@ -93,15 +93,15 @@ class Version(TimeStampedModel):
project = models.ForeignKey(
Project,
- verbose_name=_('Project'),
- related_name='versions',
+ verbose_name=_("Project"),
+ related_name="versions",
on_delete=models.CASCADE,
)
type = models.CharField(
- _('Type'),
+ _("Type"),
max_length=20,
choices=VERSION_TYPES,
- default='unknown',
+ default="unknown",
)
# used by the vcs backend
@@ -119,24 +119,24 @@ class Version(TimeStampedModel):
#: ``identifier``. This might be the tag or branch name like ``"v1.0.4"``.
#: However this might also hold special version names like ``"latest"``
#: and ``"stable"``.
- verbose_name = models.CharField(_('Verbose Name'), max_length=255)
+ verbose_name = models.CharField(_("Verbose Name"), max_length=255)
#: The slug is the slugified version of ``verbose_name`` that can be used
#: in the URL to identify this version in a project. It's also used in the
#: filesystem to determine how the paths for this version are called. It
#: must not be used for any other identifying purposes.
slug = VersionSlugField(
- _('Slug'),
+ _("Slug"),
max_length=255,
- populate_from='verbose_name',
+ populate_from="verbose_name",
)
# TODO: this field (`supported`) could be removed. It's returned only on
# the footer API response but I don't think anybody is using this field at
# all.
- supported = models.BooleanField(_('Supported'), default=True)
+ supported = models.BooleanField(_("Supported"), default=True)
- active = models.BooleanField(_('Active'), default=False)
+ active = models.BooleanField(_("Active"), default=False)
state = models.CharField(
_("State"),
max_length=20,
@@ -153,32 +153,32 @@ class Version(TimeStampedModel):
uploaded = models.BooleanField(_("Uploaded"), default=False)
privacy_level = models.CharField(
- _('Privacy Level'),
+ _("Privacy Level"),
max_length=20,
choices=PRIVACY_CHOICES,
default=settings.DEFAULT_VERSION_PRIVACY_LEVEL,
- help_text=_('Level of privacy for this Version.'),
+ help_text=_("Level of privacy for this Version."),
)
hidden = models.BooleanField(
- _('Hidden'),
+ _("Hidden"),
default=False,
- help_text=_('Hide this version from the version (flyout) menu and search results?')
+ help_text=_(
+ "Hide this version from the version (flyout) menu and search results?"
+ ),
)
- machine = models.BooleanField(_('Machine Created'), default=False)
+ machine = models.BooleanField(_("Machine Created"), default=False)
# Whether the latest successful build for this version contains certain media types
- has_pdf = models.BooleanField(_('Has PDF'), default=False)
- has_epub = models.BooleanField(_('Has ePub'), default=False)
- has_htmlzip = models.BooleanField(_('Has HTML Zip'), default=False)
+ has_pdf = models.BooleanField(_("Has PDF"), default=False)
+ has_epub = models.BooleanField(_("Has ePub"), default=False)
+ has_htmlzip = models.BooleanField(_("Has HTML Zip"), default=False)
documentation_type = models.CharField(
- _('Documentation type'),
+ _("Documentation type"),
max_length=20,
choices=DOCTYPE_CHOICES,
default=SPHINX,
- help_text=_(
- 'Type of documentation the version was built with.'
- ),
+ help_text=_("Type of documentation the version was built with."),
)
build_data = models.JSONField(
@@ -197,17 +197,21 @@ class Version(TimeStampedModel):
objects = VersionManager.from_queryset(VersionQuerySet)()
# Only include BRANCH, TAG, UNKNOWN type Versions.
- internal = InternalVersionManager.from_queryset(partial(VersionQuerySet, internal_only=True))()
+ internal = InternalVersionManager.from_queryset(
+ partial(VersionQuerySet, internal_only=True)
+ )()
# Only include EXTERNAL type Versions.
- external = ExternalVersionManager.from_queryset(partial(VersionQuerySet, external_only=True))()
+ external = ExternalVersionManager.from_queryset(
+ partial(VersionQuerySet, external_only=True)
+ )()
class Meta:
- unique_together = [('project', 'slug')]
- ordering = ['-verbose_name']
+ unique_together = [("project", "slug")]
+ ordering = ["-verbose_name"]
def __str__(self):
return gettext(
- 'Version {version} of {project} ({pk})'.format(
+ "Version {version} of {project} ({pk})".format(
version=self.verbose_name,
project=self.project,
pk=self.pk,
@@ -284,7 +288,7 @@ def vcs_url(self):
version_name = self.project.get_default_branch()
else:
version_name = self.slug
- if 'bitbucket' in self.project.repo:
+ if "bitbucket" in self.project.repo:
version_name = self.identifier
return get_vcs_url(
@@ -295,7 +299,7 @@ def vcs_url(self):
@property
def last_build(self):
- return self.builds.order_by('-date').first()
+ return self.builds.order_by("-date").first()
@property
def config(self):
@@ -309,8 +313,9 @@ def config(self):
self.builds.filter(
state=BUILD_STATE_FINISHED,
success=True,
- ).order_by('-date')
- .only('_config')
+ )
+ .order_by("-date")
+ .only("_config")
.first()
)
if last_build:
@@ -337,14 +342,14 @@ def commit_name(self):
# name from the commit identifier, but it's hacky.
# TODO: Refactor ``Version`` to store more actual info about
# the underlying commits.
- if self.identifier.startswith('origin/'):
- return self.identifier[len('origin/'):]
+ if self.identifier.startswith("origin/"):
+ return self.identifier[len("origin/") :]
return self.identifier
# By now we must have handled all special versions.
if self.slug in NON_REPOSITORY_VERSIONS:
# pylint: disable=broad-exception-raised
- raise Exception('All special versions must be handled by now.')
+ raise Exception("All special versions must be handled by now.")
if self.type in (BRANCH, TAG):
# If this version is a branch or a tag, the verbose_name will
@@ -364,7 +369,7 @@ def commit_name(self):
# nor a branch, tag or EXTERNAL version.
# Therefore just return the identifier to make a safe guess.
log.debug(
- 'TODO: Raise an exception here. Testing what cases it happens',
+            "TODO: Raise an exception here. Testing in which cases it happens",
)
return self.identifier
@@ -401,7 +406,8 @@ def get_absolute_url(self):
def delete(self, *args, **kwargs):
from readthedocs.projects.tasks.utils import clean_project_resources
- log.info('Removing files for version.', version_slug=self.slug)
+
+ log.info("Removing files for version.", version_slug=self.slug)
clean_project_resources(self.project, self)
super().delete(*args, **kwargs)
@@ -460,7 +466,7 @@ def identifier_friendly(self):
# This usually happens when we haven't pulled the ``default_branch`` for LATEST.
return "Unknown yet"
- if re.match(r'^[0-9a-f]{40}$', self.identifier, re.I):
+ if re.match(r"^[0-9a-f]{40}$", self.identifier, re.I):
return self.identifier[:8]
return self.identifier
@@ -492,19 +498,19 @@ def prettify(k):
return k if pretty else k.lower()
if self.has_pdf:
- data[prettify('PDF')] = project.get_production_media_url(
- 'pdf',
+ data[prettify("PDF")] = project.get_production_media_url(
+ "pdf",
self.slug,
)
if self.has_htmlzip:
- data[prettify('HTML')] = project.get_production_media_url(
- 'htmlzip',
+ data[prettify("HTML")] = project.get_production_media_url(
+ "htmlzip",
self.slug,
)
if self.has_epub:
- data[prettify('Epub')] = project.get_production_media_url(
- 'epub',
+ data[prettify("Epub")] = project.get_production_media_url(
+ "epub",
self.slug,
)
return data
@@ -536,11 +542,11 @@ def get_storage_paths(self):
return paths
def get_github_url(
- self,
- docroot,
- filename,
- source_suffix='.rst',
- action='view',
+ self,
+ docroot,
+ filename,
+ source_suffix=".rst",
+ action="view",
):
"""
Return a GitHub URL for a given filename.
@@ -551,27 +557,27 @@ def get_github_url(
:param action: `view` (default) or `edit`
"""
repo_url = self.project.repo
- if 'github' not in repo_url:
- return ''
+ if "github" not in repo_url:
+ return ""
if not docroot:
- return ''
+ return ""
# Normalize /docroot/
- docroot = '/' + docroot.strip('/') + '/'
+ docroot = "/" + docroot.strip("/") + "/"
- if action == 'view':
- action_string = 'blob'
- elif action == 'edit':
- action_string = 'edit'
+ if action == "view":
+ action_string = "blob"
+ elif action == "edit":
+ action_string = "edit"
user, repo = get_github_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
if not filename:
# If there isn't a filename, we don't need a suffix
- source_suffix = ''
+ source_suffix = ""
return GITHUB_URL.format(
user=user,
@@ -584,34 +590,34 @@ def get_github_url(
)
def get_gitlab_url(
- self,
- docroot,
- filename,
- source_suffix='.rst',
- action='view',
+ self,
+ docroot,
+ filename,
+ source_suffix=".rst",
+ action="view",
):
repo_url = self.project.repo
- if 'gitlab' not in repo_url:
- return ''
+ if "gitlab" not in repo_url:
+ return ""
if not docroot:
- return ''
+ return ""
# Normalize /docroot/
- docroot = '/' + docroot.strip('/') + '/'
+ docroot = "/" + docroot.strip("/") + "/"
- if action == 'view':
- action_string = 'blob'
- elif action == 'edit':
- action_string = 'edit'
+ if action == "view":
+ action_string = "blob"
+ elif action == "edit":
+ action_string = "edit"
user, repo = get_gitlab_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
if not filename:
# If there isn't a filename, we don't need a suffix
- source_suffix = ''
+ source_suffix = ""
return GITLAB_URL.format(
user=user,
@@ -623,23 +629,23 @@ def get_gitlab_url(
action=action_string,
)
- def get_bitbucket_url(self, docroot, filename, source_suffix='.rst'):
+ def get_bitbucket_url(self, docroot, filename, source_suffix=".rst"):
repo_url = self.project.repo
- if 'bitbucket' not in repo_url:
- return ''
+ if "bitbucket" not in repo_url:
+ return ""
if not docroot:
- return ''
+ return ""
# Normalize /docroot/
- docroot = '/' + docroot.strip('/') + '/'
+ docroot = "/" + docroot.strip("/") + "/"
user, repo = get_bitbucket_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
if not filename:
# If there isn't a filename, we don't need a suffix
- source_suffix = ''
+ source_suffix = ""
return BITBUCKET_URL.format(
user=user,
@@ -677,7 +683,7 @@ def __init__(self, *args, **kwargs):
self.canonical_url = kwargs.pop("canonical_url", None)
# These fields only exist on the API return, not on the model, so we'll
# remove them to avoid throwing exceptions due to unexpected fields
- for key in ['resource_uri', 'absolute_url', 'downloads']:
+ for key in ["resource_uri", "absolute_url", "downloads"]:
try:
del kwargs[key]
except KeyError:
@@ -704,29 +710,29 @@ class Build(models.Model):
project = models.ForeignKey(
Project,
- verbose_name=_('Project'),
- related_name='builds',
+ verbose_name=_("Project"),
+ related_name="builds",
on_delete=models.CASCADE,
)
version = models.ForeignKey(
Version,
- verbose_name=_('Version'),
+ verbose_name=_("Version"),
null=True,
- related_name='builds',
+ related_name="builds",
on_delete=models.SET_NULL,
)
type = models.CharField(
- _('Type'),
+ _("Type"),
max_length=55,
choices=BUILD_TYPES,
- default='html',
+ default="html",
)
# Describe build state as where in the build process the build is. This
# allows us to show progression to the user in the form of a progress bar
# or in the build listing
state = models.CharField(
- _('State'),
+ _("State"),
max_length=55,
choices=BUILD_STATE,
default=BUILD_STATE_TRIGGERED,
@@ -738,54 +744,54 @@ class Build(models.Model):
# doesn't help describe progression
    # https://github.com/readthedocs/readthedocs.org/pull/7123#issuecomment-635065807
status = models.CharField(
- _('Status'),
+ _("Status"),
choices=BUILD_STATUS_CHOICES,
max_length=32,
null=True,
default=None,
blank=True,
)
- date = models.DateTimeField(_('Date'), auto_now_add=True, db_index=True)
- success = models.BooleanField(_('Success'), default=True)
+ date = models.DateTimeField(_("Date"), auto_now_add=True, db_index=True)
+ success = models.BooleanField(_("Success"), default=True)
# TODO: remove these fields (setup, setup_error, output, error, exit_code)
# since they are not used anymore in the new implementation and only really
    # old builds (>5 years ago) were using these fields.
- setup = models.TextField(_('Setup'), null=True, blank=True)
- setup_error = models.TextField(_('Setup error'), null=True, blank=True)
- output = models.TextField(_('Output'), default='', blank=True)
- error = models.TextField(_('Error'), default='', blank=True)
- exit_code = models.IntegerField(_('Exit code'), null=True, blank=True)
+ setup = models.TextField(_("Setup"), null=True, blank=True)
+ setup_error = models.TextField(_("Setup error"), null=True, blank=True)
+ output = models.TextField(_("Output"), default="", blank=True)
+ error = models.TextField(_("Error"), default="", blank=True)
+ exit_code = models.IntegerField(_("Exit code"), null=True, blank=True)
    # Metadata from where the build happened.
# This is also used after the version is deleted.
commit = models.CharField(
- _('Commit'),
+ _("Commit"),
max_length=255,
null=True,
blank=True,
)
version_slug = models.CharField(
- _('Version slug'),
+ _("Version slug"),
max_length=255,
null=True,
blank=True,
)
version_name = models.CharField(
- _('Version name'),
+ _("Version name"),
max_length=255,
null=True,
blank=True,
)
version_type = models.CharField(
- _('Version type'),
+ _("Version type"),
max_length=32,
choices=VERSION_TYPES,
null=True,
blank=True,
)
_config = models.JSONField(
- _('Configuration used in the build'),
+ _("Configuration used in the build"),
null=True,
blank=True,
)
@@ -798,23 +804,23 @@ class Build(models.Model):
validators=[validate_build_config_file],
)
- length = models.IntegerField(_('Build Length'), null=True, blank=True)
+ length = models.IntegerField(_("Build Length"), null=True, blank=True)
builder = models.CharField(
- _('Builder'),
+ _("Builder"),
max_length=255,
null=True,
blank=True,
)
cold_storage = models.BooleanField(
- _('Cold Storage'),
+ _("Cold Storage"),
null=True,
- help_text='Build steps stored outside the database.',
+ help_text="Build steps stored outside the database.",
)
task_id = models.CharField(
- _('Celery task id'),
+ _("Celery task id"),
max_length=36,
null=True,
blank=True,
@@ -834,11 +840,11 @@ class Build(models.Model):
# Only include EXTERNAL type Version builds.
external = ExternalBuildManager.from_queryset(BuildQuerySet)()
- CONFIG_KEY = '__config'
+ CONFIG_KEY = "__config"
class Meta:
- ordering = ['-date']
- get_latest_by = 'date'
+ ordering = ["-date"]
+ get_latest_by = "date"
index_together = [
# Useful for `/_/addons/` API endpoint.
# Query: ``version.builds.filter(success=True, state=BUILD_STATE_FINISHED)``
@@ -847,7 +853,7 @@ class Meta:
["date", "id"],
]
indexes = [
- models.Index(fields=['project', 'date']),
+ models.Index(fields=["project", "date"]),
]
def __init__(self, *args, **kwargs):
@@ -868,7 +874,9 @@ def previous(self):
project=self.project,
version=self.version,
date__lt=date,
- ).order_by('-date').first()
+ )
+ .order_by("-date")
+ .first()
)
return None
@@ -888,8 +896,7 @@ def config(self):
# well
if self._config and self.CONFIG_KEY in self._config:
return (
- Build.objects
- .only('_config')
+ Build.objects.only("_config")
.get(pk=self._config[self.CONFIG_KEY])
._config
)
@@ -935,17 +942,17 @@ def save(self, *args, **kwargs): # noqa
def __str__(self):
return gettext(
- 'Build {project} for {usernames} ({pk})'.format(
+ "Build {project} for {usernames} ({pk})".format(
project=self.project,
- usernames=' '.join(
- self.project.users.all().values_list('username', flat=True),
+ usernames=" ".join(
+ self.project.users.all().values_list("username", flat=True),
),
pk=self.pk,
),
)
def get_absolute_url(self):
- return reverse('builds_detail', args=[self.project.slug, self.pk])
+ return reverse("builds_detail", args=[self.project.slug, self.pk])
def get_full_url(self):
"""
@@ -953,11 +960,11 @@ def get_full_url(self):
Example: https://readthedocs.org/projects/pip/builds/99999999/
"""
- scheme = 'http' if settings.DEBUG else 'https'
- full_url = '{scheme}://{domain}{absolute_url}'.format(
+ scheme = "http" if settings.DEBUG else "https"
+ full_url = "{scheme}://{domain}{absolute_url}".format(
scheme=scheme,
domain=settings.PRODUCTION_DOMAIN,
- absolute_url=self.get_absolute_url()
+ absolute_url=self.get_absolute_url(),
)
return full_url
@@ -990,59 +997,53 @@ def get_commit_url(self):
"""Return the commit URL."""
repo_url = self.project.repo
if self.is_external:
- if 'github' in repo_url:
+ if "github" in repo_url:
user, repo = get_github_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
return GITHUB_PULL_REQUEST_COMMIT_URL.format(
user=user,
repo=repo,
number=self.get_version_name(),
- commit=self.commit
+ commit=self.commit,
)
- if 'gitlab' in repo_url:
+ if "gitlab" in repo_url:
user, repo = get_gitlab_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
return GITLAB_MERGE_REQUEST_COMMIT_URL.format(
user=user,
repo=repo,
number=self.get_version_name(),
- commit=self.commit
+ commit=self.commit,
)
# TODO: Add External Version Commit URL for Bitbucket.
else:
- if 'github' in repo_url:
+ if "github" in repo_url:
user, repo = get_github_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
return GITHUB_COMMIT_URL.format(
- user=user,
- repo=repo,
- commit=self.commit
+ user=user, repo=repo, commit=self.commit
)
- if 'gitlab' in repo_url:
+ if "gitlab" in repo_url:
user, repo = get_gitlab_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
return GITLAB_COMMIT_URL.format(
- user=user,
- repo=repo,
- commit=self.commit
+ user=user, repo=repo, commit=self.commit
)
- if 'bitbucket' in repo_url:
+ if "bitbucket" in repo_url:
user, repo = get_bitbucket_username_repo(repo_url)
if not user and not repo:
- return ''
+ return ""
return BITBUCKET_COMMIT_URL.format(
- user=user,
- repo=repo,
- commit=self.commit
+ user=user, repo=repo, commit=self.commit
)
return None
@@ -1077,10 +1078,10 @@ def can_rebuild(self):
"""
if self.is_external:
is_latest_build = (
- self == Build.objects.filter(
- project=self.project,
- version=self.version
- ).only('id').first()
+ self
+ == Build.objects.filter(project=self.project, version=self.version)
+ .only("id")
+ .first()
)
return self.version and self.version.active and is_latest_build
return False
@@ -1097,12 +1098,12 @@ def reset(self):
we care more about deleting the commands.
"""
self.state = BUILD_STATE_TRIGGERED
- self.status = ''
+ self.status = ""
self.success = True
- self.output = ''
- self.error = ''
+ self.output = ""
+ self.error = ""
self.exit_code = None
- self.builder = ''
+ self.builder = ""
self.cold_storage = False
self.commands.all().delete()
self.notifications.all().delete()
@@ -1139,29 +1140,28 @@ class BuildCommandResult(BuildCommandResultMixin, models.Model):
build = models.ForeignKey(
Build,
- verbose_name=_('Build'),
- related_name='commands',
+ verbose_name=_("Build"),
+ related_name="commands",
on_delete=models.CASCADE,
)
- command = models.TextField(_('Command'))
- description = models.TextField(_('Description'), blank=True)
- output = models.TextField(_('Command output'), blank=True)
- exit_code = models.IntegerField(_('Command exit code'))
+ command = models.TextField(_("Command"))
+ description = models.TextField(_("Description"), blank=True)
+ output = models.TextField(_("Command output"), blank=True)
+ exit_code = models.IntegerField(_("Command exit code"))
- start_time = models.DateTimeField(_('Start time'))
- end_time = models.DateTimeField(_('End time'))
+ start_time = models.DateTimeField(_("Start time"))
+ end_time = models.DateTimeField(_("End time"))
class Meta:
- ordering = ['start_time']
- get_latest_by = 'start_time'
+ ordering = ["start_time"]
+ get_latest_by = "start_time"
objects = RelatedBuildQuerySet.as_manager()
def __str__(self):
- return (
- gettext('Build command {pk} for build {build}')
- .format(pk=self.pk, build=self.build)
+ return gettext("Build command {pk} for build {build}").format(
+ pk=self.pk, build=self.build
)
@property
@@ -1176,12 +1176,12 @@ class VersionAutomationRule(PolymorphicModel, TimeStampedModel):
"""Versions automation rules for projects."""
- ACTIVATE_VERSION_ACTION = 'activate-version'
- DELETE_VERSION_ACTION = 'delete-version'
- HIDE_VERSION_ACTION = 'hide-version'
- MAKE_VERSION_PUBLIC_ACTION = 'make-version-public'
- MAKE_VERSION_PRIVATE_ACTION = 'make-version-private'
- SET_DEFAULT_VERSION_ACTION = 'set-default-version'
+ ACTIVATE_VERSION_ACTION = "activate-version"
+ DELETE_VERSION_ACTION = "delete-version"
+ HIDE_VERSION_ACTION = "hide-version"
+ MAKE_VERSION_PUBLIC_ACTION = "make-version-public"
+ MAKE_VERSION_PRIVATE_ACTION = "make-version-private"
+ SET_DEFAULT_VERSION_ACTION = "set-default-version"
ACTIONS = (
(ACTIVATE_VERSION_ACTION, _("Activate version")),
@@ -1197,30 +1197,30 @@ class VersionAutomationRule(PolymorphicModel, TimeStampedModel):
project = models.ForeignKey(
Project,
- related_name='automation_rules',
+ related_name="automation_rules",
on_delete=models.CASCADE,
)
priority = models.PositiveIntegerField(
- _('Rule priority'),
- help_text=_('A lower number (0) means a higher priority'),
+ _("Rule priority"),
+ help_text=_("A lower number (0) means a higher priority"),
default=0,
)
description = models.CharField(
- _('Description'),
+ _("Description"),
max_length=255,
null=True,
blank=True,
)
match_arg = models.CharField(
- _('Match argument'),
- help_text=_('Value used for the rule to match the version'),
+ _("Match argument"),
+ help_text=_("Value used for the rule to match the version"),
max_length=255,
)
predefined_match_arg = models.CharField(
- _('Predefined match argument'),
+ _("Predefined match argument"),
help_text=_(
- 'Match argument defined by us, it is used if is not None, '
- 'otherwise match_arg will be used.'
+            "Match argument defined by us, it is used if it is not None, "
+ "otherwise match_arg will be used."
),
max_length=255,
choices=PREDEFINED_MATCH_ARGS,
@@ -1229,21 +1229,21 @@ class VersionAutomationRule(PolymorphicModel, TimeStampedModel):
default=None,
)
action = models.CharField(
- _('Action'),
- help_text=_('Action to apply to matching versions'),
+ _("Action"),
+ help_text=_("Action to apply to matching versions"),
max_length=32,
choices=ACTIONS,
)
action_arg = models.CharField(
- _('Action argument'),
- help_text=_('Value used for the action to perfom an operation'),
+ _("Action argument"),
+        help_text=_("Value used for the action to perform an operation"),
max_length=255,
null=True,
blank=True,
)
version_type = models.CharField(
- _('Version type'),
- help_text=_('Type of version the rule should be applied to'),
+ _("Version type"),
+ help_text=_("Type of version the rule should be applied to"),
max_length=32,
choices=VERSION_TYPES,
)
@@ -1251,8 +1251,8 @@ class VersionAutomationRule(PolymorphicModel, TimeStampedModel):
_position_manager = ProjectItemPositionManager(position_field_name="priority")
class Meta:
- unique_together = (('project', 'priority'),)
- ordering = ('priority', '-modified', '-created')
+ unique_together = (("project", "priority"),)
+ ordering = ("priority", "-modified", "-created")
def get_match_arg(self):
"""Get the match arg defined for `predefined_match_arg` or the match from user."""
@@ -1301,10 +1301,9 @@ def apply_action(self, version, match_result):
:raises: NotImplementedError if the action
isn't implemented or supported for this rule.
"""
- action = (
- self.allowed_actions_on_create.get(self.action)
- or self.allowed_actions_on_delete.get(self.action)
- )
+ action = self.allowed_actions_on_create.get(
+ self.action
+ ) or self.allowed_actions_on_delete.get(self.action)
if action is None:
raise NotImplementedError
action(version, match_result, self.action_arg)
@@ -1341,7 +1340,7 @@ def delete(self, *args, **kwargs):
def get_description(self):
if self.description:
return self.description
- return f'{self.get_action_display()}'
+ return f"{self.get_action_display()}"
def get_edit_url(self):
raise NotImplementedError
@@ -1349,14 +1348,13 @@ def get_edit_url(self):
def __str__(self):
class_name = self.__class__.__name__
return (
- f'({self.priority}) '
- f'{class_name}/{self.get_action_display()} '
- f'for {self.project.slug}:{self.get_version_type_display()}'
+ f"({self.priority}) "
+ f"{class_name}/{self.get_action_display()} "
+ f"for {self.project.slug}:{self.get_version_type_display()}"
)
class RegexAutomationRule(VersionAutomationRule):
-
TIMEOUT = 1 # timeout in seconds
allowed_actions_on_create = {
@@ -1397,23 +1395,22 @@ def match(self, version, match_arg):
return bool(match), match
except TimeoutError:
log.warning(
- 'Timeout while parsing regex.',
+ "Timeout while parsing regex.",
pattern=match_arg,
version_slug=version.slug,
)
except Exception:
- log.exception('Error parsing regex.', exc_info=True)
+ log.exception("Error parsing regex.", exc_info=True)
return False, None
def get_edit_url(self):
return reverse(
- 'projects_automation_rule_regex_edit',
+ "projects_automation_rule_regex_edit",
args=[self.project.slug, self.pk],
)
class AutomationRuleMatch(TimeStampedModel):
-
ACTIONS_PAST_TENSE = {
VersionAutomationRule.ACTIVATE_VERSION_ACTION: _("Version activated"),
VersionAutomationRule.HIDE_VERSION_ACTION: _("Version hidden"),
@@ -1429,8 +1426,8 @@ class AutomationRuleMatch(TimeStampedModel):
rule = models.ForeignKey(
VersionAutomationRule,
- verbose_name=_('Matched rule'),
- related_name='matches',
+ verbose_name=_("Matched rule"),
+ related_name="matches",
on_delete=models.CASCADE,
)
@@ -1449,7 +1446,7 @@ class AutomationRuleMatch(TimeStampedModel):
objects = AutomationRuleMatchManager()
class Meta:
- ordering = ('-modified', '-created')
+ ordering = ("-modified", "-created")
def get_action_past_tense(self):
return self.ACTIONS_PAST_TENSE.get(self.action)
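The CONFIG_KEY indirection in Build.config above stores a full configuration once and lets later builds point back at it; a rough sketch with hypothetical data:

    CONFIG_KEY = "__config"

    builds = {
        1: {"version": 2, "formats": ["pdf"]},  # full config stored once
        2: {CONFIG_KEY: 1},                     # a later build points back at pk 1
    }

    def resolve_config(pk):
        config = builds[pk]
        if config and CONFIG_KEY in config:
            return builds[config[CONFIG_KEY]]  # follow the reference
        return config

    assert resolve_config(2) == {"version": 2, "formats": ["pdf"]}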
diff --git a/readthedocs/builds/storage.py b/readthedocs/builds/storage.py
index da57c40d244..36c97d8fa93 100644
--- a/readthedocs/builds/storage.py
+++ b/readthedocs/builds/storage.py
@@ -42,8 +42,8 @@ def _dirpath(path):
It may just be Azure, but for listdir to work correctly, this is needed.
"""
path = str(path)
- if not path.endswith('/'):
- path += '/'
+ if not path.endswith("/"):
+ path += "/"
return path
@@ -68,10 +68,10 @@ def delete_directory(self, path):
:param path: the path to the directory to remove
"""
- if path in ('', '/'):
- raise SuspiciousFileOperation('Deleting all storage cannot be right')
+ if path in ("", "/"):
+ raise SuspiciousFileOperation("Deleting all storage cannot be right")
- log.debug('Deleting path from media storage', path=path)
+ log.debug("Deleting path from media storage", path=path)
folders, files = self.listdir(self._dirpath(path))
for folder_name in folders:
if folder_name:
@@ -89,7 +89,7 @@ def copy_directory(self, source, destination):
:param destination: the destination path in storage
"""
log.debug(
- 'Copying source directory to media storage',
+ "Copying source directory to media storage",
source=source,
destination=destination,
)
@@ -144,10 +144,10 @@ def join(self, directory, filepath):
return safe_join(directory, filepath)
def walk(self, top):
- if top in ('', '/'):
- raise SuspiciousFileOperation('Iterating all storage cannot be right')
+ if top in ("", "/"):
+ raise SuspiciousFileOperation("Iterating all storage cannot be right")
- log.debug('Walking path in media storage', path=top)
+ log.debug("Walking path in media storage", path=top)
folders, files = self.listdir(self._dirpath(top))
yield top, folders, files
@@ -163,11 +163,11 @@ class BuildMediaFileSystemStorage(BuildMediaStorageMixin, FileSystemStorage):
"""Storage subclass that writes build artifacts in PRODUCTION_MEDIA_ARTIFACTS or MEDIA_ROOT."""
def __init__(self, **kwargs):
- location = kwargs.pop('location', None)
+ location = kwargs.pop("location", None)
if not location:
# Mirrors the logic of getting the production media path
- if settings.DEFAULT_PRIVACY_LEVEL == 'public' or settings.DEBUG:
+ if settings.DEFAULT_PRIVACY_LEVEL == "public" or settings.DEBUG:
location = settings.MEDIA_ROOT
else:
location = settings.PRODUCTION_MEDIA_ARTIFACTS
@@ -217,7 +217,6 @@ def url(self, name, *args, **kwargs): # noqa
class StaticFilesStorage(BaseStaticFilesStorage):
-
# Root path of the nginx internal redirect
# that will serve files from this storage.
internal_redirect_root_path = "proxito-static"
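
Two safety ideas recur in the storage changes above: directory paths are normalized with a trailing slash before listdir(), and destructive operations refuse the storage root outright. A minimal sketch of both as plain functions (the real methods live on the storage mixin and raise Django's SuspiciousFileOperation):

def dirpath(path):
    # listdir() on some backends (e.g. Azure) needs a trailing slash.
    path = str(path)
    if not path.endswith("/"):
        path += "/"
    return path

def check_not_root(path):
    # Refuse to delete or walk the entire storage bucket.
    if path in ("", "/"):
        raise ValueError("Deleting all storage cannot be right")

check_not_root("html/project/latest")  # passes silently
print(dirpath("html/project/latest"))  # html/project/latest/
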
diff --git a/readthedocs/builds/utils.py b/readthedocs/builds/utils.py
index f37d8f2a185..2977ac14ece 100644
--- a/readthedocs/builds/utils.py
+++ b/readthedocs/builds/utils.py
@@ -22,7 +22,7 @@
def get_github_username_repo(url):
- if 'github' in url:
+ if "github" in url:
for regex in GITHUB_REGEXS:
match = regex.search(url)
if match:
@@ -31,7 +31,7 @@ def get_github_username_repo(url):
def get_bitbucket_username_repo(url=None):
- if 'bitbucket' in url:
+ if "bitbucket" in url:
for regex in BITBUCKET_REGEXS:
match = regex.search(url)
if match:
@@ -40,7 +40,7 @@ def get_bitbucket_username_repo(url=None):
def get_gitlab_username_repo(url=None):
- if 'gitlab' in url:
+ if "gitlab" in url:
for regex in GITLAB_REGEXS:
match = regex.search(url)
if match:
@@ -56,14 +56,14 @@ def get_vcs_url(*, project, version_type, version_name):
    External version example: https://github.com/rtfd/readthedocs.org/pull/99/.
"""
if version_type == EXTERNAL:
- if 'github' in project.repo:
+ if "github" in project.repo:
user, repo = get_github_username_repo(project.repo)
return GITHUB_PULL_REQUEST_URL.format(
user=user,
repo=repo,
number=version_name,
)
- if 'gitlab' in project.repo:
+ if "gitlab" in project.repo:
user, repo = get_gitlab_username_repo(project.repo)
return GITLAB_MERGE_REQUEST_URL.format(
user=user,
@@ -71,16 +71,16 @@ def get_vcs_url(*, project, version_type, version_name):
number=version_name,
)
# TODO: Add VCS URL for Bitbucket.
- return ''
+ return ""
- url = ''
- if ('github' in project.repo) or ('gitlab' in project.repo):
- url = f'/tree/{version_name}/'
- elif 'bitbucket' in project.repo:
- url = f'/src/{version_name}'
+ url = ""
+ if ("github" in project.repo) or ("gitlab" in project.repo):
+ url = f"/tree/{version_name}/"
+ elif "bitbucket" in project.repo:
+ url = f"/src/{version_name}"
    # TODO: improve this replacement
- return project.repo.replace('git://', 'https://').replace('.git', '') + url
+ return project.repo.replace("git://", "https://").replace(".git", "") + url
def external_version_name(build_or_version):
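
To make the branch of get_vcs_url() for regular (non-external) versions concrete, here is a condensed, self-contained rendition of its URL construction; the repo URL is an example input, not a fixture from the codebase:

def get_vcs_url_sketch(repo, version_name):
    # Mirrors the non-external branch above: pick a path style per VCS host,
    # then normalize the scheme and strip the .git suffix.
    url = ""
    if ("github" in repo) or ("gitlab" in repo):
        url = f"/tree/{version_name}/"
    elif "bitbucket" in repo:
        url = f"/src/{version_name}"
    return repo.replace("git://", "https://").replace(".git", "") + url

print(get_vcs_url_sketch("git://github.com/rtfd/readthedocs.org.git", "main"))
# https://github.com/rtfd/readthedocs.org/tree/main/
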
diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py
index 267ac60f553..3f9897a9c57 100644
--- a/readthedocs/core/views/hooks.py
+++ b/readthedocs/core/views/hooks.py
@@ -30,14 +30,14 @@ def _build_version(project, slug, already_built=()):
version = project.versions.filter(active=True, slug=slug).first()
if version and slug not in already_built:
log.info(
- 'Building.',
+ "Building.",
project_slug=project.slug,
version_slug=version.slug,
)
trigger_build(project=project, version=version)
return slug
- log.info('Not building.', version_slug=slug)
+ log.info("Not building.", version_slug=slug)
return None
@@ -55,7 +55,7 @@ def build_branches(project, branch_list):
versions = project.versions_from_branch_name(branch)
for version in versions:
log.debug(
- 'Processing.',
+ "Processing.",
project_slug=project.slug,
version_slug=version.slug,
)
@@ -82,35 +82,35 @@ def trigger_sync_versions(project):
if not Project.objects.is_active(project):
log.warning(
- 'Sync not triggered because project is not active.',
+ "Sync not triggered because project is not active.",
project_slug=project.slug,
)
return None
try:
version_identifier = project.get_default_branch()
- version = (
- project.versions.filter(
- identifier=version_identifier,
- ).first()
- )
+ version = project.versions.filter(
+ identifier=version_identifier,
+ ).first()
if not version:
- log.info('Unable to sync from version.', version_identifier=version_identifier)
+ log.info(
+ "Unable to sync from version.", version_identifier=version_identifier
+ )
return None
if project.has_feature(Feature.SKIP_SYNC_VERSIONS):
- log.info('Skipping sync versions for project.', project_slug=project.slug)
+ log.info("Skipping sync versions for project.", project_slug=project.slug)
return None
options = {}
if project.build_queue:
# respect the queue for this project
- options['queue'] = project.build_queue
+ options["queue"] = project.build_queue
_, build_api_key = BuildAPIKey.objects.create_key(project=project)
log.debug(
- 'Triggering sync repository.',
+ "Triggering sync repository.",
project_slug=version.project.slug,
version_slug=version.slug,
)
@@ -121,7 +121,7 @@ def trigger_sync_versions(project):
)
return version.slug
except Exception:
- log.exception('Unknown sync versions exception')
+ log.exception("Unknown sync versions exception")
return None
@@ -149,7 +149,7 @@ def get_or_create_external_version(project, version_data):
if created:
log.info(
- 'External version created.',
+ "External version created.",
project_slug=project.slug,
version_slug=external_version.slug,
)
@@ -160,7 +160,7 @@ def get_or_create_external_version(project, version_data):
external_version.state = EXTERNAL_VERSION_STATE_OPEN
external_version.save()
log.info(
- 'External version updated.',
+ "External version updated.",
project_slug=project.slug,
version_slug=external_version.slug,
)
@@ -212,7 +212,7 @@ def build_external_version(project, version):
# Build External version
log.info(
- 'Building external version',
+ "Building external version",
project_slug=project.slug,
version_slug=version.slug,
)
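
The decision logic in _build_version() above is small but easy to miss in the diff noise: a build is triggered only for a version that is active and not already built during this webhook. A toy rendition with stand-ins for the ORM query and the Celery task:

def build_version(active_slugs, slug, already_built=(), trigger=print):
    # `active_slugs` stands in for project.versions.filter(active=True),
    # `trigger` for trigger_build().
    if slug in active_slugs and slug not in already_built:
        trigger(f"Building {slug}")
        return slug
    return None

print(build_version({"latest", "stable"}, "latest"))                  # Building latest
print(build_version({"latest"}, "latest", already_built={"latest"}))  # None
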
diff --git a/readthedocs/doc_builder/backends/sphinx.py b/readthedocs/doc_builder/backends/sphinx.py
index 8872a1823ab..5356b32feb8 100644
--- a/readthedocs/doc_builder/backends/sphinx.py
+++ b/readthedocs/doc_builder/backends/sphinx.py
@@ -126,26 +126,29 @@ def get_config_params(self):
self.project_path,
),
),
- '',
+ "",
)
remote_version = self.version.commit_name
github_user, github_repo = version_utils.get_github_username_repo(
url=self.project.repo,
)
- github_version_is_editable = (self.version.type == 'branch')
+ github_version_is_editable = self.version.type == "branch"
display_github = github_user is not None
- bitbucket_user, bitbucket_repo = version_utils.get_bitbucket_username_repo( # noqa
+ (
+ bitbucket_user,
+ bitbucket_repo,
+ ) = version_utils.get_bitbucket_username_repo( # noqa
url=self.project.repo,
)
- bitbucket_version_is_editable = (self.version.type == 'branch')
+ bitbucket_version_is_editable = self.version.type == "branch"
display_bitbucket = bitbucket_user is not None
gitlab_user, gitlab_repo = version_utils.get_gitlab_username_repo(
url=self.project.repo,
)
- gitlab_version_is_editable = (self.version.type == 'branch')
+ gitlab_version_is_editable = self.version.type == "branch"
display_gitlab = gitlab_user is not None
versions = []
@@ -211,26 +214,23 @@ def get_config_params(self):
"vcs_url": vcs_url,
"proxied_static_path": self.project.proxied_static_path,
# GitHub
- 'github_user': github_user,
- 'github_repo': github_repo,
- 'github_version': remote_version,
- 'github_version_is_editable': github_version_is_editable,
- 'display_github': display_github,
-
+ "github_user": github_user,
+ "github_repo": github_repo,
+ "github_version": remote_version,
+ "github_version_is_editable": github_version_is_editable,
+ "display_github": display_github,
# Bitbucket
- 'bitbucket_user': bitbucket_user,
- 'bitbucket_repo': bitbucket_repo,
- 'bitbucket_version': remote_version,
- 'bitbucket_version_is_editable': bitbucket_version_is_editable,
- 'display_bitbucket': display_bitbucket,
-
+ "bitbucket_user": bitbucket_user,
+ "bitbucket_repo": bitbucket_repo,
+ "bitbucket_version": remote_version,
+ "bitbucket_version_is_editable": bitbucket_version_is_editable,
+ "display_bitbucket": display_bitbucket,
# GitLab
- 'gitlab_user': gitlab_user,
- 'gitlab_repo': gitlab_repo,
- 'gitlab_version': remote_version,
- 'gitlab_version_is_editable': gitlab_version_is_editable,
- 'display_gitlab': display_gitlab,
-
+ "gitlab_user": gitlab_user,
+ "gitlab_repo": gitlab_repo,
+ "gitlab_version": remote_version,
+ "gitlab_version_is_editable": gitlab_version_is_editable,
+ "display_gitlab": display_gitlab,
# Features
"docsearch_disabled": self.project.has_feature(
Feature.DISABLE_SERVER_SIDE_SEARCH
@@ -272,17 +272,17 @@ def append_conf(self):
)
# Append config to project conf file
- tmpl = template_loader.get_template('doc_builder/conf.py.tmpl')
+ tmpl = template_loader.get_template("doc_builder/conf.py.tmpl")
rendered = tmpl.render(self.get_config_params())
with outfile:
- outfile.write('\n')
+ outfile.write("\n")
outfile.write(rendered)
# Print the contents of conf.py in order to make the rendered
        # config file visible in the build logs
self.run(
- 'cat',
+ "cat",
os.path.relpath(
self.config_file,
self.project_path,
@@ -344,21 +344,19 @@ def __init__(self, *args, **kwargs):
class HtmlDirBuilder(HtmlBuilder):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.sphinx_builder = "dirhtml"
class SingleHtmlBuilder(HtmlBuilder):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.sphinx_builder = "singlehtml"
class LocalMediaBuilder(BaseSphinx):
- sphinx_builder = 'readthedocssinglehtmllocalmedia'
+ sphinx_builder = "readthedocssinglehtmllocalmedia"
relative_output_dir = "htmlzip"
def _post_build(self):
@@ -404,7 +402,6 @@ def _post_build(self):
class EpubBuilder(BaseSphinx):
-
sphinx_builder = "epub"
relative_output_dir = "epub"
@@ -520,7 +517,7 @@ def _build_latexmk(self, cwd):
        # FIXME: instead of checking by language here, what we want is to check if
# ``latex_engine`` is ``platex``
pdfs = []
- if self.project.language == 'ja':
+ if self.project.language == "ja":
# Japanese language is the only one that requires this extra
# step. I don't know exactly why but most of the documentation that
            # I read differentiates this language from the others. I suppose
@@ -529,18 +526,18 @@ def _build_latexmk(self, cwd):
for image in itertools.chain(images, pdfs):
self.run(
- 'extractbb',
+ "extractbb",
image.name,
cwd=self.absolute_host_output_dir,
record=False,
)
- rcfile = 'latexmkrc'
- if self.project.language == 'ja':
- rcfile = 'latexmkjarc'
+ rcfile = "latexmkrc"
+ if self.project.language == "ja":
+ rcfile = "latexmkjarc"
self.run(
- 'cat',
+ "cat",
rcfile,
cwd=self.absolute_host_output_dir,
)
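
The context dict assembled in get_config_params() above is rendered through the doc_builder/conf.py.tmpl template and appended to the project's conf.py, which is how the github_user/bitbucket_user/gitlab_user values end up available to the theme. A toy rendering with string.Template standing in for the Django template engine:

from string import Template

tmpl = Template(
    "html_context = {\n"
    "    'github_user': '$github_user',\n"
    "    'github_version': '$github_version',\n"
    "    'display_github': $display_github,\n"
    "}\n"
)
print(tmpl.substitute(github_user="rtfd", github_version="main", display_github=True))
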
diff --git a/readthedocs/embed/views.py b/readthedocs/embed/views.py
index e7eb84b1aa4..3a6625319df 100644
--- a/readthedocs/embed/views.py
+++ b/readthedocs/embed/views.py
@@ -25,12 +25,11 @@
def escape_selector(selector):
"""Escape special characters from the section id."""
regex = re.compile(r'(!|"|#|\$|%|\'|\(|\)|\*|\+|\,|\.|\/|\:|\;|\?|@)')
- ret = re.sub(regex, r'\\\1', selector)
+ ret = re.sub(regex, r"\\\1", selector)
return ret
class EmbedAPI(EmbedAPIMixin, CDNCacheTagsMixin, APIView):
-
# pylint: disable=line-too-long
"""
@@ -72,10 +71,10 @@ def get(self, request):
project = self._get_project()
version = self._get_version()
- url = request.GET.get('url')
- path = request.GET.get('path', '')
- doc = request.GET.get('doc')
- section = request.GET.get('section')
+ url = request.GET.get("url")
+ path = request.GET.get("path", "")
+ doc = request.GET.get("doc")
+ section = request.GET.get("section")
if url:
unresolved = self.unresolved_url
@@ -84,18 +83,18 @@ def get(self, request):
elif not path and not doc:
return Response(
{
- 'error': (
- 'Invalid Arguments. '
+ "error": (
+ "Invalid Arguments. "
'Please provide "url" or "section" and "path" GET arguments.'
)
},
- status=status.HTTP_400_BAD_REQUEST
+ status=status.HTTP_400_BAD_REQUEST,
)
# Generate the docname from path
# by removing the ``.html`` extension and trailing ``/``.
if path:
- doc = re.sub(r'(.+)\.html$', r'\1', path.strip('/'))
+ doc = re.sub(r"(.+)\.html$", r"\1", path.strip("/"))
response = do_embed(
project=project,
@@ -109,16 +108,16 @@ def get(self, request):
if not response:
return Response(
{
- 'error': (
+ "error": (
"Can't find content for section: "
f"doc={doc} path={path} section={section}"
)
},
- status=status.HTTP_404_NOT_FOUND
+ status=status.HTTP_404_NOT_FOUND,
)
log.info(
- 'EmbedAPI successful response.',
+ "EmbedAPI successful response.",
project_slug=project.slug,
version_slug=version.slug,
doc=doc,
@@ -165,50 +164,47 @@ def do_embed(*, project, version, doc=None, path=None, section=None, url=None):
return None
return {
- 'content': content,
- 'headers': headers,
- 'url': url,
- 'meta': {
- 'project': project.slug,
- 'version': version.slug,
- 'doc': doc,
- 'section': section,
+ "content": content,
+ "headers": headers,
+ "url": url,
+ "meta": {
+ "project": project.slug,
+ "version": version.slug,
+ "doc": doc,
+ "section": section,
},
}
def _get_doc_content(project, version, doc):
storage_path = project.get_storage_path(
- 'json',
+ "json",
version_slug=version.slug,
include_file=False,
version_type=version.type,
)
file_path = build_media_storage.join(
storage_path,
- f'{doc}.fjson'.lstrip('/'),
+ f"{doc}.fjson".lstrip("/"),
)
try:
with build_media_storage.open(file_path) as file:
return json.load(file)
except Exception: # noqa
- log.warning('Unable to read file.', file_path=file_path)
+ log.warning("Unable to read file.", file_path=file_path)
return None
def parse_sphinx(content, section, url):
"""Get the embed content for the section."""
- body = content.get('body')
- toc = content.get('toc')
+ body = content.get("body")
+ toc = content.get("toc")
if not content or not body or not toc:
return (None, None, section)
- headers = [
- recurse_while_none(element)
- for element in PQ(toc)('a')
- ]
+ headers = [recurse_while_none(element) for element in PQ(toc)("a")]
if not section and headers:
# If no section is sent, return the content of the first one
@@ -226,19 +222,19 @@ def parse_sphinx(content, section, url):
escaped_section,
slugify(escaped_section),
make_id(escaped_section),
- f'module-{escaped_section}',
+ f"module-{escaped_section}",
]
query_result = []
for element_id in elements_id:
if not element_id:
continue
try:
- query_result = body_obj(f'#{element_id}')
+ query_result = body_obj(f"#{element_id}")
if query_result:
break
except Exception: # noqa
log.info(
- 'Failed to query section.',
+ "Failed to query section.",
url=url,
element_id=element_id,
)
@@ -248,9 +244,9 @@ def parse_sphinx(content, section, url):
query_result = body_obj(selector).parent()
# Handle ``dt`` special cases
- if len(query_result) == 1 and query_result[0].tag == 'dt':
+ if len(query_result) == 1 and query_result[0].tag == "dt":
parent = query_result.parent()
- if 'glossary' in parent.attr('class'):
+ if "glossary" in parent.attr("class"):
# Sphinx HTML structure for term glossary puts the ``id`` in the
# ``dt`` element with the title of the term. In this case, we
# need to return the next sibling which contains the definition
@@ -263,7 +259,7 @@ def parse_sphinx(content, section, url):
# ...
#
query_result = query_result.next()
- elif 'citation' in parent.attr('class'):
+ elif "citation" in parent.attr("class"):
# Sphinx HTML structure for sphinxcontrib-bibtex puts the ``id`` in the
# ``dt`` element with the title of the cite. In this case, we
# need to return the next sibling which contains the cite itself.
@@ -292,7 +288,7 @@ def parse_sphinx(content, section, url):
def dump(obj):
"""Handle API-based doc HTML."""
- if obj[0].tag in ['span', 'h2']:
+ if obj[0].tag in ["span", "h2"]:
return obj.parent().outerHtml()
return obj.outerHtml()
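
escape_selector() above backslash-escapes CSS metacharacters so that a section id such as a dotted Python path can be used safely as a PyQuery selector. Its behavior in isolation:

import re

regex = re.compile(r'(!|"|#|\$|%|\'|\(|\)|\*|\+|\,|\.|\/|\:|\;|\?|@)')
print(re.sub(regex, r"\\\1", "api.module:Class.method"))
# api\.module\:Class\.method
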
diff --git a/readthedocs/organizations/managers.py b/readthedocs/organizations/managers.py
index 9a654739cc8..992a7a6d822 100644
--- a/readthedocs/organizations/managers.py
+++ b/readthedocs/organizations/managers.py
@@ -63,7 +63,9 @@ def sorted(self):
members (invites) last.
"""
return (
- self.get_queryset().annotate(
- null_member=models.Count('member'),
- ).order_by('-null_member', 'member')
+ self.get_queryset()
+ .annotate(
+ null_member=models.Count("member"),
+ )
+ .order_by("-null_member", "member")
)
diff --git a/readthedocs/organizations/querysets.py b/readthedocs/organizations/querysets.py
index f775d7b3e94..ed6dac1adb1 100644
--- a/readthedocs/organizations/querysets.py
+++ b/readthedocs/organizations/querysets.py
@@ -25,7 +25,7 @@ def for_user(self, user):
def for_admin_user(self, user):
return self.filter(owners__in=[user]).distinct()
- def created_days_ago(self, days, field='pub_date'):
+ def created_days_ago(self, days, field="pub_date"):
"""
Filter organizations by creation date.
@@ -34,9 +34,9 @@ def created_days_ago(self, days, field='pub_date'):
"""
when = timezone.now() - timedelta(days=days)
query_filter = {}
- query_filter[field + '__year'] = when.year
- query_filter[field + '__month'] = when.month
- query_filter[field + '__day'] = when.day
+ query_filter[field + "__year"] = when.year
+ query_filter[field + "__month"] = when.month
+ query_filter[field + "__day"] = when.day
return self.filter(**query_filter)
def subscription_trial_plan_ended(self):
@@ -133,7 +133,6 @@ def clean_artifacts(self):
artifacts_cleaned=False,
)
-
def single_owner(self, user):
"""Returns organizations where `user` is the only owner."""
return self.annotate(count_owners=Count("owners")).filter(
@@ -143,5 +142,4 @@ def single_owner(self, user):
class OrganizationQuerySet(SettingsOverrideObject):
-
_default_class = BaseOrganizationQuerySet
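
created_days_ago() above builds its year/month/day lookups dynamically so the date column being filtered stays configurable. The dict construction on its own, outside the ORM:

from datetime import datetime, timedelta

def day_filter(days, field="pub_date", now=None):
    # Build the {field__year, field__month, field__day} lookups for the
    # day exactly `days` ago.
    when = (now or datetime.now()) - timedelta(days=days)
    return {
        field + "__year": when.year,
        field + "__month": when.month,
        field + "__day": when.day,
    }

print(day_filter(30, now=datetime(2023, 3, 1)))
# {'pub_date__year': 2023, 'pub_date__month': 1, 'pub_date__day': 30}
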
diff --git a/readthedocs/organizations/templatetags/organizations.py b/readthedocs/organizations/templatetags/organizations.py
index 1e3d0822f33..09144d58ae5 100644
--- a/readthedocs/organizations/templatetags/organizations.py
+++ b/readthedocs/organizations/templatetags/organizations.py
@@ -68,7 +68,7 @@ def admin_teams(user):
return Team.objects.admin(user)
-@register.filter(name='has_sso_enabled')
+@register.filter(name="has_sso_enabled")
def has_sso_enabled_filter(obj, provider=None):
"""Check if `obj` has sso enabled for `provider`."""
return AdminPermission.has_sso_enabled(obj, provider)
diff --git a/readthedocs/organizations/tests/test_access.py b/readthedocs/organizations/tests/test_access.py
index 64a76050b51..f4ad486d71c 100644
--- a/readthedocs/organizations/tests/test_access.py
+++ b/readthedocs/organizations/tests/test_access.py
@@ -9,7 +9,6 @@
class OrganizationAccessMixin:
-
url_responses = {}
def login(self):
@@ -26,30 +25,30 @@ def assertResponse(self, path, method=None, data=None, **kwargs):
data = {}
response = method(path, data=data)
response_attrs = {
- 'status_code': kwargs.pop('status_code', 200),
+ "status_code": kwargs.pop("status_code", 200),
}
response_attrs.update(kwargs)
response_attrs.update(self.url_responses.get(path, {}))
- for (key, val) in list(response_attrs.items()):
+ for key, val in list(response_attrs.items()):
self.assertEqual(getattr(response, key), val)
return response
def setUp(self):
# Previous Fixtures
- self.eric = create_user(username='eric', password='test')
- self.test = create_user(username='test', password='test')
- self.tester = create_user(username='tester', password='test')
- self.project = fixture.get(Project, slug='pip')
+ self.eric = create_user(username="eric", password="test")
+ self.test = create_user(username="test", password="test")
+ self.tester = create_user(username="tester", password="test")
+ self.project = fixture.get(Project, slug="pip")
self.organization = fixture.get(
Organization,
- name='Mozilla',
- slug='mozilla',
+ name="Mozilla",
+ slug="mozilla",
projects=[self.project],
)
self.team = fixture.get(
Team,
- name='Foobar',
- slug='foobar',
+ name="Foobar",
+ slug="foobar",
organization=self.organization,
members=[self.test],
)
@@ -60,18 +59,18 @@ def setUp(self):
)
def test_organization_list(self):
- self.assertResponse('/organizations/', status_code=200)
+ self.assertResponse("/organizations/", status_code=200)
def test_organization_details(self):
- self.assertResponse('/organizations/mozilla/', status_code=200)
- self.assertResponse('/organizations/mozilla/edit/', status_code=200)
+ self.assertResponse("/organizations/mozilla/", status_code=200)
+ self.assertResponse("/organizations/mozilla/edit/", status_code=200)
def test_organization_owners_regression(self):
"""Regression test for paths that have been moved."""
self.assertEqual(self.organization.owners.count(), 1)
- self.assertResponse('/organizations/mozilla/owners/', status_code=200)
+ self.assertResponse("/organizations/mozilla/owners/", status_code=200)
self.assertResponse(
- '/organizations/mozilla/owners/add/',
+ "/organizations/mozilla/owners/add/",
method=self.client.post,
data={"username_or_email": "tester"},
status_code=302,
@@ -84,7 +83,7 @@ def test_organization_owners_regression(self):
self.assertFalse(Invitation.objects.for_object(self.organization).exists())
self.assertEqual(self.organization.owners.count(), 1)
self.assertResponse(
- '/organizations/mozilla/owners/delete/',
+ "/organizations/mozilla/owners/delete/",
method=self.client.post,
data={"user": "tester"},
status_code=404,
@@ -96,9 +95,9 @@ def test_organization_owners_regression(self):
def test_organization_owners(self):
self.assertEqual(self.organization.owners.count(), 1)
- self.assertResponse('/organizations/mozilla/owners/', status_code=200)
+ self.assertResponse("/organizations/mozilla/owners/", status_code=200)
self.assertResponse(
- '/organizations/mozilla/owners/add/',
+ "/organizations/mozilla/owners/add/",
method=self.client.post,
data={"username_or_email": "tester"},
status_code=302,
@@ -109,24 +108,21 @@ def test_organization_owners(self):
self.assertEqual(self.organization.owners.count(), 2)
owner = OrganizationOwner.objects.get(
organization=self.organization,
- owner__username='tester',
+ owner__username="tester",
)
self.assertResponse(
- '/organizations/mozilla/owners/{}/delete/'
- .format(owner.pk),
+ "/organizations/mozilla/owners/{}/delete/".format(owner.pk),
method=self.client.post,
- data={'user': 'tester'},
+ data={"user": "tester"},
status_code=302,
)
self.assertEqual(self.organization.owners.count(), 1)
else:
self.assertFalse(
- OrganizationOwner.objects
- .filter(
+ OrganizationOwner.objects.filter(
organization=self.organization,
- owner__username='tester',
- )
- .exists(),
+ owner__username="tester",
+ ).exists(),
)
self.assertEqual(self.organization.owners.count(), 1)
@@ -134,13 +130,13 @@ def test_organization_members_regression(self):
"""Tests for regression against old member functionality."""
self.assertEqual(self.organization.members.count(), 2)
self.assertResponse(
- '/organizations/mozilla/members/',
+ "/organizations/mozilla/members/",
status_code=200,
)
self.assertResponse(
- '/organizations/mozilla/members/add/',
+ "/organizations/mozilla/members/add/",
method=self.client.post,
- data={'user': 'tester'},
+ data={"user": "tester"},
status_code=404,
)
if self.is_admin():
@@ -149,24 +145,24 @@ def test_organization_members_regression(self):
self.assertEqual(self.organization.members.count(), 2)
self.assertResponse(
- '/organizations/mozilla/members/delete/',
+ "/organizations/mozilla/members/delete/",
method=self.client.post,
- data={'user': 'tester'},
+ data={"user": "tester"},
status_code=404,
)
self.assertEqual(self.organization.members.count(), 2)
def test_organization_teams(self):
self.assertEqual(self.organization.teams.count(), 1)
- self.assertResponse('/organizations/mozilla/teams/', status_code=200)
- user = User.objects.get(username='test')
- project = Project.objects.get(slug='pip')
+ self.assertResponse("/organizations/mozilla/teams/", status_code=200)
+ user = User.objects.get(username="test")
+ project = Project.objects.get(slug="pip")
self.assertResponse(
- '/organizations/mozilla/teams/add/',
+ "/organizations/mozilla/teams/add/",
method=self.client.post,
data={
- 'name': 'more-foobar',
- 'access': 'readonly',
+ "name": "more-foobar",
+ "access": "readonly",
},
status_code=302,
)
@@ -174,17 +170,14 @@ def test_organization_teams(self):
self.assertEqual(self.organization.teams.count(), 2)
self.assertEqual(self.organization.members.count(), 2)
self.assertResponse(
- '/organizations/mozilla/teams/more-foobar/delete/',
+ "/organizations/mozilla/teams/more-foobar/delete/",
method=self.client.post,
status_code=302,
)
else:
self.assertEqual(self.organization.teams.count(), 1)
self.assertFalse(
- self.organization
- .teams
- .filter(name='foobar')
- .exists(),
+ self.organization.teams.filter(name="foobar").exists(),
)
self.assertEqual(self.organization.members.count(), 2)
self.assertEqual(self.organization.teams.count(), 1)
@@ -196,7 +189,7 @@ class OrganizationOwnerAccess(OrganizationAccessMixin, TestCase):
"""Test organization paths with authed org owner."""
def login(self):
- return self.client.login(username='eric', password='test')
+ return self.client.login(username="eric", password="test")
def is_admin(self):
return True
@@ -208,21 +201,18 @@ class OrganizationMemberAccess(OrganizationAccessMixin, TestCase):
"""Test organization paths with authed org member."""
url_responses = {
- '/organizations/': {'status_code': 200},
- '/organizations/mozilla/': {'status_code': 200},
- '/organizations/mozilla/members/': {'status_code': 200},
- '/organizations/mozilla/teams/': {'status_code': 200},
+ "/organizations/": {"status_code": 200},
+ "/organizations/mozilla/": {"status_code": 200},
+ "/organizations/mozilla/members/": {"status_code": 200},
+ "/organizations/mozilla/teams/": {"status_code": 200},
}
def assertResponse(self, path, method=None, data=None, **kwargs):
- kwargs['status_code'] = 404
- super().assertResponse(
- path, method, data,
- **kwargs
- )
+ kwargs["status_code"] = 404
+ super().assertResponse(path, method, data, **kwargs)
def login(self):
- return self.client.login(username='test', password='test')
+ return self.client.login(username="test", password="test")
def is_admin(self):
return False
@@ -234,18 +224,15 @@ class OrganizationNonmemberAccess(OrganizationAccessMixin, TestCase):
"""Test organization paths with authed but non-org user."""
url_responses = {
- '/organizations/': {'status_code': 200},
+ "/organizations/": {"status_code": 200},
}
def assertResponse(self, path, method=None, data=None, **kwargs):
- kwargs['status_code'] = 404
- super().assertResponse(
- path, method,
- data, **kwargs
- )
+ kwargs["status_code"] = 404
+ super().assertResponse(path, method, data, **kwargs)
def login(self):
- return self.client.login(username='tester', password='test')
+ return self.client.login(username="tester", password="test")
def is_admin(self):
return False
diff --git a/readthedocs/organizations/tests/test_privacy_urls.py b/readthedocs/organizations/tests/test_privacy_urls.py
index f989627ff50..c20d6b64050 100644
--- a/readthedocs/organizations/tests/test_privacy_urls.py
+++ b/readthedocs/organizations/tests/test_privacy_urls.py
@@ -7,7 +7,6 @@
class OrganizationMixin(URLAccessMixin):
-
def setUp(self):
super().setUp()
self.user = get(User)
@@ -44,10 +43,12 @@ def login(self):
def test_public_urls(self):
from readthedocs.organizations.urls.public import urlpatterns
+
self._test_url(urlpatterns)
def test_private_urls(self):
from readthedocs.organizations.urls.private import urlpatterns
+
self._test_url(urlpatterns)
@@ -61,8 +62,10 @@ class AuthUserOrganizationsTest(OrganizationMixin, TestCase):
"/organizations/choose/{next_name}/": {"status_code": 302},
"/organizations/invite/{hash}/redeem/": {"status_code": 302},
# 405's where we should be POST'ing
- '/organizations/{slug}/owners/{owner}/delete/': {'status_code': 405},
- '/organizations/{slug}/teams/{team}/members/{member}/revoke/': {'status_code': 405},
+ "/organizations/{slug}/owners/{owner}/delete/": {"status_code": 405},
+ "/organizations/{slug}/teams/{team}/members/{member}/revoke/": {
+ "status_code": 405
+ },
}
def login(self):
@@ -70,8 +73,10 @@ def login(self):
def test_public_urls(self):
from readthedocs.organizations.urls.public import urlpatterns
+
self._test_url(urlpatterns)
def test_private_urls(self):
from readthedocs.organizations.urls.private import urlpatterns
+
self._test_url(urlpatterns)
diff --git a/readthedocs/organizations/urls/private.py b/readthedocs/organizations/urls/private.py
index 8188372bc6f..874e125e480 100644
--- a/readthedocs/organizations/urls/private.py
+++ b/readthedocs/organizations/urls/private.py
@@ -7,7 +7,7 @@
path(
"",
views.ListOrganization.as_view(),
- name='organization_list',
+ name="organization_list",
),
re_path(
r"^choose/(?P[\w.-]+)/$",
@@ -17,68 +17,68 @@
path(
"create/",
views.CreateOrganizationSignup.as_view(),
- name='organization_create',
+ name="organization_create",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/edit/$',
+    r"^(?P<slug>[\w.-]+)/edit/$",
views.EditOrganization.as_view(),
- name='organization_edit',
+ name="organization_edit",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/delete/$',
+    r"^(?P<slug>[\w.-]+)/delete/$",
views.DeleteOrganization.as_view(),
- name='organization_delete',
+ name="organization_delete",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/security-log/$',
+    r"^(?P<slug>[\w.-]+)/security-log/$",
views.OrganizationSecurityLog.as_view(),
- name='organization_security_log',
+ name="organization_security_log",
),
# Owners
re_path(
-    r'^(?P<slug>[\w.-]+)/owners/(?P<owner>\d+)/delete/$',
+    r"^(?P<slug>[\w.-]+)/owners/(?P<owner>\d+)/delete/$",
views.DeleteOrganizationOwner.as_view(),
- name='organization_owner_delete',
+ name="organization_owner_delete",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/owners/add/$',
+    r"^(?P<slug>[\w.-]+)/owners/add/$",
views.AddOrganizationOwner.as_view(),
- name='organization_owner_add',
+ name="organization_owner_add",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/owners/$',
+    r"^(?P<slug>[\w.-]+)/owners/$",
views.EditOrganizationOwners.as_view(),
- name='organization_owners',
+ name="organization_owners",
),
# Teams
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/add/$',
+    r"^(?P<slug>[\w.-]+)/teams/add/$",
views.AddOrganizationTeam.as_view(),
- name='organization_team_add',
+ name="organization_team_add",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/edit/$',
+    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/edit/$",
views.EditOrganizationTeam.as_view(),
- name='organization_team_edit',
+ name="organization_team_edit",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/projects/$',
+    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/projects/$",
views.UpdateOrganizationTeamProject.as_view(),
- name='organization_team_project_edit',
+ name="organization_team_project_edit",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/delete/$',
+    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/delete/$",
views.DeleteOrganizationTeam.as_view(),
- name='organization_team_delete',
+ name="organization_team_delete",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/members/invite/$',
+    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/members/invite/$",
views.AddOrganizationTeamMember.as_view(),
- name='organization_team_member_add',
+ name="organization_team_member_add",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/members/(?P<member>\d+)/revoke/$',
+    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/members/(?P<member>\d+)/revoke/$",
views.DeleteOrganizationTeamMember.as_view(),
- name='organization_team_member_delete',
+ name="organization_team_member_delete",
),
]
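
The named groups in these routes become the view kwargs used elsewhere in this diff: `slug` feeds OrganizationMixin.get_organization(), `team` feeds get_team(), and `owner`/`member` match the lookup_url_kwarg values on the owner and member views. Checking one pattern with plain re:

import re

pattern = re.compile(
    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/members/(?P<member>\d+)/revoke/$"
)
match = pattern.match("mozilla/teams/foobar/members/42/revoke/")
print(match.groupdict())  # {'slug': 'mozilla', 'team': 'foobar', 'member': '42'}
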
diff --git a/readthedocs/organizations/urls/public.py b/readthedocs/organizations/urls/public.py
index db83b0f865b..67a39bd380f 100644
--- a/readthedocs/organizations/urls/public.py
+++ b/readthedocs/organizations/urls/public.py
@@ -12,30 +12,30 @@
name="organization_verify_email",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/$',
+    r"^(?P<slug>[\w.-]+)/$",
views.DetailOrganization.as_view(),
- name='organization_detail',
+ name="organization_detail",
),
# Teams
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/$',
+    r"^(?P<slug>[\w.-]+)/teams/(?P<team>[\w.-]+)/$",
views.ListOrganizationTeamMembers.as_view(),
- name='organization_team_detail',
+ name="organization_team_detail",
),
re_path(
-    r'^(?P<slug>[\w.-]+)/teams/$',
+    r"^(?P<slug>[\w.-]+)/teams/$",
views.ListOrganizationTeams.as_view(),
- name='organization_team_list',
+ name="organization_team_list",
),
re_path(
-    r'^invite/(?P<hash>[\w.-]+)/redeem/$',
+    r"^invite/(?P<hash>[\w.-]+)/redeem/$",
views.RedirectRedeemTeamInvitation.as_view(),
- name='organization_invite_redeem',
+ name="organization_invite_redeem",
),
# Members
re_path(
-    r'^(?P<slug>[\w.-]+)/members/$',
+    r"^(?P<slug>[\w.-]+)/members/$",
views.ListOrganizationMembers.as_view(),
- name='organization_members',
+ name="organization_members",
),
]
diff --git a/readthedocs/organizations/views/base.py b/readthedocs/organizations/views/base.py
index 2d105870ba0..0cfdfaf1307 100644
--- a/readthedocs/organizations/views/base.py
+++ b/readthedocs/organizations/views/base.py
@@ -55,7 +55,7 @@ class OrganizationMixin(CheckOrganizationsEnabled):
access
"""
- org_url_field = 'slug'
+ org_url_field = "slug"
admin_only = True
def get_queryset(self):
@@ -88,7 +88,7 @@ def get_context_data(self, **kwargs):
"""Add organization to context data."""
context = super().get_context_data(**kwargs)
organization = self.get_organization()
- context['organization'] = organization
+ context["organization"] = organization
return context
@@ -109,21 +109,25 @@ def get_team_queryset(self):
        This will either be teams the user is a member of, or teams where the
user is an owner of the organization.
"""
- return Team.objects.member(self.request.user).filter(
- organization=self.get_organization(),
- ).order_by('name')
+ return (
+ Team.objects.member(self.request.user)
+ .filter(
+ organization=self.get_organization(),
+ )
+ .order_by("name")
+ )
@lru_cache(maxsize=1)
def get_team(self):
"""Return team determined by url kwarg."""
return get_object_or_404(
self.get_team_queryset(),
- slug=self.kwargs['team'],
+ slug=self.kwargs["team"],
)
def get_form(self, data=None, files=None, **kwargs):
"""Pass in organization to form class instance."""
- kwargs['organization'] = self.get_organization()
+ kwargs["organization"] = self.get_organization()
return self.form_class(data, files, **kwargs)
@@ -137,8 +141,8 @@ class OrganizationView(CheckOrganizationsEnabled):
admin_only = True
# Only relevant when mixed into
- lookup_field = 'slug'
- lookup_url_field = 'slug'
+ lookup_field = "slug"
+ lookup_url_field = "slug"
def get_queryset(self):
if self.admin_only:
@@ -146,7 +150,7 @@ def get_queryset(self):
return Organization.objects.for_user(user=self.request.user)
def get_form(self, data=None, files=None, **kwargs):
- kwargs['user'] = self.request.user
+ kwargs["user"] = self.request.user
cls = self.get_form_class()
return cls(data, files, **kwargs)
@@ -154,12 +158,12 @@ def get_context_data(self, **kwargs):
"""Add onboarding context."""
context = super().get_context_data(**kwargs)
if not self.get_queryset().exists():
- context['onboarding'] = True
+ context["onboarding"] = True
return context
def get_success_url(self):
return reverse_lazy(
- 'organization_edit',
+ "organization_edit",
args=[self.object.slug],
)
@@ -171,15 +175,12 @@ class OrganizationOwnerView(SuccessMessageMixin, OrganizationMixin):
model = OrganizationOwner
form_class = OrganizationOwnerForm
admin_only = True
- lookup_url_kwarg = 'owner'
+ lookup_url_kwarg = "owner"
def get_queryset(self):
- return (
- OrganizationOwner.objects.filter(
- organization=self.get_organization(),
- )
- .prefetch_related('owner')
- )
+ return OrganizationOwner.objects.filter(
+ organization=self.get_organization(),
+ ).prefetch_related("owner")
def get_form(self, data=None, files=None, **kwargs):
kwargs["organization"] = self.get_organization()
@@ -203,7 +204,7 @@ def get_context_data(self, **kwargs):
def get_success_url(self):
return reverse_lazy(
- 'organization_owners',
+ "organization_owners",
args=[self.get_organization().slug],
)
@@ -223,7 +224,7 @@ def get_object(self):
def get_success_url(self):
return reverse_lazy(
- 'organization_team_detail',
+ "organization_team_detail",
args=[
self.get_organization().slug,
self.object.slug,
@@ -245,7 +246,7 @@ def get_queryset(self):
)
def get_object(self):
- return self.get_queryset().get(pk=self.kwargs['member'])
+ return self.get_queryset().get(pk=self.kwargs["member"])
def _get_invitations(self):
return Invitation.objects.for_object(self.get_team())
@@ -265,6 +266,6 @@ def get_success_url(self):
organization = self.get_organization()
team = self.get_team()
return reverse_lazy(
- 'organization_team_detail',
+ "organization_team_detail",
args=[organization.slug, team.slug],
)
diff --git a/readthedocs/organizations/views/private.py b/readthedocs/organizations/views/private.py
index fce01a8207a..8acfae41523 100644
--- a/readthedocs/organizations/views/private.py
+++ b/readthedocs/organizations/views/private.py
@@ -40,13 +40,13 @@ class CreateOrganizationSignup(PrivateViewMixin, OrganizationView, CreateView):
"""View to create an organization after the user has signed up."""
- template_name = 'organizations/organization_create.html'
+ template_name = "organizations/organization_create.html"
form_class = OrganizationSignupForm
def get_form(self, data=None, files=None, **kwargs):
"""Add request user as default billing address email."""
- kwargs['initial'] = {'email': self.request.user.email}
- kwargs['user'] = self.request.user
+ kwargs["initial"] = {"email": self.request.user.email}
+ kwargs["user"] = self.request.user
return super().get_form(data=data, files=files, **kwargs)
def get_success_url(self):
@@ -60,7 +60,7 @@ def get_success_url(self):
redirects to Organization's Edit page.
"""
return reverse_lazy(
- 'organization_detail',
+ "organization_detail",
args=[self.object.slug],
)
@@ -89,7 +89,6 @@ class ChooseOrganization(ListOrganization):
template_name = "organizations/organization_choose.html"
def get(self, request, *args, **kwargs):
-
self.next_name = self.kwargs["next_name"]
self.next_querystring = self.request.GET.get("next_querystring")
@@ -113,33 +112,33 @@ def get_context_data(self, **kwargs):
class EditOrganization(
- PrivateViewMixin,
- UpdateChangeReasonPostView,
- OrganizationView,
- UpdateView,
+ PrivateViewMixin,
+ UpdateChangeReasonPostView,
+ OrganizationView,
+ UpdateView,
):
- template_name = 'organizations/admin/organization_edit.html'
+ template_name = "organizations/admin/organization_edit.html"
class DeleteOrganization(
- PrivateViewMixin,
- UpdateChangeReasonPostView,
- OrganizationView,
- DeleteView,
+ PrivateViewMixin,
+ UpdateChangeReasonPostView,
+ OrganizationView,
+ DeleteView,
):
- template_name = 'organizations/admin/organization_delete.html'
+ template_name = "organizations/admin/organization_delete.html"
def get_success_url(self):
- return reverse_lazy('organization_list')
+ return reverse_lazy("organization_list")
# Owners views
class EditOrganizationOwners(PrivateViewMixin, OrganizationOwnerView, ListView):
- template_name = 'organizations/admin/owners_edit.html'
+ template_name = "organizations/admin/owners_edit.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
- context['form'] = self.form_class()
+ context["form"] = self.form_class()
return context
@@ -154,8 +153,8 @@ def form_valid(self, form):
class DeleteOrganizationOwner(PrivateViewMixin, OrganizationOwnerView, DeleteView):
- success_message = _('Owner removed')
- http_method_names = ['post']
+ success_message = _("Owner removed")
+ http_method_names = ["post"]
def post(self, request, *args, **kwargs):
if self._is_last_user():
@@ -166,18 +165,18 @@ def post(self, request, *args, **kwargs):
# Team views
class AddOrganizationTeam(PrivateViewMixin, OrganizationTeamView, CreateView):
- template_name = 'organizations/team_create.html'
- success_message = _('Team added')
+ template_name = "organizations/team_create.html"
+ success_message = _("Team added")
class DeleteOrganizationTeam(
- PrivateViewMixin,
- UpdateChangeReasonPostView,
- OrganizationTeamView,
- DeleteView,
+ PrivateViewMixin,
+ UpdateChangeReasonPostView,
+ OrganizationTeamView,
+ DeleteView,
):
- template_name = 'organizations/team_delete.html'
- success_message = _('Team deleted')
+ template_name = "organizations/team_delete.html"
+ success_message = _("Team deleted")
def post(self, request, *args, **kwargs):
"""Hack to show messages on delete."""
@@ -187,24 +186,24 @@ def post(self, request, *args, **kwargs):
def get_success_url(self):
return reverse_lazy(
- 'organization_team_list',
+ "organization_team_list",
args=[self.get_organization().slug],
)
class EditOrganizationTeam(PrivateViewMixin, OrganizationTeamView, UpdateView):
- template_name = 'organizations/team_edit.html'
- success_message = _('Team updated')
+ template_name = "organizations/team_edit.html"
+ success_message = _("Team updated")
class UpdateOrganizationTeamProject(PrivateViewMixin, OrganizationTeamView, UpdateView):
form_class = OrganizationTeamProjectForm
- success_message = _('Team projects updated')
- template_name = 'organizations/team_project_edit.html'
+ success_message = _("Team projects updated")
+ template_name = "organizations/team_project_edit.html"
class AddOrganizationTeamMember(PrivateViewMixin, OrganizationTeamMemberView, FormView):
- template_name = 'organizations/team_member_create.html'
+ template_name = "organizations/team_member_create.html"
def form_valid(self, form):
# Manually calling to save, since this isn't a ModelFormView.
@@ -216,9 +215,11 @@ def form_valid(self, form):
return super().form_valid(form)
-class DeleteOrganizationTeamMember(PrivateViewMixin, OrganizationTeamMemberView, DeleteView):
- success_message = _('Member removed from team')
- http_method_names = ['post']
+class DeleteOrganizationTeamMember(
+ PrivateViewMixin, OrganizationTeamMemberView, DeleteView
+):
+ success_message = _("Member removed from team")
+ http_method_names = ["post"]
def post(self, request, *args, **kwargs):
"""Hack to show messages on delete."""
@@ -232,11 +233,11 @@ class OrganizationSecurityLog(PrivateViewMixin, OrganizationMixin, ListView):
"""Display security logs related to this organization."""
model = AuditLog
- template_name = 'organizations/security_log.html'
+ template_name = "organizations/security_log.html"
feature_type = TYPE_AUDIT_LOGS
def get(self, request, *args, **kwargs):
- download_data = request.GET.get('download', False)
+ download_data = request.GET.get("download", False)
if download_data:
return self._get_csv_data()
return super().get(request, *args, **kwargs)
@@ -259,18 +260,18 @@ def _get_csv_data(self):
start_date = self._get_start_date()
end_date = timezone.now().date()
- date_filter = self.filter.form.cleaned_data.get('date')
+ date_filter = self.filter.form.cleaned_data.get("date")
if date_filter:
start_date = date_filter.start or start_date
end_date = date_filter.stop or end_date
- filename = 'readthedocs_organization_security_logs_{organization}_{start}_{end}.csv'.format(
+ filename = "readthedocs_organization_security_logs_{organization}_{start}_{end}.csv".format(
organization=organization.slug,
- start=timezone.datetime.strftime(start_date, '%Y-%m-%d'),
- end=timezone.datetime.strftime(end_date, '%Y-%m-%d'),
+ start=timezone.datetime.strftime(start_date, "%Y-%m-%d"),
+ end=timezone.datetime.strftime(end_date, "%Y-%m-%d"),
)
csv_data = [
- [timezone.datetime.strftime(date, '%Y-%m-%d %H:%M:%S'), *rest]
+ [timezone.datetime.strftime(date, "%Y-%m-%d %H:%M:%S"), *rest]
for date, *rest in data
]
csv_data.insert(0, [header for header, _ in values])
diff --git a/readthedocs/projects/constants.py b/readthedocs/projects/constants.py
index f9256de3814..d203e5a8711 100644
--- a/readthedocs/projects/constants.py
+++ b/readthedocs/projects/constants.py
@@ -10,18 +10,18 @@
from django.utils.translation import gettext_lazy as _
-SPHINX = 'sphinx'
-MKDOCS = 'mkdocs'
-SPHINX_HTMLDIR = 'sphinx_htmldir'
-SPHINX_SINGLEHTML = 'sphinx_singlehtml'
+SPHINX = "sphinx"
+MKDOCS = "mkdocs"
+SPHINX_HTMLDIR = "sphinx_htmldir"
+SPHINX_SINGLEHTML = "sphinx_singlehtml"
# This type is defined by the users in their mkdocs.yml file.
MKDOCS_HTML = "mkdocs_html"
GENERIC = "generic"
DOCUMENTATION_CHOICES = (
- (SPHINX, _('Sphinx Html')),
- (MKDOCS, _('Mkdocs')),
- (SPHINX_HTMLDIR, _('Sphinx HtmlDir')),
- (SPHINX_SINGLEHTML, _('Sphinx Single Page HTML')),
+ (SPHINX, _("Sphinx Html")),
+ (MKDOCS, _("Mkdocs")),
+ (SPHINX_HTMLDIR, _("Sphinx HtmlDir")),
+ (SPHINX_SINGLEHTML, _("Sphinx Single Page HTML")),
)
DOCTYPE_CHOICES = DOCUMENTATION_CHOICES + (
(MKDOCS_HTML, _("Mkdocs Html Pages")),
@@ -29,11 +29,11 @@
)
-MEDIA_TYPE_HTML = 'html'
-MEDIA_TYPE_PDF = 'pdf'
-MEDIA_TYPE_EPUB = 'epub'
-MEDIA_TYPE_HTMLZIP = 'htmlzip'
-MEDIA_TYPE_JSON = 'json'
+MEDIA_TYPE_HTML = "html"
+MEDIA_TYPE_PDF = "pdf"
+MEDIA_TYPE_EPUB = "epub"
+MEDIA_TYPE_HTMLZIP = "htmlzip"
+MEDIA_TYPE_JSON = "json"
DOWNLOADABLE_MEDIA_TYPES = (
MEDIA_TYPE_PDF,
MEDIA_TYPE_EPUB,
@@ -51,24 +51,24 @@
BUILD_COMMANDS_OUTPUT_PATH_HTML = os.path.join(BUILD_COMMANDS_OUTPUT_PATH, "html")
SAMPLE_FILES = (
- ('Installation', 'projects/samples/installation.rst.html'),
- ('Getting started', 'projects/samples/getting_started.rst.html'),
+ ("Installation", "projects/samples/installation.rst.html"),
+ ("Getting started", "projects/samples/getting_started.rst.html"),
)
SCRAPE_CONF_SETTINGS = [
- 'copyright',
- 'project',
- 'version',
- 'release',
- 'source_suffix',
- 'html_theme',
- 'extensions',
+ "copyright",
+ "project",
+ "version",
+ "release",
+ "source_suffix",
+ "html_theme",
+ "extensions",
]
HEADING_MARKUP = (
- (1, '='),
- (2, '-'),
- (3, '^'),
+ (1, "="),
+ (2, "-"),
+ (3, "^"),
(4, '"'),
)
@@ -76,208 +76,208 @@
DELETED_STATUS = 99
STATUS_CHOICES = (
- (LIVE_STATUS, _('Live')),
- (DELETED_STATUS, _('Deleted')),
+ (LIVE_STATUS, _("Live")),
+ (DELETED_STATUS, _("Deleted")),
)
-REPO_TYPE_GIT = 'git'
-REPO_TYPE_SVN = 'svn'
-REPO_TYPE_HG = 'hg'
-REPO_TYPE_BZR = 'bzr'
+REPO_TYPE_GIT = "git"
+REPO_TYPE_SVN = "svn"
+REPO_TYPE_HG = "hg"
+REPO_TYPE_BZR = "bzr"
REPO_CHOICES = (
- (REPO_TYPE_GIT, _('Git')),
- (REPO_TYPE_SVN, _('Subversion')),
- (REPO_TYPE_HG, _('Mercurial')),
- (REPO_TYPE_BZR, _('Bazaar')),
+ (REPO_TYPE_GIT, _("Git")),
+ (REPO_TYPE_SVN, _("Subversion")),
+ (REPO_TYPE_HG, _("Mercurial")),
+ (REPO_TYPE_BZR, _("Bazaar")),
)
-PUBLIC = 'public'
-PRIVATE = 'private'
+PUBLIC = "public"
+PRIVATE = "private"
PRIVACY_CHOICES = (
- (PUBLIC, _('Public')),
- (PRIVATE, _('Private')),
+ (PUBLIC, _("Public")),
+ (PRIVATE, _("Private")),
)
IMPORTANT_VERSION_FILTERS = {
- 'slug': 'important',
+ "slug": "important",
}
# In the future this constant can be replaced with an implementation that
# detects all available Python interpreters on the fly (maybe using the
# update-alternatives Linux tool family?).
PYTHON_CHOICES = (
- ('python', _('CPython 2.x')),
- ('python3', _('CPython 3.x')),
+ ("python", _("CPython 2.x")),
+ ("python3", _("CPython 3.x")),
)
# Via http://sphinx-doc.org/latest/config.html#confval-language
# Languages supported for the lang_slug in the URL
# Translations for builtin Sphinx messages only available for a subset of these
LANGUAGES = (
- ('aa', 'Afar'),
- ('ab', 'Abkhaz'),
- ('acr', 'Achi'),
- ('af', 'Afrikaans'),
- ('agu', 'Awakateko'),
- ('am', 'Amharic'),
- ('ar', 'Arabic'),
- ('as', 'Assamese'),
- ('ay', 'Aymara'),
- ('az', 'Azerbaijani'),
- ('ba', 'Bashkir'),
- ('be', 'Belarusian'),
- ('bg', 'Bulgarian'),
- ('bh', 'Bihari'),
- ('bi', 'Bislama'),
- ('bn', 'Bengali'),
- ('bo', 'Tibetan'),
- ('br', 'Breton'),
- ('ca', 'Catalan'),
- ('caa', 'Ch\'orti\''),
- ('cac', 'Chuj'),
- ('cab', 'Garífuna'),
- ('cak', 'Kaqchikel'),
- ('co', 'Corsican'),
- ('cs', 'Czech'),
- ('cy', 'Welsh'),
- ('da', 'Danish'),
- ('de', 'German'),
- ('dz', 'Dzongkha'),
- ('el', 'Greek'),
- ('en', 'English'),
- ('eo', 'Esperanto'),
- ('es', 'Spanish'),
- ('et', 'Estonian'),
- ('eu', 'Basque'),
- ('fa', 'Iranian'),
- ('fi', 'Finnish'),
- ('fj', 'Fijian'),
- ('fo', 'Faroese'),
- ('fr', 'French'),
- ('fy', 'Western Frisian'),
- ('ga', 'Irish'),
- ('gd', 'Scottish Gaelic'),
- ('gl', 'Galician'),
- ('gn', 'Guarani'),
- ('gu', 'Gujarati'),
- ('ha', 'Hausa'),
- ('hi', 'Hindi'),
- ('he', 'Hebrew'),
- ('hr', 'Croatian'),
- ('hu', 'Hungarian'),
- ('hy', 'Armenian'),
- ('ia', 'Interlingua'),
- ('id', 'Indonesian'),
- ('ie', 'Interlingue'),
- ('ik', 'Inupiaq'),
- ('is', 'Icelandic'),
- ('it', 'Italian'),
- ('itz', 'Itza\''),
- ('iu', 'Inuktitut'),
- ('ixl', 'Ixil'),
- ('ja', 'Japanese'),
- ('jac', 'Popti\''),
- ('jv', 'Javanese'),
- ('ka', 'Georgian'),
- ('kjb', 'Q\'anjob\'al'),
- ('kek', 'Q\'eqchi\''),
- ('kk', 'Kazakh'),
- ('kl', 'Kalaallisut'),
- ('km', 'Khmer'),
- ('kn', 'Kannada'),
- ('knj', 'Akateko'),
- ('ko', 'Korean'),
- ('ks', 'Kashmiri'),
- ('ku', 'Kurdish'),
- ('ky', 'Kyrgyz'),
- ('la', 'Latin'),
- ('ln', 'Lingala'),
- ('lo', 'Lao'),
- ('lt', 'Lithuanian'),
- ('lv', 'Latvian'),
- ('mam', 'Mam'),
- ('mg', 'Malagasy'),
- ('mi', 'Maori'),
- ('mk', 'Macedonian'),
- ('ml', 'Malayalam'),
- ('mn', 'Mongolian'),
- ('mop', 'Mopan'),
- ('mr', 'Marathi'),
- ('ms', 'Malay'),
- ('mt', 'Maltese'),
- ('my', 'Burmese'),
- ('na', 'Nauru'),
- ('ne', 'Nepali'),
- ('nl', 'Dutch'),
- ('no', 'Norwegian'),
- ('oc', 'Occitan'),
- ('om', 'Oromo'),
- ('or', 'Oriya'),
- ('pa', 'Panjabi'),
- ('pl', 'Polish'),
- ('pnb', 'Western Punjabi'),
- ('poc', 'Poqomam'),
- ('poh', 'Poqomchi'),
- ('ps', 'Pashto'),
- ('pt', 'Portuguese'),
- ('qu', 'Quechua'),
- ('quc', 'K\'iche\''),
- ('qum', 'Sipakapense'),
- ('quv', 'Sakapulteko'),
- ('rm', 'Romansh'),
- ('rn', 'Kirundi'),
- ('ro', 'Romanian'),
- ('ru', 'Russian'),
- ('rw', 'Kinyarwanda'),
- ('sa', 'Sanskrit'),
- ('sd', 'Sindhi'),
- ('sg', 'Sango'),
- ('si', 'Sinhala'),
- ('sk', 'Slovak'),
- ('skr', 'Saraiki'),
- ('sl', 'Slovenian'),
- ('sm', 'Samoan'),
- ('sn', 'Shona'),
- ('so', 'Somali'),
- ('sq', 'Albanian'),
- ('sr', 'Serbian'),
- ('ss', 'Swati'),
- ('st', 'Southern Sotho'),
- ('su', 'Sudanese'),
- ('sv', 'Swedish'),
- ('sw', 'Swahili'),
- ('ta', 'Tamil'),
- ('te', 'Telugu'),
- ('tg', 'Tajik'),
- ('th', 'Thai'),
- ('ti', 'Tigrinya'),
- ('tk', 'Turkmen'),
- ('tl', 'Tagalog'),
- ('tn', 'Tswana'),
- ('to', 'Tonga'),
- ('tr', 'Turkish'),
- ('ts', 'Tsonga'),
- ('tt', 'Tatar'),
- ('ttc', 'Tektiteko'),
- ('tzj', 'Tz\'utujil'),
- ('tw', 'Twi'),
- ('ug', 'Uyghur'),
- ('uk', 'Ukrainian'),
- ('ur', 'Urdu'),
- ('usp', 'Uspanteko'),
- ('uz', 'Uzbek'),
- ('vi', 'Vietnamese'),
- ('vo', 'Volapuk'),
- ('wo', 'Wolof'),
- ('xh', 'Xhosa'),
- ('xin', 'Xinka'),
- ('yi', 'Yiddish'),
- ('yo', 'Yoruba'),
- ('za', 'Zhuang'),
- ('zh', 'Chinese'),
- ('zu', 'Zulu'),
+ ("aa", "Afar"),
+ ("ab", "Abkhaz"),
+ ("acr", "Achi"),
+ ("af", "Afrikaans"),
+ ("agu", "Awakateko"),
+ ("am", "Amharic"),
+ ("ar", "Arabic"),
+ ("as", "Assamese"),
+ ("ay", "Aymara"),
+ ("az", "Azerbaijani"),
+ ("ba", "Bashkir"),
+ ("be", "Belarusian"),
+ ("bg", "Bulgarian"),
+ ("bh", "Bihari"),
+ ("bi", "Bislama"),
+ ("bn", "Bengali"),
+ ("bo", "Tibetan"),
+ ("br", "Breton"),
+ ("ca", "Catalan"),
+ ("caa", "Ch'orti'"),
+ ("cac", "Chuj"),
+ ("cab", "Garífuna"),
+ ("cak", "Kaqchikel"),
+ ("co", "Corsican"),
+ ("cs", "Czech"),
+ ("cy", "Welsh"),
+ ("da", "Danish"),
+ ("de", "German"),
+ ("dz", "Dzongkha"),
+ ("el", "Greek"),
+ ("en", "English"),
+ ("eo", "Esperanto"),
+ ("es", "Spanish"),
+ ("et", "Estonian"),
+ ("eu", "Basque"),
+ ("fa", "Iranian"),
+ ("fi", "Finnish"),
+ ("fj", "Fijian"),
+ ("fo", "Faroese"),
+ ("fr", "French"),
+ ("fy", "Western Frisian"),
+ ("ga", "Irish"),
+ ("gd", "Scottish Gaelic"),
+ ("gl", "Galician"),
+ ("gn", "Guarani"),
+ ("gu", "Gujarati"),
+ ("ha", "Hausa"),
+ ("hi", "Hindi"),
+ ("he", "Hebrew"),
+ ("hr", "Croatian"),
+ ("hu", "Hungarian"),
+ ("hy", "Armenian"),
+ ("ia", "Interlingua"),
+ ("id", "Indonesian"),
+ ("ie", "Interlingue"),
+ ("ik", "Inupiaq"),
+ ("is", "Icelandic"),
+ ("it", "Italian"),
+ ("itz", "Itza'"),
+ ("iu", "Inuktitut"),
+ ("ixl", "Ixil"),
+ ("ja", "Japanese"),
+ ("jac", "Popti'"),
+ ("jv", "Javanese"),
+ ("ka", "Georgian"),
+ ("kjb", "Q'anjob'al"),
+ ("kek", "Q'eqchi'"),
+ ("kk", "Kazakh"),
+ ("kl", "Kalaallisut"),
+ ("km", "Khmer"),
+ ("kn", "Kannada"),
+ ("knj", "Akateko"),
+ ("ko", "Korean"),
+ ("ks", "Kashmiri"),
+ ("ku", "Kurdish"),
+ ("ky", "Kyrgyz"),
+ ("la", "Latin"),
+ ("ln", "Lingala"),
+ ("lo", "Lao"),
+ ("lt", "Lithuanian"),
+ ("lv", "Latvian"),
+ ("mam", "Mam"),
+ ("mg", "Malagasy"),
+ ("mi", "Maori"),
+ ("mk", "Macedonian"),
+ ("ml", "Malayalam"),
+ ("mn", "Mongolian"),
+ ("mop", "Mopan"),
+ ("mr", "Marathi"),
+ ("ms", "Malay"),
+ ("mt", "Maltese"),
+ ("my", "Burmese"),
+ ("na", "Nauru"),
+ ("ne", "Nepali"),
+ ("nl", "Dutch"),
+ ("no", "Norwegian"),
+ ("oc", "Occitan"),
+ ("om", "Oromo"),
+ ("or", "Oriya"),
+ ("pa", "Panjabi"),
+ ("pl", "Polish"),
+ ("pnb", "Western Punjabi"),
+ ("poc", "Poqomam"),
+ ("poh", "Poqomchi"),
+ ("ps", "Pashto"),
+ ("pt", "Portuguese"),
+ ("qu", "Quechua"),
+ ("quc", "K'iche'"),
+ ("qum", "Sipakapense"),
+ ("quv", "Sakapulteko"),
+ ("rm", "Romansh"),
+ ("rn", "Kirundi"),
+ ("ro", "Romanian"),
+ ("ru", "Russian"),
+ ("rw", "Kinyarwanda"),
+ ("sa", "Sanskrit"),
+ ("sd", "Sindhi"),
+ ("sg", "Sango"),
+ ("si", "Sinhala"),
+ ("sk", "Slovak"),
+ ("skr", "Saraiki"),
+ ("sl", "Slovenian"),
+ ("sm", "Samoan"),
+ ("sn", "Shona"),
+ ("so", "Somali"),
+ ("sq", "Albanian"),
+ ("sr", "Serbian"),
+ ("ss", "Swati"),
+ ("st", "Southern Sotho"),
+ ("su", "Sudanese"),
+ ("sv", "Swedish"),
+ ("sw", "Swahili"),
+ ("ta", "Tamil"),
+ ("te", "Telugu"),
+ ("tg", "Tajik"),
+ ("th", "Thai"),
+ ("ti", "Tigrinya"),
+ ("tk", "Turkmen"),
+ ("tl", "Tagalog"),
+ ("tn", "Tswana"),
+ ("to", "Tonga"),
+ ("tr", "Turkish"),
+ ("ts", "Tsonga"),
+ ("tt", "Tatar"),
+ ("ttc", "Tektiteko"),
+ ("tzj", "Tz'utujil"),
+ ("tw", "Twi"),
+ ("ug", "Uyghur"),
+ ("uk", "Ukrainian"),
+ ("ur", "Urdu"),
+ ("usp", "Uspanteko"),
+ ("uz", "Uzbek"),
+ ("vi", "Vietnamese"),
+ ("vo", "Volapuk"),
+ ("wo", "Wolof"),
+ ("xh", "Xhosa"),
+ ("xin", "Xinka"),
+ ("yi", "Yiddish"),
+ ("yo", "Yoruba"),
+ ("za", "Zhuang"),
+ ("zh", "Chinese"),
+ ("zu", "Zulu"),
# Try these to test our non-2 letter language support
("nb-no", "Norwegian Bokmal"),
("pt-br", "Brazilian Portuguese"),
@@ -312,76 +312,68 @@
)
PROGRAMMING_LANGUAGES = (
- ('words', 'Only Words'),
- ('py', 'Python'),
- ('js', 'JavaScript'),
- ('php', 'PHP'),
- ('ruby', 'Ruby'),
- ('perl', 'Perl'),
- ('java', 'Java'),
- ('go', 'Go'),
- ('julia', 'Julia'),
- ('c', 'C'),
- ('csharp', 'C#'),
- ('cpp', 'C++'),
- ('objc', 'Objective-C'),
- ('css', 'CSS'),
- ('ts', 'TypeScript'),
- ('swift', 'Swift'),
- ('vb', 'Visual Basic'),
- ('r', 'R'),
- ('scala', 'Scala'),
- ('groovy', 'Groovy'),
- ('coffee', 'CoffeeScript'),
- ('lua', 'Lua'),
- ('haskell', 'Haskell'),
- ('other', 'Other'),
+ ("words", "Only Words"),
+ ("py", "Python"),
+ ("js", "JavaScript"),
+ ("php", "PHP"),
+ ("ruby", "Ruby"),
+ ("perl", "Perl"),
+ ("java", "Java"),
+ ("go", "Go"),
+ ("julia", "Julia"),
+ ("c", "C"),
+ ("csharp", "C#"),
+ ("cpp", "C++"),
+ ("objc", "Objective-C"),
+ ("css", "CSS"),
+ ("ts", "TypeScript"),
+ ("swift", "Swift"),
+ ("vb", "Visual Basic"),
+ ("r", "R"),
+ ("scala", "Scala"),
+ ("groovy", "Groovy"),
+ ("coffee", "CoffeeScript"),
+ ("lua", "Lua"),
+ ("haskell", "Haskell"),
+ ("other", "Other"),
)
-PROJECT_PK_REGEX = r'(?:[-\w]+)'
-PROJECT_SLUG_REGEX = r'(?:[-\w]+)'
+PROJECT_PK_REGEX = r"(?:[-\w]+)"
+PROJECT_SLUG_REGEX = r"(?:[-\w]+)"
GITHUB_REGEXS = [
-    re.compile(r'github.com/(.+)/(.+)(?:\.git){1}$'),
+    re.compile(r"github.com/(.+)/(.+)(?:\.git){1}$"),
    # This must come before the one without a / to make sure we don't capture the /
-    re.compile(r'github.com/(.+)/(.+)/'),
-    re.compile(r'github.com/(.+)/(.+)'),
-    re.compile(r'github.com:(.+)/(.+)\.git$'),
+    re.compile(r"github.com/(.+)/(.+)/"),
+    re.compile(r"github.com/(.+)/(.+)"),
+    re.compile(r"github.com:(.+)/(.+)\.git$"),
]
BITBUCKET_REGEXS = [
- re.compile(r'bitbucket.org/(.+)/(.+)\.git$'),
- re.compile(r'@bitbucket.org/(.+)/(.+)\.git$'),
+ re.compile(r"bitbucket.org/(.+)/(.+)\.git$"),
+ re.compile(r"@bitbucket.org/(.+)/(.+)\.git$"),
# This must come before the one without a / to make sure we don't capture the /
- re.compile(r'bitbucket.org/(.+)/(.+)/'),
- re.compile(r'bitbucket.org/(.+)/(.+)'),
- re.compile(r'bitbucket.org:(.+)/(.+)\.git$'),
+ re.compile(r"bitbucket.org/(.+)/(.+)/"),
+ re.compile(r"bitbucket.org/(.+)/(.+)"),
+ re.compile(r"bitbucket.org:(.+)/(.+)\.git$"),
]
GITLAB_REGEXS = [
- re.compile(r'gitlab.com/(.+)/(.+)(?:\.git){1}$'),
+ re.compile(r"gitlab.com/(.+)/(.+)(?:\.git){1}$"),
# This must come before the one without a / to make sure we don't capture the /
- re.compile(r'gitlab.com/(.+)/(.+)/'),
- re.compile(r'gitlab.com/(.+)/(.+)'),
- re.compile(r'gitlab.com:(.+)/(.+)\.git$'),
+ re.compile(r"gitlab.com/(.+)/(.+)/"),
+ re.compile(r"gitlab.com/(.+)/(.+)"),
+ re.compile(r"gitlab.com:(.+)/(.+)\.git$"),
]
GITHUB_URL = (
- 'https://github.com/{user}/{repo}/'
- '{action}/{version}{docroot}{path}{source_suffix}'
-)
-GITHUB_COMMIT_URL = (
- 'https://github.com/{user}/{repo}/'
- 'commit/{commit}'
-)
-GITHUB_PULL_REQUEST_URL = (
- 'https://github.com/{user}/{repo}/'
- 'pull/{number}'
+ "https://github.com/{user}/{repo}/"
+ "{action}/{version}{docroot}{path}{source_suffix}"
)
+GITHUB_COMMIT_URL = "https://github.com/{user}/{repo}/commit/{commit}"
+GITHUB_PULL_REQUEST_URL = "https://github.com/{user}/{repo}/pull/{number}"
GITHUB_PULL_REQUEST_COMMIT_URL = (
- 'https://github.com/{user}/{repo}/'
- 'pull/{number}/commits/{commit}'
+ "https://github.com/{user}/{repo}/pull/{number}/commits/{commit}"
)
BITBUCKET_URL = (
- 'https://bitbucket.org/{user}/{repo}/'
- 'src/{version}{docroot}{path}{source_suffix}'
+ "https://bitbucket.org/{user}/{repo}/src/{version}{docroot}{path}{source_suffix}"
)
BITBUCKET_COMMIT_URL = "https://bitbucket.org/{user}/{repo}/commits/{commit}"
GITLAB_URL = (
diff --git a/readthedocs/projects/managers.py b/readthedocs/projects/managers.py
index 995ea3da4f2..1a47352b210 100644
--- a/readthedocs/projects/managers.py
+++ b/readthedocs/projects/managers.py
@@ -2,6 +2,5 @@
class HTMLFileManager(models.Manager):
-
def get_queryset(self):
- return super().get_queryset().filter(name__endswith='.html')
+ return super().get_queryset().filter(name__endswith=".html")
diff --git a/readthedocs/projects/querysets.py b/readthedocs/projects/querysets.py
index 4d55f9b2729..cd612797df8 100644
--- a/readthedocs/projects/querysets.py
+++ b/readthedocs/projects/querysets.py
@@ -32,8 +32,12 @@ def for_user_and_viewer(self, user, viewer):
- Projects where both are member
- Public projects from `user`
"""
- viewer_projects = self._add_user_projects(self.none(), viewer, admin=True, member=True)
- owner_projects = self._add_user_projects(self.none(), user, admin=True, member=True)
+ viewer_projects = self._add_user_projects(
+ self.none(), viewer, admin=True, member=True
+ )
+ owner_projects = self._add_user_projects(
+ self.none(), user, admin=True, member=True
+ )
owner_public_projects = owner_projects.filter(privacy_level=constants.PUBLIC)
queryset = (viewer_projects & owner_projects) | owner_public_projects
return queryset.distinct()
@@ -79,11 +83,7 @@ def is_active(self, project):
"""
any_owner_banned = any(u.profile.banned for u in project.users.all())
organization = project.organizations.first()
- if (
- project.skip
- or any_owner_banned
- or (organization and organization.disabled)
- ):
+ if project.skip or any_owner_banned or (organization and organization.disabled):
return False
return True
@@ -132,12 +132,12 @@ def prefetch_latest_build(self):
# Prefetch the latest build for each project.
subquery = Subquery(
- Build.internal.filter(
- project=OuterRef('project_id')
- ).order_by('-date').values_list('id', flat=True)[:1]
+ Build.internal.filter(project=OuterRef("project_id"))
+ .order_by("-date")
+ .values_list("id", flat=True)[:1]
)
latest_build = Prefetch(
- 'builds',
+ "builds",
Build.internal.filter(pk__in=subquery),
to_attr=self.model.LATEST_BUILD_CACHE,
)
@@ -180,25 +180,22 @@ class RelatedProjectQuerySet(models.QuerySet):
"""
use_for_related_fields = True
- project_field = 'project'
+ project_field = "project"
def _add_from_user_projects(self, queryset, user):
if user and user.is_authenticated:
- projects_pk = (
- AdminPermission.projects(
- user=user,
- admin=True,
- member=True,
- )
- .values_list('pk', flat=True)
- )
- kwargs = {f'{self.project_field}__in': projects_pk}
+ projects_pk = AdminPermission.projects(
+ user=user,
+ admin=True,
+ member=True,
+ ).values_list("pk", flat=True)
+ kwargs = {f"{self.project_field}__in": projects_pk}
user_queryset = self.filter(**kwargs)
queryset = user_queryset | queryset
return queryset
def public(self, user=None, project=None):
- kwargs = {f'{self.project_field}__privacy_level': constants.PUBLIC}
+ kwargs = {f"{self.project_field}__privacy_level": constants.PUBLIC}
queryset = self.filter(**kwargs)
if user:
if user.is_superuser:
@@ -214,12 +211,12 @@ def api(self, user=None):
class ParentRelatedProjectQuerySet(RelatedProjectQuerySet):
- project_field = 'parent'
+ project_field = "parent"
use_for_related_fields = True
class ChildRelatedProjectQuerySet(RelatedProjectQuerySet):
- project_field = 'child'
+ project_field = "child"
use_for_related_fields = True
@@ -228,7 +225,7 @@ class FeatureQuerySet(models.QuerySet):
def for_project(self, project):
return self.filter(
- Q(projects=project) |
- Q(default_true=True, add_date__gt=project.pub_date) |
- Q(future_default_true=True, add_date__lte=project.pub_date)
+ Q(projects=project)
+ | Q(default_true=True, add_date__gt=project.pub_date)
+ | Q(future_default_true=True, add_date__lte=project.pub_date)
).distinct()
diff --git a/readthedocs/projects/tests/mockers.py b/readthedocs/projects/tests/mockers.py
index dc4e9a23fe6..4dae7d137de 100644
--- a/readthedocs/projects/tests/mockers.py
+++ b/readthedocs/projects/tests/mockers.py
@@ -10,7 +10,6 @@
class BuildEnvironmentMocker:
-
def __init__(self, project, version, build, requestsmock):
self.project = project
self.version = version
@@ -75,39 +74,38 @@ def _mock_artifact_builders(self):
# self.patches['builder.pdf.LatexBuildCommand.output'] = mock.patch(
# 'readthedocs.doc_builder.backends.sphinx.LatexBuildCommand.output',
# )
- self.patches['builder.pdf.glob'] = mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.glob',
- return_value=['output.file'],
+ self.patches["builder.pdf.glob"] = mock.patch(
+ "readthedocs.doc_builder.backends.sphinx.glob",
+ return_value=["output.file"],
)
- self.patches['builder.pdf.os.path.getmtime'] = mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.os.path.getmtime',
+ self.patches["builder.pdf.os.path.getmtime"] = mock.patch(
+ "readthedocs.doc_builder.backends.sphinx.os.path.getmtime",
return_value=1,
)
# NOTE: this is a problem, because it does not execute
# `run_command_class`, which does other extra work, like appending the
# commands to `environment.commands`, which is used later
- self.patches['environment.run_command_class'] = mock.patch(
- 'readthedocs.projects.tasks.builds.LocalBuildEnvironment.run_command_class',
- return_value=mock.MagicMock(output='stdout', successful=True)
+ self.patches["environment.run_command_class"] = mock.patch(
+ "readthedocs.projects.tasks.builds.LocalBuildEnvironment.run_command_class",
+ return_value=mock.MagicMock(output="stdout", successful=True),
)
-
# TODO: find a way to avoid mocking this one and mock `open()` used inside
# it instead, to make the mock more granular and be able to execute
# the `append_conf` normally.
- self.patches['builder.html.mkdocs.MkdocsHTML.append_conf'] = mock.patch(
- 'readthedocs.doc_builder.backends.mkdocs.MkdocsHTML.append_conf',
+ self.patches["builder.html.mkdocs.MkdocsHTML.append_conf"] = mock.patch(
+ "readthedocs.doc_builder.backends.mkdocs.MkdocsHTML.append_conf",
)
- self.patches['builder.html.mkdocs.MkdocsHTML.get_final_doctype'] = mock.patch(
- 'readthedocs.doc_builder.backends.mkdocs.MkdocsHTML.get_final_doctype',
+ self.patches["builder.html.mkdocs.MkdocsHTML.get_final_doctype"] = mock.patch(
+ "readthedocs.doc_builder.backends.mkdocs.MkdocsHTML.get_final_doctype",
return_value=MKDOCS,
)
# NOTE: another approach would be to make sure these files are in the tmpdir
# used for testing (see ``apply_fs`` util function)
- self.patches['builder.html.sphinx.HtmlBuilder.append_conf'] = mock.patch(
- 'readthedocs.doc_builder.backends.sphinx.HtmlBuilder.append_conf',
+ self.patches["builder.html.sphinx.HtmlBuilder.append_conf"] = mock.patch(
+ "readthedocs.doc_builder.backends.sphinx.HtmlBuilder.append_conf",
)
# self.patches['builder.html.mkdocs.yaml_dump_safely'] = mock.patch(
@@ -119,9 +117,9 @@ def _mock_artifact_builders(self):
# )
def _mock_git_repository(self):
- self.patches['git.Backend.run'] = mock.patch(
- 'readthedocs.vcs_support.backends.git.Backend.run',
- return_value=(0, 'stdout', 'stderr'),
+ self.patches["git.Backend.run"] = mock.patch(
+ "readthedocs.vcs_support.backends.git.Backend.run",
+ return_value=(0, "stdout", "stderr"),
)
# TODO: improve this
@@ -131,22 +129,22 @@ def _mock_git_repository(self):
# to hook into test setup and teardown such that we can clean up nicely.
# This probably means that the tmp dir should be handed to the mocker from
# outside.
- self.project_repository_path = '/tmp/readthedocs-tests/git-repository'
+ self.project_repository_path = "/tmp/readthedocs-tests/git-repository"
shutil.rmtree(self.project_repository_path, ignore_errors=True)
os.makedirs(self.project_repository_path)
- self.patches['models.Project.checkout_path'] = mock.patch(
- 'readthedocs.projects.models.Project.checkout_path',
+ self.patches["models.Project.checkout_path"] = mock.patch(
+ "readthedocs.projects.models.Project.checkout_path",
return_value=self.project_repository_path,
)
- self.patches['git.Backend.make_clean_working_dir'] = mock.patch(
- 'readthedocs.vcs_support.backends.git.Backend.make_clean_working_dir',
+ self.patches["git.Backend.make_clean_working_dir"] = mock.patch(
+ "readthedocs.vcs_support.backends.git.Backend.make_clean_working_dir",
)
# Make the backend return 3 submodules when asked
- self.patches['git.Backend.submodules'] = mock.patch(
- 'readthedocs.vcs_support.backends.git.Backend.submodules',
+ self.patches["git.Backend.submodules"] = mock.patch(
+ "readthedocs.vcs_support.backends.git.Backend.submodules",
new_callable=mock.PropertyMock,
return_value=[
"one",
@@ -161,9 +159,9 @@ def _mock_environment(self):
# example). So, there are some things we cannot check with this mock
#
# It would be good to find a way to mock `BuildCommand.run` instead
- self.patches['environment.run'] = mock.patch(
- 'readthedocs.projects.tasks.builds.LocalBuildEnvironment.run',
- return_value=mock.MagicMock(successful=True)
+ self.patches["environment.run"] = mock.patch(
+ "readthedocs.projects.tasks.builds.LocalBuildEnvironment.run",
+ return_value=mock.MagicMock(successful=True),
)
# self.patches['environment.run'] = mock.patch(
@@ -172,61 +170,61 @@ def _mock_environment(self):
# )
def _mock_storage(self):
- self.patches['build_media_storage'] = mock.patch(
- 'readthedocs.projects.tasks.builds.build_media_storage',
+ self.patches["build_media_storage"] = mock.patch(
+ "readthedocs.projects.tasks.builds.build_media_storage",
)
def _mock_api(self):
- headers = {'Content-Type': 'application/json'}
+ headers = {"Content-Type": "application/json"}
self.requestsmock.get(
- f'{settings.SLUMBER_API_HOST}/api/v2/version/{self.version.pk}/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/version/{self.version.pk}/",
json=lambda requests, context: VersionAdminSerializer(self.version).data,
headers=headers,
)
self.requestsmock.patch(
- f'{settings.SLUMBER_API_HOST}/api/v2/version/{self.version.pk}/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/version/{self.version.pk}/",
status_code=201,
)
self.requestsmock.get(
- f'{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/",
json=lambda request, context: {
- 'id': self.build.pk,
- 'state': BUILD_STATE_TRIGGERED,
- 'commit': self.build.commit,
+ "id": self.build.pk,
+ "state": BUILD_STATE_TRIGGERED,
+ "commit": self.build.commit,
},
headers=headers,
)
self.requestsmock.post(
- f'{settings.SLUMBER_API_HOST}/api/v2/command/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/command/",
status_code=201,
)
self.requestsmock.patch(
- f'{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/build/{self.build.pk}/",
status_code=201,
)
self.requestsmock.get(
- f'{settings.SLUMBER_API_HOST}/api/v2/build/concurrent/?project__slug={self.project.slug}',
+ f"{settings.SLUMBER_API_HOST}/api/v2/build/concurrent/?project__slug={self.project.slug}",
json=lambda request, context: {
- 'limit_reached': False,
- 'max_concurrent': settings.RTD_MAX_CONCURRENT_BUILDS,
- 'concurrent': 0,
+ "limit_reached": False,
+ "max_concurrent": settings.RTD_MAX_CONCURRENT_BUILDS,
+ "concurrent": 0,
},
headers=headers,
)
self.requestsmock.get(
- f'{settings.SLUMBER_API_HOST}/api/v2/project/{self.project.pk}/active_versions/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/project/{self.project.pk}/active_versions/",
json=lambda request, context: {
- 'versions': [
+ "versions": [
{
- 'id': self.version.pk,
- 'slug': self.version.slug,
+ "id": self.version.pk,
+ "slug": self.version.slug,
},
]
},
@@ -234,7 +232,7 @@ def _mock_api(self):
)
self.requestsmock.patch(
- f'{settings.SLUMBER_API_HOST}/api/v2/project/{self.project.pk}/',
+ f"{settings.SLUMBER_API_HOST}/api/v2/project/{self.project.pk}/",
status_code=201,
)
diff --git a/readthedocs/proxito/tests/test_full.py b/readthedocs/proxito/tests/test_full.py
index e8e845b2d80..6d60009b9aa 100644
--- a/readthedocs/proxito/tests/test_full.py
+++ b/readthedocs/proxito/tests/test_full.py
@@ -48,97 +48,106 @@ class TestFullDocServing(BaseDocServing):
# Test the full range of possible doc URLs
def test_health_check(self):
- url = reverse('health_check')
- host = 'project.dev.readthedocs.io'
+ url = reverse("health_check")
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.json(), {'status': 200})
+ self.assertEqual(resp.json(), {"status": 200})
# Test with IP address, which should still work
# since we're skipping middleware
- host = '127.0.0.1'
+ host = "127.0.0.1"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.json(), {"status": 200})
self.assertEqual(resp["CDN-Cache-Control"], "private")
def test_subproject_serving(self):
- url = '/projects/subproject/en/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/projects/subproject/en/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/subproject/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/subproject/latest/awesome.html",
)
def test_subproject_single_version(self):
self.subproject.versioning_scheme = SINGLE_VERSION_WITHOUT_TRANSLATIONS
self.subproject.save()
- url = '/projects/subproject/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/projects/subproject/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/subproject/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/subproject/latest/awesome.html",
)
def test_subproject_translation_serving(self):
- url = '/projects/subproject/es/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/projects/subproject/es/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/subproject-translation/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/subproject-translation/latest/awesome.html",
)
def test_subproject_alias_serving(self):
- url = '/projects/this-is-an-alias/en/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/projects/this-is-an-alias/en/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/subproject-alias/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/subproject-alias/latest/awesome.html",
)
def test_translation_serving(self):
- url = '/es/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/es/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/translation/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/translation/latest/awesome.html",
)
def test_normal_serving(self):
- url = '/en/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/en/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/project/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/project/latest/awesome.html",
)
def test_single_version_serving(self):
self.project.versioning_scheme = SINGLE_VERSION_WITHOUT_TRANSLATIONS
self.project.save()
- url = '/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/project/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/project/latest/awesome.html",
)
def test_single_version_serving_looks_like_normal(self):
self.project.versioning_scheme = SINGLE_VERSION_WITHOUT_TRANSLATIONS
self.project.save()
- url = '/en/stable/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/en/stable/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/project/latest/en/stable/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/project/latest/en/stable/awesome.html",
)
def test_index_serving(self):
- host = 'project.dev.readthedocs.io'
- urls = ('/en/latest/awesome/', '/en/latest/awesome/index.html')
+ host = "project.dev.readthedocs.io"
+ urls = ("/en/latest/awesome/", "/en/latest/awesome/index.html")
for url in urls:
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/project/latest/awesome/index.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/project/latest/awesome/index.html",
)
def test_single_version_external_serving(self):
@@ -146,24 +155,25 @@ def test_single_version_external_serving(self):
self.project.save()
fixture.get(
Version,
- verbose_name='10',
- slug='10',
+ verbose_name="10",
+ slug="10",
type=EXTERNAL,
active=True,
project=self.project,
)
- url = '/awesome.html'
- host = 'project--10.dev.readthedocs.build'
+ url = "/awesome.html"
+ host = "project--10.dev.readthedocs.build"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/external/html/project/10/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/external/html/project/10/awesome.html",
)
def test_external_version_serving(self):
fixture.get(
Version,
- verbose_name='10',
- slug='10',
+ verbose_name="10",
+ slug="10",
type=EXTERNAL,
active=True,
project=self.project,
@@ -172,7 +182,8 @@ def test_external_version_serving(self):
host = "project--10.dev.readthedocs.build"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/external/html/project/10/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/external/html/project/10/awesome.html",
)
def test_external_version_serving_old_slugs(self):
@@ -184,19 +195,20 @@ def test_external_version_serving_old_slugs(self):
"""
fixture.get(
Version,
- verbose_name='10',
- slug='10',
+ verbose_name="10",
+ slug="10",
type=EXTERNAL,
active=True,
project=self.project,
)
- self.project.slug = 'test--project'
+ self.project.slug = "test--project"
self.project.save()
host = "test--project--10.dev.readthedocs.build"
resp = self.client.get("/en/10/awesome.html", headers={"host": host})
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/external/html/test--project/10/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/external/html/test--project/10/awesome.html",
)
# Invalid tests
@@ -229,8 +241,8 @@ def test_inactive_version(self):
def test_serve_external_version_on_main_domain(self):
fixture.get(
Version,
- verbose_name='10',
- slug='10',
+ verbose_name="10",
+ slug="10",
type=EXTERNAL,
active=True,
project=self.project,
@@ -283,27 +295,27 @@ def test_serve_different_external_version_from_domain(self):
self.assertEqual(resp["X-RTD-Version"], "10")
def test_invalid_language_for_project_with_versions(self):
- url = '/foo/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/foo/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(resp.status_code, 404)
def test_invalid_translation_for_project_with_versions(self):
- url = '/cs/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/cs/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(resp.status_code, 404)
def test_invalid_subproject(self):
- url = '/projects/doesnt-exist/foo.html'
- host = 'project.dev.readthedocs.io'
+ url = "/projects/doesnt-exist/foo.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(resp.status_code, 404)
# https://github.com/readthedocs/readthedocs.org/pull/6226/files/596aa85a4886407f0eb65233ebf9c38ee3e8d485#r332445803
def test_valid_project_as_invalid_subproject(self):
- url = '/projects/translation/es/latest/foo.html'
- host = 'project.dev.readthedocs.io'
+ url = "/projects/translation/es/latest/foo.html"
+ host = "project.dev.readthedocs.io"
resp = self.client.get(url, headers={"host": host})
self.assertEqual(resp.status_code, 404)
@@ -313,7 +325,7 @@ def test_public_domain_hsts(self):
self.assertFalse("strict-transport-security" in response)
response = self.client.get("/", headers={"host": host}, secure=True)
- self.assertFalse('strict-transport-security' in response)
+ self.assertFalse("strict-transport-security" in response)
with override_settings(PUBLIC_DOMAIN_USES_HTTPS=True):
response = self.client.get("/", headers={"host": host})
@@ -321,12 +333,12 @@ def test_public_domain_hsts(self):
response = self.client.get("/", headers={"host": host}, secure=True)
self.assertEqual(
- response['strict-transport-security'],
- 'max-age=31536000; includeSubDomains; preload',
+ response["strict-transport-security"],
+ "max-age=31536000; includeSubDomains; preload",
)
def test_custom_domain_response_hsts(self):
- hostname = 'docs.random.com'
+ hostname = "docs.random.com"
domain = fixture.get(
Domain,
project=self.project,
@@ -337,21 +349,22 @@ def test_custom_domain_response_hsts(self):
)
response = self.client.get("/", headers={"host": hostname})
- self.assertFalse('strict-transport-security' in response)
+ self.assertFalse("strict-transport-security" in response)
response = self.client.get("/", headers={"host": hostname}, secure=True)
- self.assertFalse('strict-transport-security' in response)
+ self.assertFalse("strict-transport-security" in response)
domain.hsts_max_age = 3600
domain.save()
response = self.client.get("/", headers={"host": hostname})
- self.assertFalse('strict-transport-security' in response)
+ self.assertFalse("strict-transport-security" in response)
response = self.client.get("/", headers={"host": hostname}, secure=True)
- self.assertTrue('strict-transport-security' in response)
+ self.assertTrue("strict-transport-security" in response)
self.assertEqual(
- response['strict-transport-security'], 'max-age=3600',
+ response["strict-transport-security"],
+ "max-age=3600",
)
domain.hsts_include_subdomains = True
@@ -359,9 +372,10 @@ def test_custom_domain_response_hsts(self):
domain.save()
response = self.client.get("/", headers={"host": hostname}, secure=True)
- self.assertTrue('strict-transport-security' in response)
+ self.assertTrue("strict-transport-security" in response)
self.assertEqual(
- response['strict-transport-security'], 'max-age=3600; includeSubDomains; preload',
+ response["strict-transport-security"],
+ "max-age=3600; includeSubDomains; preload",
)
def test_single_version_serving_projects_dir(self):
@@ -482,14 +496,15 @@ class TestDocServingBackends(BaseDocServing):
@override_settings(PYTHON_MEDIA=True)
def test_python_media_serving(self):
with mock.patch(
- 'readthedocs.proxito.views.mixins.serve', return_value=HttpResponse()) as serve_mock:
- url = '/en/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ "readthedocs.proxito.views.mixins.serve", return_value=HttpResponse()
+ ) as serve_mock:
+ url = "/en/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
self.client.get(url, headers={"host": host})
serve_mock.assert_called_with(
mock.ANY,
- '/media/html/project/latest/awesome.html',
- os.path.join(settings.SITE_ROOT, 'media'),
+ "/media/html/project/latest/awesome.html",
+ os.path.join(settings.SITE_ROOT, "media"),
)
@override_settings(PYTHON_MEDIA=False)
@@ -499,7 +514,8 @@ def test_nginx_media_serving(self):
)
self.assertEqual(resp.status_code, 200)
self.assertEqual(
- resp['x-accel-redirect'], '/proxito/media/html/project/latest/awesome.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/project/latest/awesome.html",
)
@override_settings(PYTHON_MEDIA=False)
@@ -509,8 +525,8 @@ def test_project_nginx_serving_unicode_filename(self):
)
self.assertEqual(resp.status_code, 200)
self.assertEqual(
- resp['x-accel-redirect'],
- '/proxito/media/html/project/latest/%C3%BA%C3%B1%C3%AD%C4%8D%C3%B3d%C3%A9.html',
+ resp["x-accel-redirect"],
+ "/proxito/media/html/project/latest/%C3%BA%C3%B1%C3%AD%C4%8D%C3%B3d%C3%A9.html",
)
@override_settings(PYTHON_MEDIA=False)
@@ -729,11 +745,11 @@ def test_filename_with_parent_paths(self):
def test_track_html_files_only(self):
self.assertEqual(AuditLog.objects.all().count(), 0)
- url = '/en/latest/awesome.html'
- host = 'project.dev.readthedocs.io'
+ url = "/en/latest/awesome.html"
+ host = "project.dev.readthedocs.io"
with override_settings(RTD_DEFAULT_FEATURES={}):
resp = self.client.get(url, headers={"host": host})
- self.assertIn('x-accel-redirect', resp)
+ self.assertIn("x-accel-redirect", resp)
self.assertEqual(AuditLog.objects.all().count(), 0)
url = "/en/latest/awesome.html"
@@ -744,7 +760,7 @@ def test_track_html_files_only(self):
)
):
resp = self.client.get(url, headers={"host": host})
- self.assertIn('x-accel-redirect', resp)
+ self.assertIn("x-accel-redirect", resp)
self.assertEqual(AuditLog.objects.all().count(), 1)
log = AuditLog.objects.last()
@@ -786,7 +802,7 @@ def test_track_downloads(self):
)
):
resp = self.client.get(url, headers={"host": host})
- self.assertIn('x-accel-redirect', resp)
+ self.assertIn("x-accel-redirect", resp)
self.assertEqual(AuditLog.objects.all().count(), 1)
log = AuditLog.objects.last()
@@ -798,7 +814,7 @@ def test_track_downloads(self):
@override_settings(
PYTHON_MEDIA=False,
- PUBLIC_DOMAIN='readthedocs.io',
+ PUBLIC_DOMAIN="readthedocs.io",
RTD_EXTERNAL_VERSION_DOMAIN="dev.readthedocs.build",
)
# We are overriding the storage class instead of using RTD_BUILD_MEDIA_STORAGE,
@@ -820,7 +836,7 @@ def tearDown(self):
# Clean up the cache to avoid throttling in tests
cache.clear()
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'exists')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "exists")
def test_default_robots_txt(self, storage_exists):
storage_exists.return_value = False
self.project.versions.update(active=True, built=True)
@@ -839,14 +855,14 @@ def test_default_robots_txt(self, storage_exists):
).lstrip()
self.assertEqual(response.content.decode(), expected)
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'exists')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "exists")
def test_default_robots_txt_disallow_hidden_versions(self, storage_exists):
storage_exists.return_value = False
self.project.versions.update(active=True, built=True)
fixture.get(
Version,
project=self.project,
- slug='hidden',
+ slug="hidden",
active=True,
hidden=True,
privacy_level=PUBLIC,
@@ -854,7 +870,7 @@ def test_default_robots_txt_disallow_hidden_versions(self, storage_exists):
fixture.get(
Version,
project=self.project,
- slug='hidden-2',
+ slug="hidden-2",
active=True,
hidden=True,
privacy_level=PUBLIC,
@@ -862,7 +878,7 @@ def test_default_robots_txt_disallow_hidden_versions(self, storage_exists):
fixture.get(
Version,
project=self.project,
- slug='hidden-and-inactive',
+ slug="hidden-and-inactive",
active=False,
hidden=True,
privacy_level=PUBLIC,
@@ -870,7 +886,7 @@ def test_default_robots_txt_disallow_hidden_versions(self, storage_exists):
fixture.get(
Version,
project=self.project,
- slug='hidden-and-private',
+ slug="hidden-and-private",
active=False,
hidden=True,
privacy_level=PRIVATE,
@@ -893,10 +909,12 @@ def test_default_robots_txt_disallow_hidden_versions(self, storage_exists):
).lstrip()
self.assertEqual(response.content.decode(), expected)
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'exists')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "exists")
def test_default_robots_txt_private_version(self, storage_exists):
storage_exists.return_value = False
- self.project.versions.update(active=True, built=True, privacy_level=constants.PRIVATE)
+ self.project.versions.update(
+ active=True, built=True, privacy_level=constants.PRIVATE
+ )
response = self.client.get(
reverse("robots_txt"), headers={"host": "project.readthedocs.io"}
)
@@ -908,11 +926,14 @@ def test_custom_robots_txt(self):
reverse("robots_txt"), headers={"host": "project.readthedocs.io"}
)
self.assertEqual(
- response['x-accel-redirect'], '/proxito/media/html/project/latest/robots.txt',
+ response["x-accel-redirect"],
+ "/proxito/media/html/project/latest/robots.txt",
)
def test_custom_robots_txt_private_version(self):
- self.project.versions.update(active=True, built=True, privacy_level=constants.PRIVATE)
+ self.project.versions.update(
+ active=True, built=True, privacy_level=constants.PRIVATE
+ )
response = self.client.get(
reverse("robots_txt"), headers={"host": "project.readthedocs.io"}
)
@@ -937,11 +958,10 @@ def test_directory_indexes(self):
),
headers={"host": "project.readthedocs.io"},
)
+ self.assertEqual(response.status_code, 302)
self.assertEqual(
- response.status_code, 302
- )
- self.assertEqual(
- response['location'], '/en/latest/index-exists/',
+ response["location"],
+ "/en/latest/index-exists/",
)
def test_versioned_no_slash(self):
@@ -957,11 +977,10 @@ def test_versioned_no_slash(self):
reverse("proxito_404_handler", kwargs={"proxito_path": "/en/latest"}),
headers={"host": "project.readthedocs.io"},
)
+ self.assertEqual(response.status_code, 302)
self.assertEqual(
- response.status_code, 302
- )
- self.assertEqual(
- response['location'], '/en/latest/',
+ response["location"],
+ "/en/latest/",
)
@mock.patch.object(BuildMediaFileSystemStorageTest, "open")
@@ -984,11 +1003,10 @@ def test_directory_indexes_readme_serving(self, storage_open):
),
headers={"host": "project.readthedocs.io"},
)
+ self.assertEqual(response.status_code, 302)
self.assertEqual(
- response.status_code, 302
- )
- self.assertEqual(
- response['location'], '/en/latest/readme-exists/README.html',
+ response["location"],
+ "/en/latest/readme-exists/README.html",
)
def test_directory_indexes_get_args(self):
@@ -1009,19 +1027,18 @@ def test_directory_indexes_get_args(self):
+ "?foo=bar",
headers={"host": "project.readthedocs.io"},
)
+ self.assertEqual(response.status_code, 302)
self.assertEqual(
- response.status_code, 302
- )
- self.assertEqual(
- response['location'], '/en/latest/index-exists/?foo=bar',
+ response["location"],
+ "/en/latest/index-exists/?foo=bar",
)
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'open')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "open")
def test_404_storage_serves_custom_404_sphinx(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1084,7 +1101,7 @@ def test_redirects_to_correct_index_ending_without_slash(self):
self.project.versions.update(active=True, built=True)
version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1108,9 +1125,11 @@ def test_redirects_to_correct_index_ending_without_slash(self):
headers={"host": "project.readthedocs.io"},
)
self.assertEqual(response.status_code, 302)
- self.assertEqual(response['location'], '/en/fancy-version/not-found/README.html')
+ self.assertEqual(
+ response["location"], "/en/fancy-version/not-found/README.html"
+ )
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'open')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "open")
def test_404_index_redirect_skips_not_built_versions(self, storage_open):
self.version.built = False
self.version.save()
@@ -1197,7 +1216,7 @@ def test_404_storage_serves_custom_404_sphinx_single_html(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1223,12 +1242,12 @@ def test_404_storage_serves_custom_404_sphinx_single_html(self, storage_open):
self.assertEqual(response.status_code, 404)
storage_open.assert_called_once_with("html/project/fancy-version/404.html")
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'open')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "open")
def test_404_storage_serves_custom_404_sphinx_htmldir(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1253,12 +1272,12 @@ def test_404_storage_serves_custom_404_sphinx_htmldir(self, storage_open):
self.assertEqual(response.status_code, 404)
storage_open.assert_called_once_with("html/project/fancy-version/404.html")
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'open')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "open")
def test_404_storage_serves_custom_404_mkdocs(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1284,12 +1303,12 @@ def test_404_storage_serves_custom_404_mkdocs(self, storage_open):
self.assertEqual(response.status_code, 404)
storage_open.assert_called_once_with("html/project/fancy-version/404.html")
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'open')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "open")
def test_404_all_paths_checked_sphinx(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1310,12 +1329,12 @@ def test_404_all_paths_checked_sphinx(self, storage_open):
self.assertEqual(r.status_code, 404)
storage_open.assert_not_called()
- @mock.patch.object(BuildMediaFileSystemStorageTest, 'open')
+ @mock.patch.object(BuildMediaFileSystemStorageTest, "open")
def test_404_all_paths_checked_sphinx_single_html(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1341,7 +1360,7 @@ def test_404_all_paths_checked_sphinx_html_dir(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1367,7 +1386,7 @@ def test_404_all_paths_checked_mkdocs(self, storage_open):
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1395,7 +1414,7 @@ def test_404_all_paths_checked_default_version_different_doc_type(
self.project.versions.update(active=True, built=True)
fancy_version = fixture.get(
Version,
- slug='fancy-version',
+ slug="fancy-version",
privacy_level=constants.PUBLIC,
active=True,
built=True,
@@ -1589,37 +1608,37 @@ def test_sitemap_xml(self):
)
not_translated_public_version = fixture.get(
Version,
- identifier='not-translated-version',
- verbose_name='not-translated-version',
- slug='not-translated-version',
+ identifier="not-translated-version",
+ verbose_name="not-translated-version",
+ slug="not-translated-version",
privacy_level=constants.PUBLIC,
project=self.project,
- active=True
+ active=True,
)
stable_version = fixture.get(
Version,
- identifier='stable',
- verbose_name='stable',
- slug='stable',
+ identifier="stable",
+ verbose_name="stable",
+ slug="stable",
privacy_level=constants.PUBLIC,
project=self.project,
- active=True
+ active=True,
)
# This is an EXTERNAL Version
external_version = fixture.get(
Version,
- identifier='pr-version',
- verbose_name='pr-version',
- slug='pr-9999',
+ identifier="pr-version",
+ verbose_name="pr-version",
+ slug="pr-9999",
project=self.project,
active=True,
- type=EXTERNAL
+ type=EXTERNAL,
)
# This also creates a Version `latest` automatically for this project
translation = fixture.get(
Project,
main_language_project=self.project,
- language='translation-es',
+ language="translation-es",
privacy_level=constants.PUBLIC,
)
translation.versions.update(privacy_level=constants.PUBLIC)
@@ -1628,7 +1647,7 @@ def test_sitemap_xml(self):
hreflang_test_translation_project = fixture.get(
Project,
main_language_project=self.project,
- language='zh_CN',
+ language="zh_CN",
privacy_level=constants.PUBLIC,
)
hreflang_test_translation_project.versions.update(
@@ -1639,8 +1658,10 @@ def test_sitemap_xml(self):
reverse("sitemap_xml"), headers={"host": "project.readthedocs.io"}
)
self.assertEqual(response.status_code, 200)
- self.assertEqual(response['Content-Type'], 'application/xml')
- for version in self.project.versions(manager=INTERNAL).filter(privacy_level=constants.PUBLIC):
+ self.assertEqual(response["Content-Type"], "application/xml")
+ for version in self.project.versions(manager=INTERNAL).filter(
+ privacy_level=constants.PUBLIC
+ ):
self.assertContains(
response,
self.project.get_docs_url(
@@ -1670,7 +1691,7 @@ def test_sitemap_xml(self):
)
# hreflang should use hyphen instead of underscore
# in language and country value. (zh_CN should be zh-CN)
- self.assertContains(response, 'zh-CN')
+ self.assertContains(response, "zh-CN")
# External Versions should not be in the sitemap_xml.
self.assertNotContains(
@@ -1683,26 +1704,30 @@ def test_sitemap_xml(self):
# Check if STABLE version has priority of 1 and changefreq of weekly.
self.assertEqual(
- response.context['versions'][0]['loc'],
+ response.context["versions"][0]["loc"],
self.project.get_docs_url(
version_slug=stable_version.slug,
lang_slug=self.project.language,
- ),)
- self.assertEqual(response.context['versions'][0]['priority'], 1)
- self.assertEqual(response.context['versions'][0]['changefreq'], 'weekly')
+ ),
+ )
+ self.assertEqual(response.context["versions"][0]["priority"], 1)
+ self.assertEqual(response.context["versions"][0]["changefreq"], "weekly")
# Check if LATEST version has priority of 0.9 and changefreq of daily.
self.assertEqual(
- response.context['versions'][1]['loc'],
+ response.context["versions"][1]["loc"],
self.project.get_docs_url(
- version_slug='latest',
+ version_slug="latest",
lang_slug=self.project.language,
- ),)
- self.assertEqual(response.context['versions'][1]['priority'], 0.9)
- self.assertEqual(response.context['versions'][1]['changefreq'], 'daily')
+ ),
+ )
+ self.assertEqual(response.context["versions"][1]["priority"], 0.9)
+ self.assertEqual(response.context["versions"][1]["changefreq"], "daily")
def test_sitemap_all_private_versions(self):
- self.project.versions.update(active=True, built=True, privacy_level=constants.PRIVATE)
+ self.project.versions.update(
+ active=True, built=True, privacy_level=constants.PRIVATE
+ )
response = self.client.get(
reverse("sitemap_xml"), headers={"host": "project.readthedocs.io"}
)
@@ -1778,7 +1803,7 @@ def test_404_download(self):
@override_settings(
ALLOW_PRIVATE_REPOS=True,
- PUBLIC_DOMAIN='dev.readthedocs.io',
+ PUBLIC_DOMAIN="dev.readthedocs.io",
PUBLIC_DOMAIN_USES_HTTPS=True,
RTD_DEFAULT_FEATURES=dict([RTDProductFeature(type=TYPE_CNAME).to_item()]),
)
@@ -1790,7 +1815,6 @@ def test_404_download(self):
new=StaticFileSystemStorageTest(),
)
class TestCDNCache(BaseDocServing):
-
def _test_cache_control_header_project(self, expected_value, host=None):
"""
Test the CDN-Cache-Control header on requests for `self.project`.
@@ -1798,31 +1822,31 @@ def _test_cache_control_header_project(self, expected_value, host=None):
:param expected_value: The expected value of the header: 'public' or 'private'.
:param host: Hostname to use in the requests.
"""
- host = host or 'project.dev.readthedocs.io'
+ host = host or "project.dev.readthedocs.io"
# Normal serving.
urls = [
- '/en/latest/',
- '/en/latest/foo.html',
+ "/en/latest/",
+ "/en/latest/foo.html",
]
for url in urls:
resp = self.client.get(url, secure=True, headers={"host": host})
- self.assertEqual(resp.headers['CDN-Cache-Control'], expected_value, url)
- self.assertEqual(resp.headers['Cache-Tag'], 'project,project:latest', url)
+ self.assertEqual(resp.headers["CDN-Cache-Control"], expected_value, url)
+ self.assertEqual(resp.headers["Cache-Tag"], "project,project:latest", url)
# Page & system redirects are always cached.
# Authz is done on the redirected URL.
- location = f'https://{host}/en/latest/'
+ location = f"https://{host}/en/latest/"
urls = [
- ['', location],
- ['/', location],
- ['/page/foo.html', f'https://{host}/en/latest/foo.html'],
+ ["", location],
+ ["/", location],
+ ["/page/foo.html", f"https://{host}/en/latest/foo.html"],
]
for url, location in urls:
resp = self.client.get(url, secure=True, headers={"host": host})
- self.assertEqual(resp['Location'], location, url)
- self.assertEqual(resp.headers['CDN-Cache-Control'], 'public', url)
- self.assertEqual(resp.headers['Cache-Tag'], 'project', url)
+ self.assertEqual(resp["Location"], location, url)
+ self.assertEqual(resp.headers["CDN-Cache-Control"], "public", url)
+ self.assertEqual(resp.headers["Cache-Tag"], "project", url)
# Proxied static files are always cached.
resp = self.client.get("/_/static/file.js", secure=True, headers={"host": host})
@@ -1832,7 +1856,7 @@ def _test_cache_control_header_project(self, expected_value, host=None):
)
# Slash redirects can always be cached.
- url = '/en//latest//'
+ url = "/en//latest//"
resp = self.client.get(url, secure=True, headers={"host": host})
self.assertEqual(resp["Location"], "/en/latest/", url)
self.assertEqual(resp.headers["CDN-Cache-Control"], "public", url)
@@ -1862,30 +1886,32 @@ def _test_cache_control_header_subproject(self, expected_value, host=None):
:param expected_value: The expected value of the header: 'public' or 'private'.
:param host: Hostname to use in the requests.
"""
- host = host or 'project.dev.readthedocs.io'
+ host = host or "project.dev.readthedocs.io"
# Normal serving.
urls = [
- '/projects/subproject/en/latest/',
- '/projects/subproject/en/latest/foo.html',
+ "/projects/subproject/en/latest/",
+ "/projects/subproject/en/latest/foo.html",
]
for url in urls:
resp = self.client.get(url, secure=True, headers={"host": host})
- self.assertEqual(resp.headers['CDN-Cache-Control'], expected_value, url)
- self.assertEqual(resp.headers['Cache-Tag'], 'subproject,subproject:latest', url)
+ self.assertEqual(resp.headers["CDN-Cache-Control"], expected_value, url)
+ self.assertEqual(
+ resp.headers["Cache-Tag"], "subproject,subproject:latest", url
+ )
# Page & system redirects are always cached.
# Authz is done on the redirected URL.
- location = f'https://{host}/projects/subproject/en/latest/'
+ location = f"https://{host}/projects/subproject/en/latest/"
urls = [
- ['/projects/subproject', location],
- ['/projects/subproject/', location],
+ ["/projects/subproject", location],
+ ["/projects/subproject/", location],
]
for url, location in urls:
resp = self.client.get(url, secure=True, headers={"host": host})
- self.assertEqual(resp['Location'], location, url)
- self.assertEqual(resp.headers['CDN-Cache-Control'], 'public', url)
- self.assertEqual(resp.headers['Cache-Tag'], 'subproject', url)
+ self.assertEqual(resp["Location"], location, url)
+ self.assertEqual(resp.headers["CDN-Cache-Control"], "public", url)
+ self.assertEqual(resp.headers["Cache-Tag"], "subproject", url)
# Proxied static files are always cached.
resp = self.client.get("/_/static/file.js", secure=True, headers={"host": host})
@@ -1895,7 +1921,7 @@ def _test_cache_control_header_subproject(self, expected_value, host=None):
)
# Slash redirects can always be cached.
- url = '/projects//subproject//'
+ url = "/projects//subproject//"
resp = self.client.get(url, secure=True, headers={"host": host})
self.assertEqual(resp["Location"], "/projects/subproject/", url)
self.assertEqual(resp.headers["CDN-Cache-Control"], "public", url)
@@ -1903,13 +1929,15 @@ def _test_cache_control_header_subproject(self, expected_value, host=None):
def test_cache_on_private_versions(self):
self.project.versions.update(privacy_level=PRIVATE)
- self._test_cache_control_header_project(expected_value='private')
+ self._test_cache_control_header_project(expected_value="private")
def test_cache_on_private_versions_custom_domain(self):
self.project.versions.update(privacy_level=PRIVATE)
self.domain.canonical = True
self.domain.save()
- self._test_cache_control_header_project(expected_value='private', host=self.domain.domain)
+ self._test_cache_control_header_project(
+ expected_value="private", host=self.domain.domain
+ )
# HTTPS redirects can always be cached.
resp = self.client.get(
@@ -1921,13 +1949,15 @@ def test_cache_on_private_versions_custom_domain(self):
def test_cache_public_versions(self):
self.project.versions.update(privacy_level=PUBLIC)
- self._test_cache_control_header_project(expected_value='public')
+ self._test_cache_control_header_project(expected_value="public")
def test_cache_public_versions_custom_domain(self):
self.project.versions.update(privacy_level=PUBLIC)
self.domain.canonical = True
self.domain.save()
- self._test_cache_control_header_project(expected_value='public', host=self.domain.domain)
+ self._test_cache_control_header_project(
+ expected_value="public", host=self.domain.domain
+ )
# HTTPS redirect respects the privacy level of the version.
resp = self.client.get(
@@ -1939,17 +1969,19 @@ def test_cache_public_versions_custom_domain(self):
def test_cache_on_private_versions_subproject(self):
self.subproject.versions.update(privacy_level=PRIVATE)
- self._test_cache_control_header_subproject(expected_value='private')
+ self._test_cache_control_header_subproject(expected_value="private")
def test_cache_on_private_versions_custom_domain_subproject(self):
self.subproject.versions.update(privacy_level=PRIVATE)
self.domain.canonical = True
self.domain.save()
- self._test_cache_control_header_subproject(expected_value='private', host=self.domain.domain)
+ self._test_cache_control_header_subproject(
+ expected_value="private", host=self.domain.domain
+ )
# HTTPS redirects can always be cached.
resp = self.client.get(
- '/projects/subproject/en/latest/',
+ "/projects/subproject/en/latest/",
secure=False,
headers={"host": self.domain.domain},
)
@@ -1962,17 +1994,19 @@ def test_cache_on_private_versions_custom_domain_subproject(self):
def test_cache_public_versions_subproject(self):
self.subproject.versions.update(privacy_level=PUBLIC)
- self._test_cache_control_header_subproject(expected_value='public')
+ self._test_cache_control_header_subproject(expected_value="public")
def test_cache_public_versions_custom_domain(self):
self.subproject.versions.update(privacy_level=PUBLIC)
self.domain.canonical = True
self.domain.save()
- self._test_cache_control_header_subproject(expected_value='public', host=self.domain.domain)
+ self._test_cache_control_header_subproject(
+ expected_value="public", host=self.domain.domain
+ )
# HTTPS redirects can always be cached.
resp = self.client.get(
- '/projects/subproject/en/latest/',
+ "/projects/subproject/en/latest/",
secure=False,
headers={"host": self.domain.domain},
)
diff --git a/readthedocs/rtd_tests/tests/test_automation_rules.py b/readthedocs/rtd_tests/tests/test_automation_rules.py
index aaf6756bdba..a6b9df0e507 100644
--- a/readthedocs/rtd_tests/tests/test_automation_rules.py
+++ b/readthedocs/rtd_tests/tests/test_automation_rules.py
@@ -20,62 +20,58 @@
@pytest.mark.django_db
-@mock.patch('readthedocs.builds.automation_actions.trigger_build')
+@mock.patch("readthedocs.builds.automation_actions.trigger_build")
class TestRegexAutomationRules:
-
@pytest.fixture(autouse=True)
def setup_method(self):
self.project = get(Project)
@pytest.mark.parametrize(
- 'version_name,regex,result',
+ "version_name,regex,result",
[
# Matches all
- ('master', r'.*', True),
- ('latest', r'.*', True),
-
+ ("master", r".*", True),
+ ("latest", r".*", True),
# Contains match
- ('master', r'master', True),
- ('master-something', r'master', True),
- ('something-master', r'master', True),
- ('foo', r'master', False),
-
+ ("master", r"master", True),
+ ("master-something", r"master", True),
+ ("something-master", r"master", True),
+ ("foo", r"master", False),
# Starts with match
- ('master', r'^master', True),
- ('master-foo', r'^master', True),
- ('foo-master', r'^master', False),
-
+ ("master", r"^master", True),
+ ("master-foo", r"^master", True),
+ ("foo-master", r"^master", False),
# Ends with match
- ('master', r'master$', True),
- ('foo-master', r'master$', True),
- ('master-foo', r'master$', False),
-
+ ("master", r"master$", True),
+ ("foo-master", r"master$", True),
+ ("master-foo", r"master$", False),
# Exact match
- ('master', r'^master$', True),
- ('masterr', r'^master$', False),
- ('mmaster', r'^master$', False),
-
+ ("master", r"^master$", True),
+ ("masterr", r"^master$", False),
+ ("mmaster", r"^master$", False),
# Match versions from 1.3.x series
- ('1.3.2', r'^1\.3\..*', True),
- ('1.3.3.5', r'^1\.3\..*', True),
- ('1.3.3-rc', r'^1\.3\..*', True),
- ('1.2.3', r'^1\.3\..*', False),
-
+ ("1.3.2", r"^1\.3\..*", True),
+ ("1.3.3.5", r"^1\.3\..*", True),
+ ("1.3.3-rc", r"^1\.3\..*", True),
+ ("1.2.3", r"^1\.3\..*", False),
# Some special regex escape characters
- ('12-a', r'^\d{2}-\D$', True),
- ('1-a', r'^\d{2}-\D$', False),
-
+ ("12-a", r"^\d{2}-\D$", True),
+ ("1-a", r"^\d{2}-\D$", False),
# Groups
- ('1.3-rc', r'^(\d\.?)*-(\w*)$', True),
-
+ ("1.3-rc", r"^(\d\.?)*-(\w*)$", True),
# Bad regex
- ('master', r'*', False),
- ('master', r'?', False),
- ]
+ ("master", r"*", False),
+ ("master", r"?", False),
+ ],
)
- @pytest.mark.parametrize('version_type', [BRANCH, TAG])
+ @pytest.mark.parametrize("version_type", [BRANCH, TAG])
def test_match(
- self, trigger_build, version_name, regex, result, version_type,
+ self,
+ trigger_build,
+ version_name,
+ regex,
+ result,
+ version_type,
):
version = get(
Version,
@@ -97,21 +93,23 @@ def test_match(
assert rule.matches.all().count() == (1 if result else 0)
@pytest.mark.parametrize(
- 'version_name,result',
+ "version_name,result",
[
- ('master', True),
- ('latest', True),
- ('master-something', True),
- ('something-master', True),
- ('1.3.2', True),
- ('1.3.3.5', True),
- ('1.3.3-rc', True),
- ('12-a', True),
- ('1-a', True),
- ]
+ ("master", True),
+ ("latest", True),
+ ("master-something", True),
+ ("something-master", True),
+ ("1.3.2", True),
+ ("1.3.3.5", True),
+ ("1.3.3-rc", True),
+ ("12-a", True),
+ ("1-a", True),
+ ],
)
- @pytest.mark.parametrize('version_type', [BRANCH, TAG])
- def test_predefined_match_all_versions(self, trigger_build, version_name, result, version_type):
+ @pytest.mark.parametrize("version_type", [BRANCH, TAG])
+ def test_predefined_match_all_versions(
+ self, trigger_build, version_name, result, version_type
+ ):
version = get(
Version,
verbose_name=version_name,
@@ -131,23 +129,24 @@ def test_predefined_match_all_versions(self, trigger_build, version_name, result
assert rule.run(version) is result
@pytest.mark.parametrize(
- 'version_name,result',
+ "version_name,result",
[
- ('master', False),
- ('latest', False),
- ('master-something', False),
- ('something-master', False),
- ('1.3.3.5', False),
- ('12-a', False),
- ('1-a', False),
-
- ('1.3.2', True),
- ('1.3.3-rc', True),
- ('0.1.1', True),
- ]
+ ("master", False),
+ ("latest", False),
+ ("master-something", False),
+ ("something-master", False),
+ ("1.3.3.5", False),
+ ("12-a", False),
+ ("1-a", False),
+ ("1.3.2", True),
+ ("1.3.3-rc", True),
+ ("0.1.1", True),
+ ],
)
- @pytest.mark.parametrize('version_type', [BRANCH, TAG])
- def test_predefined_match_semver_versions(self, trigger_build, version_name, result, version_type):
+ @pytest.mark.parametrize("version_type", [BRANCH, TAG])
+ def test_predefined_match_semver_versions(
+ self, trigger_build, version_name, result, version_type
+ ):
version = get(
Version,
verbose_name=version_name,
@@ -169,7 +168,7 @@ def test_predefined_match_semver_versions(self, trigger_build, version_name, res
def test_action_activation(self, trigger_build):
version = get(
Version,
- verbose_name='v2',
+ verbose_name="v2",
project=self.project,
active=False,
type=TAG,
@@ -178,7 +177,7 @@ def test_action_activation(self, trigger_build):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
version_type=TAG,
)
@@ -186,9 +185,9 @@ def test_action_activation(self, trigger_build):
assert version.active is True
trigger_build.assert_called_once()
- @pytest.mark.parametrize('version_type', [BRANCH, TAG])
+ @pytest.mark.parametrize("version_type", [BRANCH, TAG])
def test_action_delete_version(self, trigger_build, version_type):
- slug = 'delete-me'
+ slug = "delete-me"
version = get(
Version,
slug=slug,
@@ -201,16 +200,18 @@ def test_action_delete_version(self, trigger_build, version_type):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.DELETE_VERSION_ACTION,
version_type=version_type,
)
assert rule.run(version) is True
assert not self.project.versions.filter(slug=slug).exists()
- @pytest.mark.parametrize('version_type', [BRANCH, TAG])
- def test_action_delete_version_on_default_version(self, trigger_build, version_type):
- slug = 'delete-me'
+ @pytest.mark.parametrize("version_type", [BRANCH, TAG])
+ def test_action_delete_version_on_default_version(
+ self, trigger_build, version_type
+ ):
+ slug = "delete-me"
version = get(
Version,
slug=slug,
@@ -226,7 +227,7 @@ def test_action_delete_version_on_default_version(self, trigger_build, version_t
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.DELETE_VERSION_ACTION,
version_type=version_type,
)
@@ -236,7 +237,7 @@ def test_action_delete_version_on_default_version(self, trigger_build, version_t
def test_action_set_default_version(self, trigger_build):
version = get(
Version,
- verbose_name='v2',
+ verbose_name="v2",
project=self.project,
active=True,
type=TAG,
@@ -245,7 +246,7 @@ def test_action_set_default_version(self, trigger_build):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.SET_DEFAULT_VERSION_ACTION,
version_type=TAG,
)
@@ -256,7 +257,7 @@ def test_action_set_default_version(self, trigger_build):
def test_version_hide_action(self, trigger_build):
version = get(
Version,
- verbose_name='v2',
+ verbose_name="v2",
project=self.project,
active=False,
hidden=False,
@@ -266,7 +267,7 @@ def test_version_hide_action(self, trigger_build):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.HIDE_VERSION_ACTION,
version_type=TAG,
)
@@ -278,7 +279,7 @@ def test_version_hide_action(self, trigger_build):
def test_version_make_public_action(self, trigger_build):
version = get(
Version,
- verbose_name='v2',
+ verbose_name="v2",
project=self.project,
active=False,
hidden=False,
@@ -289,7 +290,7 @@ def test_version_make_public_action(self, trigger_build):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.MAKE_VERSION_PUBLIC_ACTION,
version_type=TAG,
)
@@ -300,7 +301,7 @@ def test_version_make_public_action(self, trigger_build):
def test_version_make_private_action(self, trigger_build):
version = get(
Version,
- verbose_name='v2',
+ verbose_name="v2",
project=self.project,
active=False,
hidden=False,
@@ -311,7 +312,7 @@ def test_version_make_private_action(self, trigger_build):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='.*',
+ match_arg=".*",
action=VersionAutomationRule.MAKE_VERSION_PRIVATE_ACTION,
version_type=TAG,
)
@@ -322,7 +323,7 @@ def test_version_make_private_action(self, trigger_build):
def test_matches_history(self, trigger_build):
version = get(
Version,
- verbose_name='test',
+ verbose_name="test",
project=self.project,
active=False,
type=TAG,
@@ -333,7 +334,7 @@ def test_matches_history(self, trigger_build):
RegexAutomationRule,
project=self.project,
priority=0,
- match_arg='^test',
+ match_arg="^test",
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
version_type=TAG,
)
@@ -342,34 +343,33 @@ def test_matches_history(self, trigger_build):
assert rule.matches.all().count() == 1
match = rule.matches.first()
- assert match.version_name == 'test'
+ assert match.version_name == "test"
assert match.version_type == TAG
assert match.action == VersionAutomationRule.ACTIVATE_VERSION_ACTION
- assert match.match_arg == '^test'
+ assert match.match_arg == "^test"
for i in range(1, 31):
- version.verbose_name = f'test {i}'
+ version.verbose_name = f"test {i}"
version.save()
assert rule.run(version) is True
assert rule.matches.all().count() == 15
match = rule.matches.first()
- assert match.version_name == 'test 30'
+ assert match.version_name == "test 30"
assert match.version_type == TAG
assert match.action == VersionAutomationRule.ACTIVATE_VERSION_ACTION
- assert match.match_arg == '^test'
+ assert match.match_arg == "^test"
match = rule.matches.last()
- assert match.version_name == 'test 16'
+ assert match.version_name == "test 16"
assert match.version_type == TAG
assert match.action == VersionAutomationRule.ACTIVATE_VERSION_ACTION
- assert match.match_arg == '^test'
+ assert match.match_arg == "^test"
@pytest.mark.django_db
class TestAutomationRuleManager:
-
@pytest.fixture(autouse=True)
def setup_method(self):
self.project = get(Project)
@@ -379,8 +379,8 @@ def test_add_rule_regex(self):
rule = RegexAutomationRule.objects.create(
project=self.project,
- description='First rule',
- match_arg='.*',
+ description="First rule",
+ match_arg=".*",
version_type=TAG,
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
)
@@ -392,8 +392,8 @@ def test_add_rule_regex(self):
# Adding a second rule
rule = RegexAutomationRule.objects.create(
project=self.project,
- description='Second rule',
- match_arg='.*',
+ description="Second rule",
+ match_arg=".*",
version_type=BRANCH,
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
)
@@ -403,10 +403,10 @@ def test_add_rule_regex(self):
# Adding a rule with a non-sequential priority
rule = get(
RegexAutomationRule,
- description='Third rule',
+ description="Third rule",
project=self.project,
priority=9,
- match_arg='.*',
+ match_arg=".*",
version_type=TAG,
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
)
@@ -416,8 +416,8 @@ def test_add_rule_regex(self):
# Adding a new rule
rule = RegexAutomationRule.objects.create(
project=self.project,
- description='Fourth rule',
- match_arg='.*',
+ description="Fourth rule",
+ match_arg=".*",
version_type=BRANCH,
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
)
@@ -431,7 +431,6 @@ def test_add_rule_regex(self):
@pytest.mark.django_db
class TestAutomationRuleMove:
-
@pytest.fixture(autouse=True)
def setup_method(self):
self.project = get(Project)
@@ -448,7 +447,7 @@ def _add_rule(self, description):
rule = RegexAutomationRule.objects.create(
project=self.project,
description=description,
- match_arg='.*',
+ match_arg=".*",
version_type=BRANCH,
action=VersionAutomationRule.ACTIVATE_VERSION_ACTION,
)
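
A minimal sketch of the matching pattern these tests exercise, assuming only that `RegexAutomationRule.run()` applies `match_arg` as a regex to the version's verbose name (the class and names below are illustrative stand-ins, not the real model):

import re

import pytest


class FakeRegexRule:
    # Stand-in for RegexAutomationRule: run() reports whether match_arg
    # matches the given version name.
    def __init__(self, match_arg):
        self.match_arg = match_arg

    def run(self, version_name):
        return re.search(self.match_arg, version_name) is not None


@pytest.mark.parametrize(
    "version_name,result",
    [("v2", True), ("latest", False)],
)
def test_fake_rule(version_name, result):
    assert FakeRegexRule(match_arg=r"^v\d+$").run(version_name) is result
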
diff --git a/readthedocs/rtd_tests/tests/test_build_storage.py b/readthedocs/rtd_tests/tests/test_build_storage.py
index 0d44ed72595..2ac89e71f6d 100644
--- a/readthedocs/rtd_tests/tests/test_build_storage.py
+++ b/readthedocs/rtd_tests/tests/test_build_storage.py
@@ -9,7 +9,7 @@
from readthedocs.builds.storage import BuildMediaFileSystemStorage
-files_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'files')
+files_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "files")
class TestBuildMediaStorage(TestCase):
@@ -40,7 +40,7 @@ def assertFileTree(self, source, tree):
self.assertFileTree(self.storage.join(source, folder), files)
def test_copy_directory(self):
- self.assertFalse(self.storage.exists('files/test.html'))
+ self.assertFalse(self.storage.exists("files/test.html"))
with override_settings(DOCROOT=files_dir):
self.storage.copy_directory(files_dir, "files")
@@ -52,12 +52,12 @@ def test_copy_directory(self):
self.assertFalse(self.storage.exists("files/dir-symlink"))
def test_sync_directory(self):
- tmp_files_dir = os.path.join(tempfile.mkdtemp(), 'files')
+ tmp_files_dir = os.path.join(tempfile.mkdtemp(), "files")
shutil.copytree(files_dir, tmp_files_dir, symlinks=True)
- storage_dir = 'files'
+ storage_dir = "files"
tree = [
- ('api', ['index.html']),
+ ("api", ["index.html"]),
"404.html",
"api.fjson",
"conf.py",
@@ -69,24 +69,24 @@ def test_sync_directory(self):
self.assertFileTree(storage_dir, tree)
tree = [
- ('api', ['index.html']),
+ ("api", ["index.html"]),
"404.html",
- 'conf.py',
+ "conf.py",
"index.html",
- 'test.html',
+ "test.html",
]
- os.remove(os.path.join(tmp_files_dir, 'api.fjson'))
+ os.remove(os.path.join(tmp_files_dir, "api.fjson"))
with override_settings(DOCROOT=tmp_files_dir):
self.storage.rclone_sync_directory(tmp_files_dir, storage_dir)
self.assertFileTree(storage_dir, tree)
tree = [
"404.html",
- 'conf.py',
+ "conf.py",
"index.html",
- 'test.html',
+ "test.html",
]
- shutil.rmtree(os.path.join(tmp_files_dir, 'api'))
+ shutil.rmtree(os.path.join(tmp_files_dir, "api"))
with override_settings(DOCROOT=tmp_files_dir):
self.storage.rclone_sync_directory(tmp_files_dir, storage_dir)
self.assertFileTree(storage_dir, tree)
@@ -136,14 +136,14 @@ def test_delete_directory(self):
files, ["404.html", "api.fjson", "conf.py", "index.html", "test.html"]
)
- self.storage.delete_directory('files/')
- _, files = self.storage.listdir('files')
+ self.storage.delete_directory("files/")
+ _, files = self.storage.listdir("files")
self.assertEqual(files, [])
# We don't check "dirs" here - in filesystem backed storages
# the empty directories are not deleted
# Cloud storage generally doesn't consider empty directories to exist
- dirs, files = self.storage.listdir('files/api')
+ dirs, files = self.storage.listdir("files/api")
self.assertEqual(dirs, [])
self.assertEqual(files, [])
@@ -151,7 +151,7 @@ def test_walk(self):
with override_settings(DOCROOT=files_dir):
self.storage.copy_directory(files_dir, "files")
- output = list(self.storage.walk('files'))
+ output = list(self.storage.walk("files"))
self.assertEqual(len(output), 2)
top, dirs, files = output[0]
@@ -162,9 +162,9 @@ def test_walk(self):
)
top, dirs, files = output[1]
- self.assertEqual(top, 'files/api')
+ self.assertEqual(top, "files/api")
self.assertCountEqual(dirs, [])
- self.assertCountEqual(files, ['index.html'])
+ self.assertCountEqual(files, ["index.html"])
def test_rclone_sync(self):
tmp_files_dir = Path(tempfile.mkdtemp()) / "files"
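
The `assertFileTree` assertions above compare a storage listing against nested `(dirname, [files])` entries; for reference, a standalone sketch of an equivalent walk over a local directory (helper name is hypothetical):

import os


def walk_tree(root):
    # Map each directory (relative to root) to its sorted file names.
    tree = {}
    for dirpath, _dirnames, filenames in os.walk(root):
        tree[os.path.relpath(dirpath, root)] = sorted(filenames)
    return tree

# walk_tree(files_dir) would yield something like
# {".": ["404.html", "conf.py", ...], "api": ["index.html"]}
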
diff --git a/readthedocs/rtd_tests/tests/test_project_querysets.py b/readthedocs/rtd_tests/tests/test_project_querysets.py
index 687ff11cc6b..0876bc71eea 100644
--- a/readthedocs/rtd_tests/tests/test_project_querysets.py
+++ b/readthedocs/rtd_tests/tests/test_project_querysets.py
@@ -15,7 +15,6 @@
class ProjectQuerySetTests(TestCase):
-
def setUp(self):
self.user = get(User)
self.another_user = get(User)
@@ -74,21 +73,21 @@ def setUp(self):
}
def test_subproject_queryset_attributes(self):
- self.assertEqual(ParentRelatedProjectQuerySet.project_field, 'parent')
+ self.assertEqual(ParentRelatedProjectQuerySet.project_field, "parent")
self.assertTrue(ParentRelatedProjectQuerySet.use_for_related_fields)
- self.assertEqual(ChildRelatedProjectQuerySet.project_field, 'child')
+ self.assertEqual(ChildRelatedProjectQuerySet.project_field, "child")
self.assertTrue(ChildRelatedProjectQuerySet.use_for_related_fields)
def test_subproject_queryset_as_manager_gets_correct_class(self):
mgr = ChildRelatedProjectQuerySet.as_manager()
self.assertEqual(
mgr.__class__.__name__,
- 'ManagerFromChildRelatedProjectQuerySet',
+ "ManagerFromChildRelatedProjectQuerySet",
)
mgr = ParentRelatedProjectQuerySet.as_manager()
self.assertEqual(
mgr.__class__.__name__,
- 'ManagerFromParentRelatedProjectQuerySet',
+ "ManagerFromParentRelatedProjectQuerySet",
)
def test_is_active(self):
@@ -137,18 +136,12 @@ def test_public(self):
def test_public_user(self):
query = Project.objects.public(user=self.user)
- projects = (
- self.user_projects |
- {self.another_project}
- )
+ projects = self.user_projects | {self.another_project}
self.assertEqual(query.count(), len(projects))
self.assertEqual(set(query), projects)
query = Project.objects.public(user=self.another_user)
- projects = (
- self.another_user_projects |
- {self.project}
- )
+ projects = self.another_user_projects | {self.project}
self.assertEqual(query.count(), len(projects))
self.assertEqual(set(query), projects)
@@ -190,10 +183,7 @@ def test_for_user_and_viewer(self):
self.assertEqual(set(query), projects)
def test_for_user_and_viewer_same_user(self):
- query = Project.objects.for_user_and_viewer(
- user=self.user,
- viewer=self.user
- )
+ query = Project.objects.for_user_and_viewer(user=self.user, viewer=self.user)
projects = self.user_projects
self.assertEqual(query.count(), len(projects))
self.assertEqual(set(query), projects)
@@ -220,7 +210,6 @@ def test_only_owner(self):
class FeatureQuerySetTests(TestCase):
-
def test_feature_for_project_is_explicit_applied(self):
project = fixture.get(Project, main_language_project=None)
feature = fixture.get(Feature, projects=[project])
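
The manager-name assertions above rely on stock Django behavior: `QuerySet.as_manager()` builds a manager class named `ManagerFrom<QuerySetClassName>` and copies the queryset methods onto it. A minimal sketch, assuming a configured Django app (model and fields are illustrative):

from django.db import models


class ArticleQuerySet(models.QuerySet):
    def published(self):
        return self.filter(is_published=True)


class Article(models.Model):
    is_published = models.BooleanField(default=False)
    objects = ArticleQuerySet.as_manager()

# type(Article.objects).__name__ == "ManagerFromArticleQuerySet"
# and queryset methods chain: Article.objects.published().count()
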
diff --git a/readthedocs/search/api/v2/serializers.py b/readthedocs/search/api/v2/serializers.py
index 534fd22ace2..2add0015a93 100644
--- a/readthedocs/search/api/v2/serializers.py
+++ b/readthedocs/search/api/v2/serializers.py
@@ -22,7 +22,6 @@
class ProjectHighlightSerializer(serializers.Serializer):
-
name = serializers.SerializerMethodField()
slug = serializers.SerializerMethodField()
description = serializers.SerializerMethodField()
@@ -38,7 +37,6 @@ def get_description(self, obj):
class ProjectSearchSerializer(serializers.Serializer):
-
type = serializers.CharField(default="project", source=None, read_only=True)
name = serializers.CharField()
slug = serializers.CharField()
@@ -48,7 +46,6 @@ class ProjectSearchSerializer(serializers.Serializer):
class PageHighlightSerializer(serializers.Serializer):
-
title = serializers.SerializerMethodField()
def get_title(self, obj):
@@ -167,7 +164,6 @@ def get_blocks(self, obj):
class SectionHighlightSerializer(serializers.Serializer):
-
title = serializers.SerializerMethodField()
content = serializers.SerializerMethodField()
@@ -179,7 +175,6 @@ def get_content(self, obj):
class SectionSearchSerializer(serializers.Serializer):
-
type = serializers.CharField(default="section", source=None, read_only=True)
id = serializers.CharField()
title = serializers.CharField()
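
These serializers lean on DRF's `SerializerMethodField`, which resolves each field by calling `get_<field_name>(obj)` on the serializer; a self-contained sketch:

from rest_framework import serializers


class HighlightSerializer(serializers.Serializer):
    title = serializers.SerializerMethodField()

    def get_title(self, obj):
        # DRF invokes get_title(obj) while serializing; obj can be any
        # object (a plain dict here, for illustration).
        return obj.get("title", "")

# HighlightSerializer({"title": "Getting started"}).data
# -> {"title": "Getting started"}
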
diff --git a/readthedocs/search/documents.py b/readthedocs/search/documents.py
index 926d34cab0c..d165868e809 100644
--- a/readthedocs/search/documents.py
+++ b/readthedocs/search/documents.py
@@ -5,19 +5,18 @@
from readthedocs.projects.models import HTMLFile, Project
-project_conf = settings.ES_INDEXES['project']
-project_index = Index(project_conf['name'])
-project_index.settings(**project_conf['settings'])
+project_conf = settings.ES_INDEXES["project"]
+project_index = Index(project_conf["name"])
+project_index.settings(**project_conf["settings"])
-page_conf = settings.ES_INDEXES['page']
-page_index = Index(page_conf['name'])
-page_index.settings(**page_conf['settings'])
+page_conf = settings.ES_INDEXES["page"]
+page_index = Index(page_conf["name"])
+page_index.settings(**page_conf["settings"])
log = structlog.get_logger(__name__)
class RTDDocTypeMixin:
-
def update(self, *args, **kwargs):
# Hack a fix to our broken connection pooling
# This creates a new connection on every request,
@@ -33,20 +32,20 @@ class ProjectDocument(RTDDocTypeMixin, Document):
"""Document representation of a Project."""
# Metadata
- url = fields.TextField(attr='get_absolute_url')
+ url = fields.TextField(attr="get_absolute_url")
users = fields.NestedField(
properties={
- 'username': fields.TextField(),
- 'id': fields.IntegerField(),
+ "username": fields.TextField(),
+ "id": fields.IntegerField(),
}
)
language = fields.KeywordField()
- name = fields.TextField(attr='name')
- slug = fields.TextField(attr='slug')
- description = fields.TextField(attr='description')
+ name = fields.TextField(attr="name")
+ slug = fields.TextField(attr="slug")
+ description = fields.TextField(attr="description")
- modified_model_field = 'modified_date'
+ modified_model_field = "modified_date"
def get_queryset(self):
"""
@@ -83,33 +82,33 @@ class PageDocument(RTDDocTypeMixin, Document):
"""
# Metadata
- project = fields.KeywordField(attr='project.slug')
- version = fields.KeywordField(attr='version.slug')
- doctype = fields.KeywordField(attr='version.documentation_type')
- path = fields.KeywordField(attr='processed_json.path')
- full_path = fields.KeywordField(attr='path')
+ project = fields.KeywordField(attr="project.slug")
+ version = fields.KeywordField(attr="version.slug")
+ doctype = fields.KeywordField(attr="version.documentation_type")
+ path = fields.KeywordField(attr="processed_json.path")
+ full_path = fields.KeywordField(attr="path")
rank = fields.IntegerField()
# Searchable content
title = fields.TextField(
- attr='processed_json.title',
+ attr="processed_json.title",
)
sections = fields.NestedField(
- attr='processed_json.sections',
+ attr="processed_json.sections",
properties={
- 'id': fields.KeywordField(),
- 'title': fields.TextField(),
- 'content': fields.TextField(
- term_vector='with_positions_offsets',
+ "id": fields.KeywordField(),
+ "title": fields.TextField(),
+ "content": fields.TextField(
+ term_vector="with_positions_offsets",
),
- }
+ },
)
- modified_model_field = 'modified_date'
+ modified_model_field = "modified_date"
class Django:
model = HTMLFile
- fields = ('commit', 'build')
+ fields = ("commit", "build")
ignore_signals = True
def prepare_rank(self, html_file):
@@ -121,10 +120,9 @@ def get_queryset(self):
"""Don't include ignored files and delisted projects."""
queryset = super().get_queryset()
queryset = (
- queryset
- .exclude(ignore=True)
+ queryset.exclude(ignore=True)
.exclude(project__delisted=True)
.exclude(project__is_spam=True)
- .select_related('version', 'project')
+ .select_related("version", "project")
)
return queryset
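
The index bootstrapping above is the usual elasticsearch-dsl pattern: build an `Index` by name, then apply its settings dict before creation. A minimal sketch using the same shape as `ES_INDEXES` (values illustrative):

from elasticsearch_dsl import Index

conf = {
    "name": "page_index",
    "settings": {"number_of_shards": 1, "number_of_replicas": 1},
}
index = Index(conf["name"])
index.settings(**conf["settings"])  # applied when the index is created
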
diff --git a/readthedocs/settings/base.py b/readthedocs/settings/base.py
index d984110448b..c11d6fc5437 100644
--- a/readthedocs/settings/base.py
+++ b/readthedocs/settings/base.py
@@ -16,12 +16,14 @@
try:
import readthedocsext # noqa
+
ext = True
except ImportError:
ext = False
try:
import readthedocsext.theme # noqa
+
ext_theme = True
except ImportError:
ext_theme = False
@@ -37,13 +39,13 @@ class CommunityBaseSettings(Settings):
# Django settings
SITE_ID = 1
- ROOT_URLCONF = 'readthedocs.urls'
- LOGIN_REDIRECT_URL = '/dashboard/'
+ ROOT_URLCONF = "readthedocs.urls"
+ LOGIN_REDIRECT_URL = "/dashboard/"
FORCE_WWW = False
- SECRET_KEY = 'replace-this-please' # noqa
+ SECRET_KEY = "replace-this-please" # noqa
ATOMIC_REQUESTS = True
- DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
+ DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
# Debug settings
DEBUG = True
@@ -52,10 +54,13 @@ class CommunityBaseSettings(Settings):
@property
def DEBUG_TOOLBAR_CONFIG(self):
def _show_debug_toolbar(request):
- return request.environ.get('SERVER_NAME', None) != 'testserver' and self.SHOW_DEBUG_TOOLBAR
+ return (
+ request.environ.get("SERVER_NAME", None) != "testserver"
+ and self.SHOW_DEBUG_TOOLBAR
+ )
return {
- 'SHOW_TOOLBAR_CALLBACK': _show_debug_toolbar,
+ "SHOW_TOOLBAR_CALLBACK": _show_debug_toolbar,
}
@property
@@ -72,28 +77,28 @@ def SHOW_DEBUG_TOOLBAR(self):
# Domains and URLs
RTD_IS_PRODUCTION = False
- PRODUCTION_DOMAIN = 'readthedocs.org'
+ PRODUCTION_DOMAIN = "readthedocs.org"
PUBLIC_DOMAIN = None
PUBLIC_DOMAIN_USES_HTTPS = False
- PUBLIC_API_URL = 'https://{}'.format(PRODUCTION_DOMAIN)
- RTD_INTERSPHINX_URL = 'https://{}'.format(PRODUCTION_DOMAIN)
- RTD_EXTERNAL_VERSION_DOMAIN = 'external-builds.readthedocs.io'
+ PUBLIC_API_URL = "https://{}".format(PRODUCTION_DOMAIN)
+ RTD_INTERSPHINX_URL = "https://{}".format(PRODUCTION_DOMAIN)
+ RTD_EXTERNAL_VERSION_DOMAIN = "external-builds.readthedocs.io"
# Doc Builder Backends
- MKDOCS_BACKEND = 'readthedocs.doc_builder.backends.mkdocs'
- SPHINX_BACKEND = 'readthedocs.doc_builder.backends.sphinx'
+ MKDOCS_BACKEND = "readthedocs.doc_builder.backends.mkdocs"
+ SPHINX_BACKEND = "readthedocs.doc_builder.backends.sphinx"
# slumber settings
- SLUMBER_API_HOST = 'https://readthedocs.org'
+ SLUMBER_API_HOST = "https://readthedocs.org"
# Email
- DEFAULT_FROM_EMAIL = 'no-reply@readthedocs.org'
+ DEFAULT_FROM_EMAIL = "no-reply@readthedocs.org"
SERVER_EMAIL = DEFAULT_FROM_EMAIL
SUPPORT_EMAIL = None
SUPPORT_FORM_ENDPOINT = None
# Sessions
- SESSION_COOKIE_DOMAIN = 'readthedocs.org'
+ SESSION_COOKIE_DOMAIN = "readthedocs.org"
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_AGE = 30 * 24 * 60 * 60 # 30 days
SESSION_SAVE_EVERY_REQUEST = False
@@ -106,7 +111,7 @@ def SESSION_COOKIE_SAMESITE(self):
if self.USE_PROMOS:
return None
# This is django's default.
- return 'Lax'
+ return "Lax"
# CSRF
CSRF_COOKIE_HTTPONLY = True
@@ -116,7 +121,7 @@ def SESSION_COOKIE_SAMESITE(self):
# https://docs.djangoproject.com/en/1.11/ref/middleware/#django.middleware.security.SecurityMiddleware
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
- X_FRAME_OPTIONS = 'DENY'
+ X_FRAME_OPTIONS = "DENY"
# Content Security Policy
# https://django-csp.readthedocs.io/
@@ -126,21 +131,19 @@ def SESSION_COOKIE_SAMESITE(self):
CSP_OBJECT_SRC = ("'none'",)
CSP_REPORT_URI = None
CSP_REPORT_ONLY = False
- CSP_EXCLUDE_URL_PREFIXES = (
- "/admin/",
- )
+ CSP_EXCLUDE_URL_PREFIXES = ("/admin/",)
# Read the Docs
READ_THE_DOCS_EXTENSIONS = ext
- RTD_LATEST = 'latest'
- RTD_LATEST_VERBOSE_NAME = 'latest'
- RTD_STABLE = 'stable'
- RTD_STABLE_VERBOSE_NAME = 'stable'
+ RTD_LATEST = "latest"
+ RTD_LATEST_VERBOSE_NAME = "latest"
+ RTD_STABLE = "stable"
+ RTD_STABLE_VERBOSE_NAME = "stable"
RTD_CLEAN_AFTER_BUILD = False
RTD_MAX_CONCURRENT_BUILDS = 4
RTD_BUILDS_MAX_RETRIES = 25
RTD_BUILDS_RETRY_DELAY = 5 * 60 # seconds
- RTD_BUILD_STATUS_API_NAME = 'docs/readthedocs'
+ RTD_BUILD_STATUS_API_NAME = "docs/readthedocs"
RTD_ANALYTICS_DEFAULT_RETENTION_DAYS = 30 * 3
RTD_AUDITLOGS_DEFAULT_RETENTION_DAYS = 30 * 3
@@ -163,20 +166,37 @@ def RTD_DEFAULT_FEATURES(self):
# number of days or limit of the feature.
from readthedocs.subscriptions import constants
from readthedocs.subscriptions.products import RTDProductFeature
- return dict((
- RTDProductFeature(type=constants.TYPE_CNAME).to_item(),
- RTDProductFeature(type=constants.TYPE_EMBED_API).to_item(),
- # Retention days for search analytics.
- RTDProductFeature(type=constants.TYPE_SEARCH_ANALYTICS, value=self.RTD_ANALYTICS_DEFAULT_RETENTION_DAYS).to_item(),
- # Retention days for page view analytics.
- RTDProductFeature(type=constants.TYPE_PAGEVIEW_ANALYTICS, value=self.RTD_ANALYTICS_DEFAULT_RETENTION_DAYS).to_item(),
- # Retention days for audit logs.
- RTDProductFeature(type=constants.TYPE_AUDIT_LOGS, value=self.RTD_AUDITLOGS_DEFAULT_RETENTION_DAYS).to_item(),
- # Max number of concurrent builds.
- RTDProductFeature(type=constants.TYPE_CONCURRENT_BUILDS, value=self.RTD_MAX_CONCURRENT_BUILDS).to_item(),
- # Max number of redirects allowed per project.
- RTDProductFeature(type=constants.TYPE_REDIRECTS_LIMIT, value=100).to_item(),
- ))
+
+ return dict(
+ (
+ RTDProductFeature(type=constants.TYPE_CNAME).to_item(),
+ RTDProductFeature(type=constants.TYPE_EMBED_API).to_item(),
+ # Retention days for search analytics.
+ RTDProductFeature(
+ type=constants.TYPE_SEARCH_ANALYTICS,
+ value=self.RTD_ANALYTICS_DEFAULT_RETENTION_DAYS,
+ ).to_item(),
+ # Retention days for page view analytics.
+ RTDProductFeature(
+ type=constants.TYPE_PAGEVIEW_ANALYTICS,
+ value=self.RTD_ANALYTICS_DEFAULT_RETENTION_DAYS,
+ ).to_item(),
+ # Retention days for audit logs.
+ RTDProductFeature(
+ type=constants.TYPE_AUDIT_LOGS,
+ value=self.RTD_AUDITLOGS_DEFAULT_RETENTION_DAYS,
+ ).to_item(),
+ # Max number of concurrent builds.
+ RTDProductFeature(
+ type=constants.TYPE_CONCURRENT_BUILDS,
+ value=self.RTD_MAX_CONCURRENT_BUILDS,
+ ).to_item(),
+ # Max number of redirects allowed per project.
+ RTDProductFeature(
+ type=constants.TYPE_REDIRECTS_LIMIT, value=100
+ ).to_item(),
+ )
+ )
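
Since each `RTDProductFeature(...).to_item()` result is fed straight into `dict(...)`, `to_item()` presumably returns a `(feature_type, feature)` pair; a sketch of that construction with plain tuples (keys and values below are hypothetical):

features = dict(
    (
        ("cname", {"value": None}),
        ("concurrent_builds", {"value": 4}),
    )
)
assert features["concurrent_builds"]["value"] == 4
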
# A dictionary of Stripe products mapped to a RTDProduct object.
# In .org we don't have subscriptions/products, default features are
@@ -186,18 +206,18 @@ def RTD_DEFAULT_FEATURES(self):
# Database and API hitting settings
DONT_HIT_DB = True
RTD_SAVE_BUILD_COMMANDS_TO_STORAGE = False
- DATABASE_ROUTERS = ['readthedocs.core.db.MapAppsRouter']
+ DATABASE_ROUTERS = ["readthedocs.core.db.MapAppsRouter"]
USER_MATURITY_DAYS = 7
# override classes
CLASS_OVERRIDES = {}
- DOC_PATH_PREFIX = '_/'
+ DOC_PATH_PREFIX = "_/"
@property
def RTD_EXT_THEME_ENABLED(self):
- return ext_theme and 'RTD_EXT_THEME_ENABLED' in os.environ
+ return ext_theme and "RTD_EXT_THEME_ENABLED" in os.environ
RTD_EXT_THEME_DEV_SERVER = None
@@ -205,196 +225,197 @@ def RTD_EXT_THEME_ENABLED(self):
@property
def INSTALLED_APPS(self): # noqa
apps = [
- 'django.contrib.auth',
- 'django.contrib.admin',
- 'django.contrib.contenttypes',
- 'django.contrib.sessions',
- 'django.contrib.sites',
- 'django.contrib.messages',
- 'django.contrib.humanize',
-
+ "django.contrib.auth",
+ "django.contrib.admin",
+ "django.contrib.contenttypes",
+ "django.contrib.sessions",
+ "django.contrib.sites",
+ "django.contrib.messages",
+ "django.contrib.humanize",
# readthedocs.core app needs to be before
# django.contrib.staticfiles to use our custom collectstatic
# command
- 'readthedocs.core',
- 'django.contrib.staticfiles',
-
+ "readthedocs.core",
+ "django.contrib.staticfiles",
# third party apps
- 'dj_pagination',
- 'taggit',
- 'django_gravatar',
- 'rest_framework',
- 'rest_framework.authtoken',
+ "dj_pagination",
+ "taggit",
+ "django_gravatar",
+ "rest_framework",
+ "rest_framework.authtoken",
"rest_framework_api_key",
"generic_relations",
- 'corsheaders',
- 'annoying',
- 'django_extensions',
- 'crispy_forms',
- 'django_elasticsearch_dsl',
- 'django_filters',
- 'polymorphic',
- 'simple_history',
- 'djstripe',
- 'django_celery_beat',
+ "corsheaders",
+ "annoying",
+ "django_extensions",
+ "crispy_forms",
+ "django_elasticsearch_dsl",
+ "django_filters",
+ "polymorphic",
+ "simple_history",
+ "djstripe",
+ "django_celery_beat",
"django_safemigrate.apps.SafeMigrateConfig",
-
# our apps
- 'readthedocs.projects',
- 'readthedocs.organizations',
- 'readthedocs.builds',
- 'readthedocs.doc_builder',
- 'readthedocs.oauth',
- 'readthedocs.redirects',
- 'readthedocs.sso',
- 'readthedocs.audit',
- 'readthedocs.rtd_tests',
- 'readthedocs.api.v2',
- 'readthedocs.api.v3',
-
- 'readthedocs.gold',
- 'readthedocs.payments',
- 'readthedocs.subscriptions',
- 'readthedocs.notifications',
- 'readthedocs.integrations',
- 'readthedocs.analytics',
- 'readthedocs.search',
- 'readthedocs.embed',
- 'readthedocs.telemetry',
- 'readthedocs.domains',
- 'readthedocs.invitations',
-
+ "readthedocs.projects",
+ "readthedocs.organizations",
+ "readthedocs.builds",
+ "readthedocs.doc_builder",
+ "readthedocs.oauth",
+ "readthedocs.redirects",
+ "readthedocs.sso",
+ "readthedocs.audit",
+ "readthedocs.rtd_tests",
+ "readthedocs.api.v2",
+ "readthedocs.api.v3",
+ "readthedocs.gold",
+ "readthedocs.payments",
+ "readthedocs.subscriptions",
+ "readthedocs.notifications",
+ "readthedocs.integrations",
+ "readthedocs.analytics",
+ "readthedocs.search",
+ "readthedocs.embed",
+ "readthedocs.telemetry",
+ "readthedocs.domains",
+ "readthedocs.invitations",
# allauth
- 'allauth',
- 'allauth.account',
- 'allauth.socialaccount',
- 'allauth.socialaccount.providers.github',
- 'allauth.socialaccount.providers.gitlab',
- 'allauth.socialaccount.providers.bitbucket',
- 'allauth.socialaccount.providers.bitbucket_oauth2',
- 'cacheops',
+ "allauth",
+ "allauth.account",
+ "allauth.socialaccount",
+ "allauth.socialaccount.providers.github",
+ "allauth.socialaccount.providers.gitlab",
+ "allauth.socialaccount.providers.bitbucket",
+ "allauth.socialaccount.providers.bitbucket_oauth2",
+ "cacheops",
]
if ext:
- apps.append('readthedocsext.cdn')
- apps.append('readthedocsext.donate')
- apps.append('readthedocsext.spamfighting')
+ apps.append("readthedocsext.cdn")
+ apps.append("readthedocsext.donate")
+ apps.append("readthedocsext.spamfighting")
if self.RTD_EXT_THEME_ENABLED:
- apps.append('readthedocsext.theme')
+ apps.append("readthedocsext.theme")
if self.SHOW_DEBUG_TOOLBAR:
- apps.append('debug_toolbar')
+ apps.append("debug_toolbar")
return apps
@property
def CRISPY_TEMPLATE_PACK(self):
if self.RTD_EXT_THEME_ENABLED:
- return 'semantic-ui'
- return 'bootstrap'
+ return "semantic-ui"
+ return "bootstrap"
@property
def CRISPY_ALLOWED_TEMPLATE_PACKS(self):
if self.RTD_EXT_THEME_ENABLED:
- return ('semantic-ui',)
+ return ("semantic-ui",)
return ("bootstrap", "uni_form", "bootstrap3", "bootstrap4")
@property
def USE_PROMOS(self): # noqa
- return 'readthedocsext.donate' in self.INSTALLED_APPS
+ return "readthedocsext.donate" in self.INSTALLED_APPS
@property
def MIDDLEWARE(self):
middlewares = [
- 'readthedocs.core.middleware.NullCharactersMiddleware',
- 'readthedocs.core.middleware.ReadTheDocsSessionMiddleware',
- 'django.middleware.locale.LocaleMiddleware',
- 'corsheaders.middleware.CorsMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.security.SecurityMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
+ "readthedocs.core.middleware.NullCharactersMiddleware",
+ "readthedocs.core.middleware.ReadTheDocsSessionMiddleware",
+ "django.middleware.locale.LocaleMiddleware",
+ "corsheaders.middleware.CorsMiddleware",
+ "django.middleware.common.CommonMiddleware",
+ "django.middleware.security.SecurityMiddleware",
+ "django.middleware.csrf.CsrfViewMiddleware",
+ "django.middleware.clickjacking.XFrameOptionsMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware",
+ "django.contrib.messages.middleware.MessageMiddleware",
"allauth.account.middleware.AccountMiddleware",
- 'dj_pagination.middleware.PaginationMiddleware',
- 'csp.middleware.CSPMiddleware',
- 'readthedocs.core.middleware.ReferrerPolicyMiddleware',
- 'simple_history.middleware.HistoryRequestMiddleware',
- 'readthedocs.core.logs.ReadTheDocsRequestMiddleware',
- 'django_structlog.middlewares.CeleryMiddleware',
+ "dj_pagination.middleware.PaginationMiddleware",
+ "csp.middleware.CSPMiddleware",
+ "readthedocs.core.middleware.ReferrerPolicyMiddleware",
+ "simple_history.middleware.HistoryRequestMiddleware",
+ "readthedocs.core.logs.ReadTheDocsRequestMiddleware",
+ "django_structlog.middlewares.CeleryMiddleware",
]
if self.SHOW_DEBUG_TOOLBAR:
- middlewares.insert(0, 'debug_toolbar.middleware.DebugToolbarMiddleware')
+ middlewares.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware")
return middlewares
-
-
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
- 'django.contrib.auth.backends.ModelBackend',
+ "django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
- 'allauth.account.auth_backends.AuthenticationBackend',
+ "allauth.account.auth_backends.AuthenticationBackend",
)
AUTH_PASSWORD_VALIDATORS = [
{
- 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
+ "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
- 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
- 'OPTIONS': {
- 'min_length': 9,
- }
+ "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
+ "OPTIONS": {
+ "min_length": 9,
+ },
},
{
- 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
+ "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
- 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
+ "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Paths
SITE_ROOT = os.path.dirname(
- os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- TEMPLATE_ROOT = os.path.join(SITE_ROOT, 'readthedocs', 'templates')
- DOCROOT = os.path.join(SITE_ROOT, 'user_builds')
- LOGS_ROOT = os.path.join(SITE_ROOT, 'logs')
- PRODUCTION_ROOT = os.path.join(SITE_ROOT, 'prod_artifacts')
- PRODUCTION_MEDIA_ARTIFACTS = os.path.join(PRODUCTION_ROOT, 'media')
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ )
+ TEMPLATE_ROOT = os.path.join(SITE_ROOT, "readthedocs", "templates")
+ DOCROOT = os.path.join(SITE_ROOT, "user_builds")
+ LOGS_ROOT = os.path.join(SITE_ROOT, "logs")
+ PRODUCTION_ROOT = os.path.join(SITE_ROOT, "prod_artifacts")
+ PRODUCTION_MEDIA_ARTIFACTS = os.path.join(PRODUCTION_ROOT, "media")
# Assets and media
- STATIC_ROOT = os.path.join(SITE_ROOT, 'static')
- STATIC_URL = '/static/'
- MEDIA_ROOT = os.path.join(SITE_ROOT, 'media/')
- MEDIA_URL = '/media/'
- ADMIN_MEDIA_PREFIX = '/media/admin/'
+ STATIC_ROOT = os.path.join(SITE_ROOT, "static")
+ STATIC_URL = "/static/"
+ MEDIA_ROOT = os.path.join(SITE_ROOT, "media/")
+ MEDIA_URL = "/media/"
+ ADMIN_MEDIA_PREFIX = "/media/admin/"
STATICFILES_DIRS = [
- os.path.join(SITE_ROOT, 'readthedocs', 'static'),
- os.path.join(SITE_ROOT, 'media'),
+ os.path.join(SITE_ROOT, "readthedocs", "static"),
+ os.path.join(SITE_ROOT, "media"),
]
STATICFILES_FINDERS = [
- 'readthedocs.core.static.SelectiveFileSystemFinder',
- 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
- 'readthedocs.core.finders.DebugToolbarFinder',
+ "readthedocs.core.static.SelectiveFileSystemFinder",
+ "django.contrib.staticfiles.finders.AppDirectoriesFinder",
+ "readthedocs.core.finders.DebugToolbarFinder",
]
PYTHON_MEDIA = False
# Django Storage subclass used to write build artifacts to cloud or local storage
# https://docs.readthedocs.io/page/development/settings.html#rtd-build-media-storage
- RTD_BUILD_MEDIA_STORAGE = 'readthedocs.builds.storage.BuildMediaFileSystemStorage'
- RTD_BUILD_ENVIRONMENT_STORAGE = 'readthedocs.builds.storage.BuildMediaFileSystemStorage'
- RTD_BUILD_TOOLS_STORAGE = 'readthedocs.builds.storage.BuildMediaFileSystemStorage'
- RTD_BUILD_COMMANDS_STORAGE = 'readthedocs.builds.storage.BuildMediaFileSystemStorage'
- RTD_STATICFILES_STORAGE = 'readthedocs.builds.storage.StaticFilesStorage'
+ RTD_BUILD_MEDIA_STORAGE = "readthedocs.builds.storage.BuildMediaFileSystemStorage"
+ RTD_BUILD_ENVIRONMENT_STORAGE = (
+ "readthedocs.builds.storage.BuildMediaFileSystemStorage"
+ )
+ RTD_BUILD_TOOLS_STORAGE = "readthedocs.builds.storage.BuildMediaFileSystemStorage"
+ RTD_BUILD_COMMANDS_STORAGE = (
+ "readthedocs.builds.storage.BuildMediaFileSystemStorage"
+ )
+ RTD_STATICFILES_STORAGE = "readthedocs.builds.storage.StaticFilesStorage"
@property
def TEMPLATES(self):
dirs = [self.TEMPLATE_ROOT]
if self.RTD_EXT_THEME_ENABLED:
- dirs.insert(0, os.path.join(
- os.path.dirname(readthedocsext.theme.__file__),
- 'templates',
- ))
+ dirs.insert(
+ 0,
+ os.path.join(
+ os.path.dirname(readthedocsext.theme.__file__),
+ "templates",
+ ),
+ )
# Disable ``cached.Loader`` in development
# https://docs.djangoproject.com/en/4.2/ref/templates/api/#django.template.loaders.cached.Loader
@@ -406,21 +427,21 @@ def TEMPLATES(self):
return [
{
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'DIRS': dirs,
- 'OPTIONS': {
- 'debug': self.DEBUG,
- 'loaders': default_loaders if self.DEBUG else cached_loaders,
- 'context_processors': [
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
- 'django.template.context_processors.debug',
- 'django.template.context_processors.i18n',
- 'django.template.context_processors.media',
- 'django.template.context_processors.request',
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "DIRS": dirs,
+ "OPTIONS": {
+ "debug": self.DEBUG,
+ "loaders": default_loaders if self.DEBUG else cached_loaders,
+ "context_processors": [
+ "django.contrib.auth.context_processors.auth",
+ "django.contrib.messages.context_processors.messages",
+ "django.template.context_processors.debug",
+ "django.template.context_processors.i18n",
+ "django.template.context_processors.media",
+ "django.template.context_processors.request",
# Read the Docs processor
- 'readthedocs.core.context_processors.readthedocs_processor',
- 'readthedocs.core.context_processors.user_notifications',
+ "readthedocs.core.context_processors.readthedocs_processor",
+ "readthedocs.core.context_processors.user_notifications",
],
},
},
@@ -428,43 +449,43 @@ def TEMPLATES(self):
# Cache
CACHES = {
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- 'PREFIX': 'docs',
+ "default": {
+ "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+ "PREFIX": "docs",
}
}
CACHE_MIDDLEWARE_SECONDS = 60
# I18n
- TIME_ZONE = 'UTC'
+ TIME_ZONE = "UTC"
USE_TZ = True
- LANGUAGE_CODE = 'en-us'
+ LANGUAGE_CODE = "en-us"
LANGUAGES = (
- ('ca', gettext('Catalan')),
- ('en', gettext('English')),
- ('es', gettext('Spanish')),
- ('pt-br', gettext('Brazilian Portuguese')),
- ('nb', gettext('Norwegian Bokmål')),
- ('fr', gettext('French')),
- ('ru', gettext('Russian')),
- ('de', gettext('German')),
- ('gl', gettext('Galician')),
- ('vi', gettext('Vietnamese')),
- ('zh-cn', gettext('Simplified Chinese')),
- ('zh-tw', gettext('Traditional Chinese')),
- ('ja', gettext('Japanese')),
- ('uk', gettext('Ukrainian')),
- ('it', gettext('Italian')),
- ('ko', gettext('Korean')),
+ ("ca", gettext("Catalan")),
+ ("en", gettext("English")),
+ ("es", gettext("Spanish")),
+ ("pt-br", gettext("Brazilian Portuguese")),
+ ("nb", gettext("Norwegian Bokmål")),
+ ("fr", gettext("French")),
+ ("ru", gettext("Russian")),
+ ("de", gettext("German")),
+ ("gl", gettext("Galician")),
+ ("vi", gettext("Vietnamese")),
+ ("zh-cn", gettext("Simplified Chinese")),
+ ("zh-tw", gettext("Traditional Chinese")),
+ ("ja", gettext("Japanese")),
+ ("uk", gettext("Ukrainian")),
+ ("it", gettext("Italian")),
+ ("ko", gettext("Korean")),
)
LOCALE_PATHS = [
- os.path.join(SITE_ROOT, 'readthedocs', 'locale'),
+ os.path.join(SITE_ROOT, "readthedocs", "locale"),
]
USE_I18N = True
USE_L10N = True
# Celery
- CELERY_APP_NAME = 'readthedocs'
+ CELERY_APP_NAME = "readthedocs"
CELERY_ALWAYS_EAGER = True
CELERYD_TASK_TIME_LIMIT = 60 * 60 # 60 minutes
CELERY_SEND_TASK_ERROR_EMAILS = False
@@ -476,75 +497,75 @@ def TEMPLATES(self):
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_CREATE_MISSING_QUEUES = True
- CELERY_DEFAULT_QUEUE = 'celery'
- CELERYBEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
+ CELERY_DEFAULT_QUEUE = "celery"
+ CELERYBEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"
CELERYBEAT_SCHEDULE = {
- 'quarter-finish-inactive-builds': {
- 'task': 'readthedocs.projects.tasks.utils.finish_inactive_builds',
- 'schedule': crontab(minute='*/15'),
- 'options': {'queue': 'web'},
+ "quarter-finish-inactive-builds": {
+ "task": "readthedocs.projects.tasks.utils.finish_inactive_builds",
+ "schedule": crontab(minute="*/15"),
+ "options": {"queue": "web"},
},
- 'every-day-delete-old-search-queries': {
- 'task': 'readthedocs.search.tasks.delete_old_search_queries_from_db',
- 'schedule': crontab(minute=0, hour=0),
- 'options': {'queue': 'web'},
+ "every-day-delete-old-search-queries": {
+ "task": "readthedocs.search.tasks.delete_old_search_queries_from_db",
+ "schedule": crontab(minute=0, hour=0),
+ "options": {"queue": "web"},
},
- 'every-day-delete-old-page-views': {
- 'task': 'readthedocs.analytics.tasks.delete_old_page_counts',
- 'schedule': crontab(minute=27, hour='*/6'),
- 'options': {'queue': 'web'},
+ "every-day-delete-old-page-views": {
+ "task": "readthedocs.analytics.tasks.delete_old_page_counts",
+ "schedule": crontab(minute=27, hour="*/6"),
+ "options": {"queue": "web"},
},
- 'every-day-delete-old-buildata-models': {
- 'task': 'readthedocs.telemetry.tasks.delete_old_build_data',
- 'schedule': crontab(minute=0, hour=2),
- 'options': {'queue': 'web'},
+ "every-day-delete-old-buildata-models": {
+ "task": "readthedocs.telemetry.tasks.delete_old_build_data",
+ "schedule": crontab(minute=0, hour=2),
+ "options": {"queue": "web"},
},
- 'weekly-delete-old-personal-audit-logs': {
- 'task': 'readthedocs.audit.tasks.delete_old_personal_audit_logs',
- 'schedule': crontab(day_of_week="wed", minute=0, hour=7),
- 'options': {'queue': 'web'},
+ "weekly-delete-old-personal-audit-logs": {
+ "task": "readthedocs.audit.tasks.delete_old_personal_audit_logs",
+ "schedule": crontab(day_of_week="wed", minute=0, hour=7),
+ "options": {"queue": "web"},
},
- 'every-day-resync-sso-organization-users': {
- 'task': 'readthedocs.oauth.tasks.sync_remote_repositories_organizations',
- 'schedule': crontab(minute=0, hour=4),
- 'options': {'queue': 'web'},
+ "every-day-resync-sso-organization-users": {
+ "task": "readthedocs.oauth.tasks.sync_remote_repositories_organizations",
+ "schedule": crontab(minute=0, hour=4),
+ "options": {"queue": "web"},
},
- 'quarter-archive-builds': {
- 'task': 'readthedocs.builds.tasks.archive_builds_task',
- 'schedule': crontab(minute='*/15'),
- 'options': {'queue': 'web'},
- 'kwargs': {
- 'days': 1,
- 'limit': 500,
- 'delete': True,
+ "quarter-archive-builds": {
+ "task": "readthedocs.builds.tasks.archive_builds_task",
+ "schedule": crontab(minute="*/15"),
+ "options": {"queue": "web"},
+ "kwargs": {
+ "days": 1,
+ "limit": 500,
+ "delete": True,
},
},
- 'every-three-hours-delete-inactive-external-versions': {
- 'task': 'readthedocs.builds.tasks.delete_closed_external_versions',
+ "every-three-hours-delete-inactive-external-versions": {
+ "task": "readthedocs.builds.tasks.delete_closed_external_versions",
# Increase the frequency because we have 255k closed versions and they keep growing.
# It's better to increase this frequency than the `limit=` of the task.
- 'schedule': crontab(minute=0, hour='*/3'),
- 'options': {'queue': 'web'},
+ "schedule": crontab(minute=0, hour="*/3"),
+ "options": {"queue": "web"},
},
- 'every-day-resync-remote-repositories': {
- 'task': 'readthedocs.oauth.tasks.sync_active_users_remote_repositories',
- 'schedule': crontab(minute=30, hour=2),
- 'options': {'queue': 'web'},
+ "every-day-resync-remote-repositories": {
+ "task": "readthedocs.oauth.tasks.sync_active_users_remote_repositories",
+ "schedule": crontab(minute=30, hour=2),
+ "options": {"queue": "web"},
},
- 'every-day-email-pending-custom-domains': {
- 'task': 'readthedocs.domains.tasks.email_pending_custom_domains',
- 'schedule': crontab(minute=0, hour=3),
- 'options': {'queue': 'web'},
+ "every-day-email-pending-custom-domains": {
+ "task": "readthedocs.domains.tasks.email_pending_custom_domains",
+ "schedule": crontab(minute=0, hour=3),
+ "options": {"queue": "web"},
},
- 'every-15m-delete-pidbox-objects': {
- 'task': 'readthedocs.core.tasks.cleanup_pidbox_keys',
- 'schedule': crontab(minute='*/15'),
- 'options': {'queue': 'web'},
+ "every-15m-delete-pidbox-objects": {
+ "task": "readthedocs.core.tasks.cleanup_pidbox_keys",
+ "schedule": crontab(minute="*/15"),
+ "options": {"queue": "web"},
},
- 'every-day-delete-old-revoked-build-api-keys': {
- 'task': 'readthedocs.api.v2.tasks.delete_old_revoked_build_api_keys',
- 'schedule': crontab(minute=0, hour=4),
- 'options': {'queue': 'web'},
+ "every-day-delete-old-revoked-build-api-keys": {
+ "task": "readthedocs.api.v2.tasks.delete_old_revoked_build_api_keys",
+ "schedule": crontab(minute=0, hour=4),
+ "options": {"queue": "web"},
},
}
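
For reference, a standalone sketch of the beat-schedule shape used above, assuming only Celery's `crontab`: `minute="*/15"` fires every 15 minutes, and `minute=0, hour=4` fires daily at 04:00 (UTC here, given `TIME_ZONE = "UTC"`); the task path is hypothetical:

from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    "every-15m-example": {
        "task": "myapp.tasks.example",  # hypothetical task path
        "schedule": crontab(minute="*/15"),
        "options": {"queue": "web"},
    },
}
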
@@ -553,7 +574,7 @@ def TEMPLATES(self):
# Docker
DOCKER_ENABLE = False
- DOCKER_SOCKET = 'unix:///var/run/docker.sock'
+ DOCKER_SOCKET = "unix:///var/run/docker.sock"
# User used to create the container.
# In production we use the same user as the one defined by the
@@ -561,15 +582,17 @@ def TEMPLATES(self):
# In development, we can use the "UID:GID" of the current user running the
# instance to avoid file permission issues.
# https://docs.docker.com/engine/reference/run/#user
- RTD_DOCKER_USER = 'docs:docs'
- RTD_DOCKER_SUPER_USER = 'root:root'
- RTD_DOCKER_WORKDIR = '/home/docs/'
+ RTD_DOCKER_USER = "docs:docs"
+ RTD_DOCKER_SUPER_USER = "root:root"
+ RTD_DOCKER_WORKDIR = "/home/docs/"
RTD_DOCKER_COMPOSE = False
- DOCKER_VERSION = 'auto'
- DOCKER_DEFAULT_VERSION = 'ubuntu-22.04'
- DOCKER_IMAGE = '{}:{}'.format(constants_docker.DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION)
+ DOCKER_VERSION = "auto"
+ DOCKER_DEFAULT_VERSION = "ubuntu-22.04"
+ DOCKER_IMAGE = "{}:{}".format(
+ constants_docker.DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION
+ )
# Additional binds for the build container
RTD_DOCKER_ADDITIONAL_BINDS = {}
@@ -580,15 +603,17 @@ def TEMPLATES(self):
def _get_docker_memory_limit(self):
try:
- total_memory = int(subprocess.check_output(
- "free -m | awk '/^Mem:/{print $2}'",
- shell=True,
- ))
+ total_memory = int(
+ subprocess.check_output(
+ "free -m | awk '/^Mem:/{print $2}'",
+ shell=True,
+ )
+ )
return total_memory, round(total_memory - 1000, -2)
except ValueError:
# On systems without a `free` command, the subprocess output cannot be
# converted to int, which raises a ValueError
- log.exception('Failed to get memory size, using defaults Docker limits.')
+ log.exception("Failed to get memory size, using defaults Docker limits.")
# Coefficient used to determine build time limit, as a percentage of total
# memory. Historical values here were 0.225 to 0.3.
@@ -608,8 +633,8 @@ def DOCKER_LIMITS(self):
"""
# Our normal default
limits = {
- 'memory': '1g',
- 'time': 600,
+ "memory": "1g",
+ "time": 600,
}
# Only run on our servers
@@ -617,82 +642,66 @@ def DOCKER_LIMITS(self):
total_memory, memory_limit = self._get_docker_memory_limit()
if memory_limit:
limits = {
- 'memory': f'{memory_limit}m',
- 'time': max(
- limits['time'],
+ "memory": f"{memory_limit}m",
+ "time": max(
+ limits["time"],
round(total_memory * self.DOCKER_TIME_LIMIT_COEFF, -2),
- )
+ ),
}
log.info(
- 'Using dynamic docker limits.',
+ "Using dynamic docker limits.",
hostname=socket.gethostname(),
- memory=limits['memory'],
- time=limits['time'],
+ memory=limits["memory"],
+ time=limits["time"],
)
return limits
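
A worked example of the `DOCKER_LIMITS` arithmetic, using a hypothetical 8 GB builder and the top of the historical coefficient range:

total_memory = 8000                                   # MB
memory_limit = round(total_memory - 1000, -2)         # 7000 -> memory="7000m"
time_limit = max(600, round(total_memory * 0.3, -2))  # 2400 seconds
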
# All auth
- ACCOUNT_ADAPTER = 'readthedocs.core.adapters.AccountAdapter'
+ ACCOUNT_ADAPTER = "readthedocs.core.adapters.AccountAdapter"
ACCOUNT_EMAIL_REQUIRED = True
# Make email verification mandatory.
# Users won't be able to log in until they verify the email address.
- ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
+ ACCOUNT_EMAIL_VERIFICATION = "mandatory"
- ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
+ ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = 7
SOCIALACCOUNT_AUTO_SIGNUP = False
SOCIALACCOUNT_STORE_TOKENS = True
_SOCIALACCOUNT_PROVIDERS = {
- 'github': {
+ "github": {
"APPS": [
- {
- "client_id": "123",
- "secret": "456",
- "key": ""
- },
+ {"client_id": "123", "secret": "456", "key": ""},
],
"VERIFIED_EMAIL": True,
- 'SCOPE': [
- 'user:email',
- 'read:org',
- 'admin:repo_hook',
- 'repo:status',
+ "SCOPE": [
+ "user:email",
+ "read:org",
+ "admin:repo_hook",
+ "repo:status",
],
},
- 'gitlab': {
+ "gitlab": {
"APPS": [
- {
- "client_id": "123",
- "secret": "456",
- "key": ""
- },
+ {"client_id": "123", "secret": "456", "key": ""},
],
"VERIFIED_EMAIL": True,
- 'SCOPE': [
- 'api',
- 'read_user',
+ "SCOPE": [
+ "api",
+ "read_user",
],
},
"bitbucket_oauth2": {
"APPS": [
- {
- "client_id": "123",
- "secret": "456",
- "key": ""
- },
+ {"client_id": "123", "secret": "456", "key": ""},
],
# Bitbucket scope/permissions are determined by the OAuth consumer setup on bitbucket.org.
},
# Deprecated, we use `bitbucket_oauth2` for all new connections.
"bitbucket": {
"APPS": [
- {
- "client_id": "123",
- "secret": "456",
- "key": ""
- },
+ {"client_id": "123", "secret": "456", "key": ""},
],
},
}
@@ -707,7 +716,7 @@ def SOCIALACCOUNT_PROVIDERS(self):
return self._SOCIALACCOUNT_PROVIDERS
ACCOUNT_FORMS = {
- 'signup': 'readthedocs.forms.SignupFormWithNewsletter',
+ "signup": "readthedocs.forms.SignupFormWithNewsletter",
}
# CORS
@@ -725,13 +734,13 @@ def SOCIALACCOUNT_PROVIDERS(self):
# we won't be able to pass credentials to the sustainability API with that value.
CORS_ALLOWED_ORIGIN_REGEXES = [re.compile(".+")]
CORS_ALLOW_HEADERS = list(default_headers) + [
- 'x-hoverxref-version',
+ "x-hoverxref-version",
]
# Additional protection to allow only idempotent methods.
CORS_ALLOW_METHODS = [
- 'GET',
- 'OPTIONS',
- 'HEAD',
+ "GET",
+ "OPTIONS",
+ "HEAD",
]
# URLs that allow unauthenticated CORS reads.
@@ -751,19 +760,19 @@ def SOCIALACCOUNT_PROVIDERS(self):
# RTD Settings
ALLOW_PRIVATE_REPOS = False
- DEFAULT_PRIVACY_LEVEL = 'public'
- DEFAULT_VERSION_PRIVACY_LEVEL = 'public'
+ DEFAULT_PRIVACY_LEVEL = "public"
+ DEFAULT_VERSION_PRIVACY_LEVEL = "public"
ALLOW_ADMIN = True
# Organization settings
RTD_ALLOW_ORGANIZATIONS = False
- RTD_ORG_DEFAULT_STRIPE_SUBSCRIPTION_PRICE = 'trial-v2-monthly'
+ RTD_ORG_DEFAULT_STRIPE_SUBSCRIPTION_PRICE = "trial-v2-monthly"
RTD_ORG_TRIAL_PERIOD_DAYS = 30
# Elasticsearch settings.
ELASTICSEARCH_DSL = {
- 'default': {
- 'hosts': 'http://elastic:password@search:9200',
+ "default": {
+ "hosts": "http://elastic:password@search:9200",
},
}
# Chunk size for elasticsearch reindex celery tasks
@@ -781,18 +790,15 @@ def SOCIALACCOUNT_PROVIDERS(self):
# and a second replica resulting in immediate 50% bump in max search throughput.
ES_INDEXES = {
- 'project': {
- 'name': 'project_index',
- 'settings': {
- 'number_of_shards': 1,
- 'number_of_replicas': 1
- },
+ "project": {
+ "name": "project_index",
+ "settings": {"number_of_shards": 1, "number_of_replicas": 1},
},
- 'page': {
- 'name': 'page_index',
- 'settings': {
- 'number_of_shards': 1,
- 'number_of_replicas': 1,
+ "page": {
+ "name": "page_index",
+ "settings": {
+ "number_of_shards": 1,
+ "number_of_replicas": 1,
},
},
}
@@ -810,17 +816,15 @@ def SOCIALACCOUNT_PROVIDERS(self):
# Disable auto refresh for increasing index performance
ELASTICSEARCH_DSL_AUTO_REFRESH = False
- ALLOWED_HOSTS = ['*']
+ ALLOWED_HOSTS = ["*"]
- ABSOLUTE_URL_OVERRIDES = {
- 'auth.user': lambda o: '/profiles/{}/'.format(o.username)
- }
+ ABSOLUTE_URL_OVERRIDES = {"auth.user": lambda o: "/profiles/{}/".format(o.username)}
- INTERNAL_IPS = ('127.0.0.1',)
+ INTERNAL_IPS = ("127.0.0.1",)
# Taggit
# https://django-taggit.readthedocs.io
- TAGGIT_TAGS_FROM_STRING = 'readthedocs.projects.tag_utils.rtd_parse_tags'
+ TAGGIT_TAGS_FROM_STRING = "readthedocs.projects.tag_utils.rtd_parse_tags"
# Stripe
# Existing values we use
@@ -829,7 +833,7 @@ def SOCIALACCOUNT_PROVIDERS(self):
# DJStripe values -- **CHANGE THESE IN PRODUCTION**
STRIPE_LIVE_SECRET_KEY = None
- STRIPE_TEST_SECRET_KEY = "sk_test_x" # A default so the `checks` don't fail
+ STRIPE_TEST_SECRET_KEY = "sk_test_x" # A default so the `checks` don't fail
DJSTRIPE_WEBHOOK_SECRET = None
STRIPE_LIVE_MODE = False # Change to True in production
# This is less optimal than setting the webhook secret
@@ -839,7 +843,9 @@ def SOCIALACCOUNT_PROVIDERS(self):
# These values shouldn't need to change.
DJSTRIPE_FOREIGN_KEY_TO_FIELD = "id"
- DJSTRIPE_USE_NATIVE_JSONFIELD = True # We recommend setting to True for new installations
+ DJSTRIPE_USE_NATIVE_JSONFIELD = (
+ True # We recommend setting to True for new installations
+ )
# Disable adding djstripe metadata to the Customer objects.
# We are managing the subscriber relationship by ourselves,
@@ -858,31 +864,35 @@ def SOCIALACCOUNT_PROVIDERS(self):
# Misc application settings
GLOBAL_ANALYTICS_CODE = None
DASHBOARD_ANALYTICS_CODE = None # For the dashboard, not docs
- GRAVATAR_DEFAULT_IMAGE = 'https://assets.readthedocs.org/static/images/silhouette.png' # NOQA
+ GRAVATAR_DEFAULT_IMAGE = (
+ "https://assets.readthedocs.org/static/images/silhouette.png" # NOQA
+ )
OAUTH_AVATAR_USER_DEFAULT_URL = GRAVATAR_DEFAULT_IMAGE
OAUTH_AVATAR_ORG_DEFAULT_URL = GRAVATAR_DEFAULT_IMAGE
REST_FRAMEWORK = {
- 'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
- 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', # NOQA
- 'DEFAULT_THROTTLE_RATES': {
- 'anon': '5/minute',
- 'user': '60/minute',
+ "DEFAULT_FILTER_BACKENDS": (
+ "django_filters.rest_framework.DjangoFilterBackend",
+ ),
+ "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination", # NOQA
+ "DEFAULT_THROTTLE_RATES": {
+ "anon": "5/minute",
+ "user": "60/minute",
},
- 'PAGE_SIZE': 10,
- 'TEST_REQUEST_DEFAULT_FORMAT': 'json',
+ "PAGE_SIZE": 10,
+ "TEST_REQUEST_DEFAULT_FORMAT": "json",
}
- SILENCED_SYSTEM_CHECKS = ['fields.W342']
+ SILENCED_SYSTEM_CHECKS = ["fields.W342"]
# Logging
- LOG_FORMAT = '%(name)s:%(lineno)s[%(process)d]: %(levelname)s %(message)s'
+ LOG_FORMAT = "%(name)s:%(lineno)s[%(process)d]: %(levelname)s %(message)s"
LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': True,
- 'formatters': {
- 'default': {
- 'format': LOG_FORMAT,
- 'datefmt': '%d/%b/%Y %H:%M:%S',
+ "version": 1,
+ "disable_existing_loggers": True,
+ "formatters": {
+ "default": {
+ "format": LOG_FORMAT,
+ "datefmt": "%d/%b/%Y %H:%M:%S",
},
# structlog
"plain_console": {
@@ -908,79 +918,81 @@ def SOCIALACCOUNT_PROVIDERS(self):
"key_value": {
"()": structlog.stdlib.ProcessorFormatter,
"processors": [
- structlog.processors.TimeStamper(fmt='iso'),
+ structlog.processors.TimeStamper(fmt="iso"),
structlog.stdlib.ProcessorFormatter.remove_processors_meta,
- structlog.processors.KeyValueRenderer(key_order=['timestamp', 'level', 'event', 'logger']),
+ structlog.processors.KeyValueRenderer(
+ key_order=["timestamp", "level", "event", "logger"]
+ ),
],
# Allows adding extra data to log entries generated via the ``logging`` module
# See https://www.structlog.org/en/stable/standard-library.html#rendering-using-structlog-based-formatters-within-logging
"foreign_pre_chain": shared_processors,
},
},
- 'handlers': {
- 'console': {
- 'level': 'INFO',
- 'class': 'logging.StreamHandler',
- 'formatter': 'plain_console',
+ "handlers": {
+ "console": {
+ "level": "INFO",
+ "class": "logging.StreamHandler",
+ "formatter": "plain_console",
},
- 'debug': {
- 'level': 'DEBUG',
- 'class': 'logging.handlers.RotatingFileHandler',
- 'filename': os.path.join(LOGS_ROOT, 'debug.log'),
- 'formatter': 'key_value',
+ "debug": {
+ "level": "DEBUG",
+ "class": "logging.handlers.RotatingFileHandler",
+ "filename": os.path.join(LOGS_ROOT, "debug.log"),
+ "formatter": "key_value",
},
- 'null': {
- 'class': 'logging.NullHandler',
+ "null": {
+ "class": "logging.NullHandler",
},
},
- 'loggers': {
- '': { # root logger
- 'handlers': ['debug', 'console'],
+ "loggers": {
+ "": { # root logger
+ "handlers": ["debug", "console"],
# Always send from the root, handlers can filter levels
- 'level': 'INFO',
+ "level": "INFO",
},
- 'docker.utils.config': {
- 'handlers': ['null'],
+ "docker.utils.config": {
+ "handlers": ["null"],
# Don't double log at the root logger for these.
- 'propagate': False,
+ "propagate": False,
},
- 'django_structlog.middlewares.request': {
- 'handlers': ['null'],
+ "django_structlog.middlewares.request": {
+ "handlers": ["null"],
# Don't double log at the root logger for these.
- 'propagate': False,
+ "propagate": False,
},
- 'readthedocs': {
- 'handlers': ['debug', 'console'],
- 'level': 'DEBUG',
+ "readthedocs": {
+ "handlers": ["debug", "console"],
+ "level": "DEBUG",
# Don't double log at the root logger for these.
- 'propagate': False,
+ "propagate": False,
},
- 'django.security.DisallowedHost': {
- 'handlers': ['null'],
- 'propagate': False,
+ "django.security.DisallowedHost": {
+ "handlers": ["null"],
+ "propagate": False,
},
- 'elastic_transport.transport': {
- 'handlers': ['null'],
- 'propagate': False,
+ "elastic_transport.transport": {
+ "handlers": ["null"],
+ "propagate": False,
},
- 'celery.worker.consumer.gossip': {
- 'handlers': ['null'],
- 'propagate': False,
+ "celery.worker.consumer.gossip": {
+ "handlers": ["null"],
+ "propagate": False,
},
},
}
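
The `key_value` formatter above ultimately renders through structlog's `KeyValueRenderer`; a minimal sketch of what one event dict produces (the event name is hypothetical):

import structlog

renderer = structlog.processors.KeyValueRenderer(
    key_order=["timestamp", "level", "event", "logger"]
)
event = {
    "timestamp": "2024-01-01T00:00:00Z",
    "level": "info",
    "event": "build.finished",
    "logger": "readthedocs",
}
# Processors are called as (logger, method_name, event_dict):
print(renderer(None, None, event))
# timestamp='2024-01-01T00:00:00Z' level='info' event='build.finished' logger='readthedocs'
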
# MailerLite API for newsletter signups
- MAILERLITE_API_SUBSCRIBERS_URL = 'https://api.mailerlite.com/api/v2/subscribers'
+ MAILERLITE_API_SUBSCRIBERS_URL = "https://api.mailerlite.com/api/v2/subscribers"
MAILERLITE_API_ONBOARDING_GROUP_ID = None
MAILERLITE_API_ONBOARDING_GROUP_URL = None
MAILERLITE_API_KEY = None
RTD_EMBED_API_EXTERNAL_DOMAINS = [
- r'^docs\.python\.org$',
- r'^docs\.scipy\.org$',
- r'^docs\.sympy\.org$',
- r'^numpy\.org$',
+ r"^docs\.python\.org$",
+ r"^docs\.scipy\.org$",
+ r"^docs\.sympy\.org$",
+ r"^numpy\.org$",
]
RTD_EMBED_API_PAGE_CACHE_TIMEOUT = 5 * 10
RTD_EMBED_API_DEFAULT_REQUEST_TIMEOUT = 1
@@ -996,42 +1008,39 @@ def SOCIALACCOUNT_PROVIDERS(self):
CACHEOPS_ENABLED = False
CACHEOPS_TIMEOUT = 60 * 60 # seconds
- CACHEOPS_OPS = {'get', 'fetch'}
+ CACHEOPS_OPS = {"get", "fetch"}
CACHEOPS_DEGRADE_ON_FAILURE = True
CACHEOPS = {
# readthedocs.projects.*
- 'projects.project': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "projects.project": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
- 'projects.feature': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "projects.feature": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
- 'projects.projectrelationship': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "projects.projectrelationship": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
- 'projects.domain': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "projects.domain": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
-
# readthedocs.builds.*
- 'builds.version': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "builds.version": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
-
# readthedocs.organizations.*
- 'organizations.organization': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "organizations.organization": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
-
# readthedocs.subscriptions.*
- 'subscriptions.planfeature': {
- 'ops': CACHEOPS_OPS,
- 'timeout': CACHEOPS_TIMEOUT,
+ "subscriptions.planfeature": {
+ "ops": CACHEOPS_OPS,
+ "timeout": CACHEOPS_TIMEOUT,
},
}
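
Every entry above repeats the same `ops`/`timeout` pair; a small helper could remove the repetition (a sketch only):

CACHEOPS_OPS = {"get", "fetch"}
CACHEOPS_TIMEOUT = 60 * 60  # seconds


def _cacheops_entry():
    # Illustrative helper: one place to change ops/timeout for all models.
    return {"ops": set(CACHEOPS_OPS), "timeout": CACHEOPS_TIMEOUT}


CACHEOPS = {
    "projects.project": _cacheops_entry(),
    "builds.version": _cacheops_entry(),
}
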
diff --git a/readthedocs/subscriptions/tests/test_views.py b/readthedocs/subscriptions/tests/test_views.py
index 9088d03a624..c6517207545 100644
--- a/readthedocs/subscriptions/tests/test_views.py
+++ b/readthedocs/subscriptions/tests/test_views.py
@@ -106,7 +106,9 @@ def _create_stripe_subscription(
return stripe_subscription
def test_active_subscription(self):
- resp = self.client.get(reverse('subscription_detail', args=[self.organization.slug]))
+ resp = self.client.get(
+ reverse("subscription_detail", args=[self.organization.slug])
+ )
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context["stripe_subscription"], self.stripe_subscription)
self.assertContains(resp, "active")
@@ -130,24 +132,26 @@ def test_active_subscription_with_extra_product(self):
self.assertContains(resp, "active")
self.assertContains(resp, "Extra products:")
# The subscribe form isn't shown, but the manage subscription button is.
- self.assertContains(resp, 'Manage Subscription')
- self.assertNotContains(resp, 'Create Subscription')
+ self.assertContains(resp, "Manage Subscription")
+ self.assertNotContains(resp, "Create Subscription")
- @requests_mock.Mocker(kw='mock_request')
+ @requests_mock.Mocker(kw="mock_request")
def test_manage_subscription(self, mock_request):
payload = {
- 'url': 'https://billing.stripe.com/session/a1b2c3',
+ "url": "https://billing.stripe.com/session/a1b2c3",
}
- mock_request.post('https://api.stripe.com/v1/billing_portal/sessions', json=payload)
+ mock_request.post(
+ "https://api.stripe.com/v1/billing_portal/sessions", json=payload
+ )
response = self.client.post(
reverse(
- 'stripe_customer_portal',
- kwargs={'slug': self.organization.slug},
+ "stripe_customer_portal",
+ kwargs={"slug": self.organization.slug},
),
)
self.assertRedirects(
response,
- payload.get('url'),
+ payload.get("url"),
fetch_redirect_response=False,
)
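# A minimal sketch of the mocking pattern above (illustrative):
# requests_mock intercepts HTTP calls made through ``requests``, so
# the Stripe API is never actually contacted during the test.
import requests
import requests_mock

with requests_mock.Mocker() as mock_request:
    mock_request.post(
        "https://api.stripe.com/v1/billing_portal/sessions",
        json={"url": "https://billing.stripe.com/session/a1b2c3"},
    )
    resp = requests.post("https://api.stripe.com/v1/billing_portal/sessions")
    assert resp.json()["url"] == "https://billing.stripe.com/session/a1b2c3"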
@@ -192,14 +196,16 @@ def test_user_without_subscription_and_customer(
self.organization.stripe_subscription = None
self.organization.save()
self.organization.refresh_from_db()
- self.assertFalse(hasattr(self.organization, 'subscription'))
+ self.assertFalse(hasattr(self.organization, "subscription"))
self.assertIsNone(self.organization.stripe_customer)
self.assertIsNone(self.organization.stripe_subscription)
- resp = self.client.get(reverse('subscription_detail', args=[self.organization.slug]))
+ resp = self.client.get(
+ reverse("subscription_detail", args=[self.organization.slug])
+ )
self.assertEqual(resp.status_code, 200)
self.organization.refresh_from_db()
- self.assertEqual(self.organization.stripe_id, 'cus_a1b2c3')
+ self.assertEqual(self.organization.stripe_id, "cus_a1b2c3")
self.assertEqual(self.organization.stripe_customer, stripe_customer)
self.assertEqual(self.organization.stripe_subscription, stripe_subscription)
customer_create_mock.assert_called_once()
@@ -207,9 +213,11 @@ def test_user_without_subscription_and_customer(
def test_user_with_canceled_subscription(self):
self.stripe_subscription.status = SubscriptionStatus.canceled
self.stripe_subscription.save()
- resp = self.client.get(reverse('subscription_detail', args=[self.organization.slug]))
+ resp = self.client.get(
+ reverse("subscription_detail", args=[self.organization.slug])
+ )
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.context["stripe_subscription"], self.stripe_subscription)
# The Manage Subscription button isn't shown, but the Create Subscription one is.
- self.assertNotContains(resp, 'Manage Subscription')
- self.assertContains(resp, 'Create Subscription')
+ self.assertNotContains(resp, "Manage Subscription")
+ self.assertContains(resp, "Create Subscription")
diff --git a/readthedocs/subscriptions/views.py b/readthedocs/subscriptions/views.py
index 31680d57dcd..7397f3eaa81 100644
--- a/readthedocs/subscriptions/views.py
+++ b/readthedocs/subscriptions/views.py
@@ -37,11 +37,8 @@ def get(self, request, *args, **kwargs):
super().get(request, *args, **kwargs)
# The query argument ``upgraded=true`` is used as the callback
# URL for Stripe checkout; see `self.redirect_to_checkout`.
- if request.GET.get('upgraded') == 'true':
- messages.success(
- self.request,
- _('Your plan has been upgraded!')
- )
+ if request.GET.get("upgraded") == "true":
+ messages.success(self.request, _("Your plan has been upgraded!"))
form = self.get_form()
context = self.get_context_data(form=form)
@@ -77,27 +74,29 @@ def redirect_to_checkout(self, form):
stripe_customer = get_or_create_stripe_customer(organization)
checkout_session = stripe.checkout.Session.create(
customer=stripe_customer.id,
- payment_method_types=['card'],
+ payment_method_types=["card"],
line_items=[
{
"price": stripe_price.id,
"quantity": 1,
}
],
- mode='subscription',
- success_url=url + '?upgraded=true',
+ mode="subscription",
+ success_url=url + "?upgraded=true",
cancel_url=url,
)
return HttpResponseRedirect(checkout_session.url)
except Exception:
log.exception(
- 'Error while creating a Stripe checkout session.',
+ "Error while creating a Stripe checkout session.",
organization_slug=organization.slug,
price=stripe_price.id,
)
messages.error(
self.request,
- _('There was an error connecting to Stripe, please try again in a few minutes.'),
+ _(
+ "There was an error connecting to Stripe, please try again in a few minutes."
+ ),
)
return HttpResponseRedirect(self.get_success_url())
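# A minimal sketch of the Checkout call above (illustrative; assumes
# stripe-python with an API key configured, and placeholder ids).
# Stripe redirects the browser to success_url when payment completes,
# which is where the ``?upgraded=true`` flag checked in ``get()``
# comes from.
import stripe

url = "https://example.com/organizations/acme/subscription/"
checkout_session = stripe.checkout.Session.create(
    customer="cus_a1b2c3",
    payment_method_types=["card"],
    line_items=[{"price": "price_a1b2c3", "quantity": 1}],
    mode="subscription",
    success_url=url + "?upgraded=true",
    cancel_url=url,
)
# Redirect the user to checkout_session.url to start the payment.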
@@ -157,7 +156,7 @@ def get_context_data(self, **kwargs):
def get_success_url(self):
return reverse(
- 'subscription_detail',
+ "subscription_detail",
args=[self.get_organization().slug],
)
@@ -166,11 +165,11 @@ class StripeCustomerPortal(OrganizationMixin, GenericView):
"""Create a stripe billing portal session for the user to manage their subscription."""
- http_method_names = ['post']
+ http_method_names = ["post"]
def get_success_url(self):
return reverse(
- 'subscription_detail',
+ "subscription_detail",
args=[self.get_organization().slug],
)
@@ -187,12 +186,14 @@ def post(self, request, *args, **kwargs):
return HttpResponseRedirect(billing_portal.url)
except: # noqa
log.exception(
- 'There was an error connecting to Stripe to create the billing portal session.',
+ "There was an error connecting to Stripe to create the billing portal session.",
stripe_customer=stripe_customer.id,
organization_slug=organization.slug,
)
messages.error(
request,
- _('There was an error connecting to Stripe, please try again in a few minutes'),
+ _(
+ "There was an error connecting to Stripe, please try again in a few minutes"
+ ),
)
return HttpResponseRedirect(self.get_success_url())
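# A minimal sketch of the billing-portal call wrapped above
# (illustrative; assumes stripe-python with an API key configured and
# a placeholder customer id):
import stripe

billing_portal = stripe.billing_portal.Session.create(
    customer="cus_a1b2c3",
    return_url="https://example.com/organizations/acme/subscription/",
)
# Redirect the user to billing_portal.url; Stripe sends them back to
# return_url once they finish managing their subscription.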
diff --git a/readthedocs/vcs_support/backends/git.py b/readthedocs/vcs_support/backends/git.py
index e4693c35746..508de6bb316 100644
--- a/readthedocs/vcs_support/backends/git.py
+++ b/readthedocs/vcs_support/backends/git.py
@@ -34,7 +34,7 @@ class Backend(BaseVCS):
supports_branches = True
supports_submodules = True
supports_lsremote = True
- fallback_branch = 'master' # default branch
+ fallback_branch = "master" # default branch
repo_depth = 50
def __init__(self, *args, **kwargs):
@@ -46,16 +46,16 @@ def __init__(self, *args, **kwargs):
# We also need to know about Version.machine
self.version_machine = kwargs.pop("version_machine")
super().__init__(*args, **kwargs)
- self.token = kwargs.get('token')
+ self.token = kwargs.get("token")
self.repo_url = self._get_clone_url()
def _get_clone_url(self):
- if '://' in self.repo_url:
- hacked_url = self.repo_url.split('://')[1]
- hacked_url = re.sub('.git$', '', hacked_url)
- clone_url = 'https://%s' % hacked_url
+ if "://" in self.repo_url:
+ hacked_url = self.repo_url.split("://")[1]
+ hacked_url = re.sub(".git$", "", hacked_url)
+ clone_url = "https://%s" % hacked_url
if self.token:
- clone_url = 'https://{}@{}'.format(self.token, hacked_url)
+ clone_url = "https://{}@{}".format(self.token, hacked_url)
return clone_url
# Don't edit the URL because not all hosts are the same
# else:
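# A standalone sketch of the rewrite above (illustrative; the helper
# name is made up for the example):
import re

def _https_clone_url(repo_url, token=None):
    if "://" in repo_url:
        hacked_url = repo_url.split("://", 1)[1]
        hacked_url = re.sub(r"\.git$", "", hacked_url)
        if token:
            return "https://{}@{}".format(token, hacked_url)
        return "https://%s" % hacked_url
    return repo_url  # URLs without a scheme (e.g. SSH) are left as-is

assert _https_clone_url("git://github.com/org/repo.git") == "https://github.com/org/repo"
assert _https_clone_url("https://host.test/repo.git", token="tok") == "https://tok@host.test/repo"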
@@ -292,7 +292,7 @@ def get_available_submodules(self, config) -> tuple[bool, list]:
def checkout_revision(self, revision):
try:
- code, out, err = self.run('git', 'checkout', '--force', revision)
+ code, out, err = self.run("git", "checkout", "--force", revision)
return [code, out, err]
except RepositoryError as exc:
raise RepositoryError(
@@ -341,7 +341,7 @@ def lsremote(self, include_tags=True, include_branches=True):
tag = ref.replace("refs/tags/", "", 1)
# If the tag is annotated, then the real commit
# will be on the ref ending with ^{}.
- if tag.endswith('^{}'):
+ if tag.endswith("^{}"):
light_tags[tag[:-3]] = commit
else:
all_tags[tag] = commit
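# A sketch of the tag parsing above on sample ``git ls-remote`` output
# (illustrative; the final merge step is assumed from context): an
# annotated tag appears twice, and the peeled ``^{}`` ref carries the
# commit the tag actually points at.
lines = [
    "111aaa\trefs/tags/v1.0",     # the tag object itself
    "222bbb\trefs/tags/v1.0^{}",  # peeled: the tagged commit
    "333ccc\trefs/tags/v2.0",     # a lightweight tag
]
all_tags, light_tags = {}, {}
for line in lines:
    commit, ref = line.split("\t")
    tag = ref.replace("refs/tags/", "", 1)
    if tag.endswith("^{}"):
        light_tags[tag[:-3]] = commit
    else:
        all_tags[tag] = commit
all_tags.update(light_tags)  # prefer the peeled commit when present
assert all_tags == {"v1.0": "222bbb", "v2.0": "333ccc"}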
@@ -432,7 +432,7 @@ def checkout(self, identifier=None):
code, out, err = self.checkout_revision(identifier)
# Clean any remains of previous checkouts
- self.run('git', 'clean', '-d', '-f', '-f')
+ self.run("git", "clean", "-d", "-f", "-f")
return code, out, err
def update_submodules(self, config):
@@ -449,13 +449,13 @@ def checkout_submodules(self, submodules: list[str], recursive: bool):
If submodules is empty, all submodules will be updated.
"""
- self.run('git', 'submodule', 'sync')
+ self.run("git", "submodule", "sync")
cmd = [
- 'git',
- 'submodule',
- 'update',
- '--init',
- '--force',
+ "git",
+ "submodule",
+ "update",
+ "--init",
+ "--force",
]
if recursive:
cmd.append("--recursive")
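# A sketch of the command assembled above (illustrative; that the
# submodule paths are appended after the flags is an assumption, since
# the rest of the method is not shown):
cmd = ["git", "submodule", "update", "--init", "--force"]
recursive = True
submodules = ["docs/theme"]  # an empty list would update all submodules
if recursive:
    cmd.append("--recursive")
cmd.extend(submodules)
assert cmd == [
    "git", "submodule", "update", "--init", "--force",
    "--recursive", "docs/theme",
]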
@@ -466,7 +466,7 @@ def checkout_submodules(self, submodules: list[str], recursive: bool):
def find_ref(self, ref):
# If the ref already starts with 'origin/',
# we don't need to do anything.
- if ref.startswith('origin/'):
+ if ref.startswith("origin/"):
return ref
# Check if ref is a branch of the origin remote
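# A sketch of the normalization find_ref starts above (illustrative;
# the branch-membership fallback is an assumption, since the method is
# cut off here):
def qualify_ref(ref, remote_branches):
    if ref.startswith("origin/"):
        return ref
    if ref in remote_branches:
        return "origin/" + ref
    return ref  # tags and raw commit SHAs are used as-is

assert qualify_ref("origin/main", {"main"}) == "origin/main"
assert qualify_ref("main", {"main"}) == "origin/main"
assert qualify_ref("v1.0", {"main"}) == "v1.0"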