diff --git a/readthedocs/api/v2/serializers.py b/readthedocs/api/v2/serializers.py
index 560ecc440d2..1774a882c4c 100644
--- a/readthedocs/api/v2/serializers.py
+++ b/readthedocs/api/v2/serializers.py
@@ -77,19 +77,14 @@ def get_skip(self, obj):
class Meta(ProjectSerializer.Meta):
fields = ProjectSerializer.Meta.fields + (
- "enable_epub_build",
- "enable_pdf_build",
- "conf_py_file",
"analytics_code",
"analytics_disabled",
"cdn_enabled",
"container_image",
"container_mem_limit",
"container_time_limit",
- "install_project",
"skip",
"requirements_file",
- "python_interpreter",
"features",
"has_valid_clone",
"has_valid_webhook",
diff --git a/readthedocs/doc_builder/config.py b/readthedocs/doc_builder/config.py
index 4141dad01c6..94252237d24 100644
--- a/readthedocs/doc_builder/config.py
+++ b/readthedocs/doc_builder/config.py
@@ -28,13 +28,3 @@ def load_yaml_config(version, readthedocs_yaml_path=None):
readthedocs_yaml_path=readthedocs_yaml_path,
)
return config
-
-
-def get_default_formats(project):
- """Get a list of the default formats for ``project``."""
- formats = ["htmlzip"]
- if project.enable_epub_build:
- formats += ["epub"]
- if project.enable_pdf_build:
- formats += ["pdf"]
- return formats
diff --git a/readthedocs/projects/fixtures/test_data.json b/readthedocs/projects/fixtures/test_data.json
index cfc3284f9d6..bbf923a49f1 100644
--- a/readthedocs/projects/fixtures/test_data.json
+++ b/readthedocs/projects/fixtures/test_data.json
@@ -31,14 +31,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -83,14 +77,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -135,14 +123,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -187,14 +169,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -239,14 +215,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -291,14 +261,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -343,14 +307,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -395,14 +353,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -447,14 +399,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -499,14 +445,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -551,14 +491,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -603,14 +537,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -655,14 +583,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -707,14 +629,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -757,14 +673,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -807,14 +717,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
@@ -857,14 +761,8 @@
"ad_free": false,
"is_spam": null,
"show_version_warning": false,
- "enable_epub_build": true,
- "enable_pdf_build": true,
- "path": "",
- "conf_py_file": "",
"featured": false,
"skip": false,
- "install_project": false,
- "python_interpreter": "python3",
"privacy_level": "public",
"language": "en",
"programming_language": "words",
diff --git a/readthedocs/projects/forms.py b/readthedocs/projects/forms.py
index b1088ae8908..2ea6e03a047 100644
--- a/readthedocs/projects/forms.py
+++ b/readthedocs/projects/forms.py
@@ -44,10 +44,10 @@ class ProjectForm(SimpleHistoryModelForm):
:param user: If provided, add this user as a project user on save
"""
- required_css_class = 'required'
+ required_css_class = "required"
def __init__(self, *args, **kwargs):
- self.user = kwargs.pop('user', None)
+ self.user = kwargs.pop("user", None)
super().__init__(*args, **kwargs)
def save(self, commit=True):
@@ -203,13 +203,13 @@ class Meta:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.fields['repo'].widget.attrs['placeholder'] = self.placehold_repo()
- self.fields['repo'].widget.attrs['required'] = True
+ self.fields["repo"].widget.attrs["placeholder"] = self.placehold_repo()
+ self.fields["repo"].widget.attrs["required"] = True
def save(self, commit=True):
"""Add remote repository relationship to the project instance."""
instance = super().save(commit)
- remote_repo = self.cleaned_data.get('remote_repository', None)
+ remote_repo = self.cleaned_data.get("remote_repository", None)
if remote_repo:
if commit:
remote_repo.projects.add(self.instance)
@@ -219,25 +219,27 @@ def save(self, commit=True):
return instance
def clean_name(self):
- name = self.cleaned_data.get('name', '')
+ name = self.cleaned_data.get("name", "")
if not self.instance.pk:
potential_slug = slugify(name)
if Project.objects.filter(slug=potential_slug).exists():
raise forms.ValidationError(
- _('Invalid project name, a project already exists with that name'),
+ _("Invalid project name, a project already exists with that name"),
) # yapf: disable # noqa
if not potential_slug:
# Check the generated slug won't be empty
- raise forms.ValidationError(_('Invalid project name'),)
+ raise forms.ValidationError(
+ _("Invalid project name"),
+ )
return name
def clean_repo(self):
- repo = self.cleaned_data.get('repo', '')
- return repo.rstrip('/')
+ repo = self.cleaned_data.get("repo", "")
+ return repo.rstrip("/")
def clean_remote_repository(self):
- remote_repo = self.cleaned_data.get('remote_repository', None)
+ remote_repo = self.cleaned_data.get("remote_repository", None)
if not remote_repo:
return None
try:
@@ -249,15 +251,17 @@ def clean_remote_repository(self):
raise forms.ValidationError(_("Repository invalid")) from exc
def placehold_repo(self):
- return choice([
- 'https://bitbucket.org/cherrypy/cherrypy',
- 'https://bitbucket.org/birkenfeld/sphinx',
- 'https://bitbucket.org/hpk42/tox',
- 'https://github.com/zzzeek/sqlalchemy.git',
- 'https://github.com/django/django.git',
- 'https://github.com/fabric/fabric.git',
- 'https://github.com/ericholscher/django-kong.git',
- ])
+ return choice(
+ [
+ "https://bitbucket.org/cherrypy/cherrypy",
+ "https://bitbucket.org/birkenfeld/sphinx",
+ "https://bitbucket.org/hpk42/tox",
+ "https://github.com/zzzeek/sqlalchemy.git",
+ "https://github.com/django/django.git",
+ "https://github.com/fabric/fabric.git",
+ "https://github.com/ericholscher/django-kong.git",
+ ]
+ )
class ProjectExtraForm(ProjectForm):
@@ -267,12 +271,12 @@ class ProjectExtraForm(ProjectForm):
class Meta:
model = Project
fields = (
- 'description',
- 'documentation_type',
- 'language',
- 'programming_language',
- 'tags',
- 'project_url',
+ "description",
+ "documentation_type",
+ "language",
+ "programming_language",
+ "tags",
+ "project_url",
)
description = forms.CharField(
@@ -282,12 +286,12 @@ class Meta:
)
def clean_tags(self):
- tags = self.cleaned_data.get('tags', [])
+ tags = self.cleaned_data.get("tags", [])
for tag in tags:
if len(tag) > 100:
raise forms.ValidationError(
_(
- 'Length of each tag must be less than or equal to 100 characters.',
+ "Length of each tag must be less than or equal to 100 characters.",
),
)
return tags
@@ -326,8 +330,8 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Remove the nullable option from the form
- self.fields['analytics_disabled'].widget = forms.CheckboxInput()
- self.fields['analytics_disabled'].empty_value = False
+ self.fields["analytics_disabled"].widget = forms.CheckboxInput()
+ self.fields["analytics_disabled"].empty_value = False
# Remove empty choice from options.
self.fields["versioning_scheme"].choices = [
@@ -353,27 +357,28 @@ def __init__(self, *args, **kwargs):
self.fields.pop("show_version_warning")
if not settings.ALLOW_PRIVATE_REPOS:
- for field in ['privacy_level', 'external_builds_privacy_level']:
+ for field in ["privacy_level", "external_builds_privacy_level"]:
self.fields.pop(field)
- default_choice = (None, '-' * 9)
- versions_choices = self.instance.versions(manager=INTERNAL).filter(
- machine=False).values_list('verbose_name', flat=True)
+ default_choice = (None, "-" * 9)
+ versions_choices = (
+ self.instance.versions(manager=INTERNAL)
+ .filter(machine=False)
+ .values_list("verbose_name", flat=True)
+ )
- self.fields['default_branch'].widget = forms.Select(
- choices=[default_choice] + list(
- zip(versions_choices, versions_choices)
- ),
+ self.fields["default_branch"].widget = forms.Select(
+ choices=[default_choice] + list(zip(versions_choices, versions_choices)),
)
active_versions = self.get_all_active_versions()
if active_versions:
- self.fields['default_version'].widget = forms.Select(
+ self.fields["default_version"].widget = forms.Select(
choices=active_versions,
)
else:
- self.fields['default_version'].widget.attrs['readonly'] = True
+ self.fields["default_version"].widget.attrs["readonly"] = True
self.setup_external_builds_option()
@@ -458,17 +463,6 @@ def can_build_external_versions(self, integrations):
return True
return False
- def clean_conf_py_file(self):
- filename = self.cleaned_data.get("conf_py_file", "").strip()
- if filename and "conf.py" not in filename:
- raise forms.ValidationError(
- _(
- 'Your configuration file is invalid, make sure it contains '
- 'conf.py in it.',
- ),
- ) # yapf: disable
- return filename
-
def clean_readthedocs_yaml_path(self):
"""
Validate user input to help user.
@@ -491,15 +485,17 @@ def get_all_active_versions(self):
version_qs = self.instance.all_active_versions()
if version_qs.exists():
version_qs = sort_version_aware(version_qs)
- all_versions = [(version.slug, version.verbose_name) for version in version_qs]
+ all_versions = [
+ (version.slug, version.verbose_name) for version in version_qs
+ ]
return all_versions
return None
class UpdateProjectForm(
- ProjectTriggerBuildMixin,
- ProjectBasicsForm,
- ProjectExtraForm,
+ ProjectTriggerBuildMixin,
+ ProjectBasicsForm,
+ ProjectExtraForm,
):
"""Basic project settings form for Admin."""
@@ -508,25 +504,24 @@ class Meta: # noqa
model = Project
fields = (
# Basics
- 'name',
- 'repo',
+ "name",
+ "repo",
"repo_type",
# Extra
- 'description',
- 'language',
- 'programming_language',
- 'project_url',
- 'tags',
+ "description",
+ "language",
+ "programming_language",
+ "project_url",
+ "tags",
)
def clean_language(self):
"""Ensure that language isn't already active."""
- language = self.cleaned_data['language']
+ language = self.cleaned_data["language"]
project = self.instance
if project:
msg = _(
- 'There is already a "{lang}" translation '
- 'for the {proj} project.',
+ 'There is already a "{lang}" translation for the {proj} project.',
)
if project.translations.filter(language=language).exists():
raise forms.ValidationError(
@@ -539,8 +534,7 @@ def clean_language(self):
msg.format(lang=language, proj=main_project.slug),
)
siblings = (
- main_project.translations
- .filter(language=language)
+ main_project.translations.filter(language=language)
.exclude(pk=project.pk)
.exists()
)
@@ -562,33 +556,33 @@ class Meta:
fields = "__all__"
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project')
- self.user = kwargs.pop('user')
+ self.project = kwargs.pop("project")
+ self.user = kwargs.pop("user")
super().__init__(*args, **kwargs)
# Don't display the update form with an editable child, as it will be
# filtered out from the queryset anyways.
- if hasattr(self, 'instance') and self.instance.pk is not None:
- self.fields['child'].queryset = Project.objects.filter(pk=self.instance.child.pk)
+ if hasattr(self, "instance") and self.instance.pk is not None:
+ self.fields["child"].queryset = Project.objects.filter(
+ pk=self.instance.child.pk
+ )
else:
- self.fields['child'].queryset = self.project.get_subproject_candidates(self.user)
+ self.fields["child"].queryset = self.project.get_subproject_candidates(
+ self.user
+ )
def clean_parent(self):
- self.project.is_valid_as_superproject(
- forms.ValidationError
- )
+ self.project.is_valid_as_superproject(forms.ValidationError)
return self.project
def clean_alias(self):
- alias = self.cleaned_data['alias']
- subproject = (
- self.project.subprojects
- .filter(alias=alias)
- .exclude(id=self.instance.pk)
+ alias = self.cleaned_data["alias"]
+ subproject = self.project.subprojects.filter(alias=alias).exclude(
+ id=self.instance.pk
)
if subproject.exists():
raise forms.ValidationError(
- _('A subproject with this alias already exists'),
+ _("A subproject with this alias already exists"),
)
return alias
@@ -680,12 +674,12 @@ class EmailHookForm(forms.Form):
email = forms.EmailField()
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
def clean_email(self):
self.email = EmailHook.objects.get_or_create(
- email=self.cleaned_data['email'],
+ email=self.cleaned_data["email"],
project=self.project,
)[0]
return self.email
@@ -703,39 +697,42 @@ class WebHookForm(forms.ModelForm):
class Meta:
model = WebHook
- fields = ['project', 'url', 'events', 'payload', 'secret']
+ fields = ["project", "url", "events", "payload", "secret"]
widgets = {
- 'events': forms.CheckboxSelectMultiple,
+ "events": forms.CheckboxSelectMultiple,
}
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
if self.instance and self.instance.pk:
# Show secret in the detail form, but as readonly.
- self.fields['secret'].disabled = True
+ self.fields["secret"].disabled = True
else:
# Don't show the secret in the creation form.
- self.fields.pop('secret')
- self.fields['payload'].initial = json.dumps({
- 'event': '{{ event }}',
- 'name': '{{ project.name }}',
- 'slug': '{{ project.slug }}',
- 'version': '{{ version.slug }}',
- 'commit': '{{ build.commit }}',
- 'build': '{{ build.id }}',
- 'start_date': '{{ build.start_date }}',
- 'build_url': '{{ build.url }}',
- 'docs_url': '{{ build.docs_url }}',
- }, indent=2)
+ self.fields.pop("secret")
+ self.fields["payload"].initial = json.dumps(
+ {
+ "event": "{{ event }}",
+ "name": "{{ project.name }}",
+ "slug": "{{ project.slug }}",
+ "version": "{{ version.slug }}",
+ "commit": "{{ build.commit }}",
+ "build": "{{ build.id }}",
+ "start_date": "{{ build.start_date }}",
+ "build_url": "{{ build.url }}",
+ "docs_url": "{{ build.docs_url }}",
+ },
+ indent=2,
+ )
def clean_project(self):
return self.project
def clean_payload(self):
"""Check if the payload is a valid json object and format it."""
- payload = self.cleaned_data['payload']
+ payload = self.cleaned_data["payload"]
try:
payload = json.loads(payload)
payload = json.dumps(payload, indent=2)
@@ -753,19 +750,22 @@ class TranslationBaseForm(forms.Form):
project = forms.ChoiceField()
def __init__(self, *args, **kwargs):
- self.parent = kwargs.pop('parent', None)
- self.user = kwargs.pop('user')
+ self.parent = kwargs.pop("parent", None)
+ self.user = kwargs.pop("user")
super().__init__(*args, **kwargs)
- self.fields['project'].choices = self.get_choices()
+ self.fields["project"].choices = self.get_choices()
def get_choices(self):
- return [(
- project.slug,
- '{project} ({lang})'.format(
- project=project.slug,
- lang=project.get_language_display(),
- ),
- ) for project in self.get_translation_queryset().all()]
+ return [
+ (
+ project.slug,
+ "{project} ({lang})".format(
+ project=project.slug,
+ lang=project.get_language_display(),
+ ),
+ )
+ for project in self.get_translation_queryset().all()
+ ]
def clean(self):
if not self.parent.supports_translations:
@@ -779,7 +779,7 @@ def clean(self):
def clean_project(self):
"""Ensures that selected project is valid as a translation."""
- translation_project_slug = self.cleaned_data['project']
+ translation_project_slug = self.cleaned_data["project"]
# Ensure parent project isn't already itself a translation
if self.parent.main_language_project is not None:
@@ -798,7 +798,7 @@ def clean_project(self):
)
self.translation = project_translation_qs.first()
if self.translation.language == self.parent.language:
- msg = ('Both projects can not have the same language ({lang}).')
+ msg = "Both projects can not have the same language ({lang})."
raise forms.ValidationError(
_(msg).format(lang=self.parent.get_language_display()),
)
@@ -811,15 +811,15 @@ def clean_project(self):
)
# yapf: enable
if exists_translation:
- msg = ('This project already has a translation for {lang}.')
+ msg = "This project already has a translation for {lang}."
raise forms.ValidationError(
_(msg).format(lang=self.translation.get_language_display()),
)
is_parent = self.translation.translations.exists()
if is_parent:
msg = (
- 'A project with existing translations '
- 'can not be added as a project translation.'
+ "A project with existing translations "
+ "can not be added as a project translation."
)
raise forms.ValidationError(_(msg))
return translation_project_slug
@@ -869,7 +869,7 @@ class Meta:
]
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
# Remove the nullable option from the form.
@@ -893,15 +893,15 @@ class DomainForm(forms.ModelForm):
class Meta:
model = Domain
- fields = ['project', 'domain', 'canonical', 'https']
+ fields = ["project", "domain", "canonical", "https"]
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
# Disable domain manipulation on Update, but allow on Create
if self.instance.pk:
- self.fields['domain'].disabled = True
+ self.fields["domain"].disabled = True
# Remove the https option at creation,
# but show it if the domain is already marked as http only,
@@ -914,17 +914,15 @@ def clean_project(self):
def clean_domain(self):
"""Validates domain."""
- domain = self.cleaned_data['domain'].lower()
+ domain = self.cleaned_data["domain"].lower()
parsed = urlparse(domain)
# Force the scheme to have a valid netloc.
if not parsed.scheme:
- parsed = urlparse(f'https://{domain}')
+ parsed = urlparse(f"https://{domain}")
if not parsed.netloc:
- raise forms.ValidationError(
- f'{domain} is not a valid domain.'
- )
+ raise forms.ValidationError(f"{domain} is not a valid domain.")
domain_string = parsed.netloc
@@ -938,24 +936,21 @@ def clean_domain(self):
settings.RTD_EXTERNAL_VERSION_DOMAIN,
]:
if invalid_domain and domain_string.endswith(invalid_domain):
- raise forms.ValidationError(
- f'{invalid_domain} is not a valid domain.'
- )
+ raise forms.ValidationError(f"{invalid_domain} is not a valid domain.")
return domain_string
def clean_canonical(self):
- canonical = self.cleaned_data['canonical']
+ canonical = self.cleaned_data["canonical"]
pk = self.instance.pk
has_canonical_domain = (
- Domain.objects
- .filter(project=self.project, canonical=True)
+ Domain.objects.filter(project=self.project, canonical=True)
.exclude(pk=pk)
.exists()
)
if canonical and has_canonical_domain:
raise forms.ValidationError(
- _('Only one domain can be canonical at a time.'),
+ _("Only one domain can be canonical at a time."),
)
return canonical
@@ -978,10 +973,12 @@ class Meta:
]
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
# Alter the integration type choices to only provider webhooks
- self.fields['integration_type'].choices = Integration.WEBHOOK_INTEGRATIONS # yapf: disable # noqa
+ self.fields[
+ "integration_type"
+ ].choices = Integration.WEBHOOK_INTEGRATIONS # yapf: disable # noqa
def clean_project(self):
return self.project
@@ -997,10 +994,10 @@ class ProjectAdvertisingForm(forms.ModelForm):
class Meta:
model = Project
- fields = ['allow_promos']
+ fields = ["allow_promos"]
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
@@ -1018,11 +1015,11 @@ class FeatureForm(forms.ModelForm):
class Meta:
model = Feature
- fields = ['projects', 'feature_id', 'default_true', 'future_default_true']
+ fields = ["projects", "feature_id", "default_true", "future_default_true"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.fields['feature_id'].choices = Feature.FEATURES
+ self.fields["feature_id"].choices = Feature.FEATURES
class EnvironmentVariableForm(forms.ModelForm):
@@ -1037,43 +1034,43 @@ class EnvironmentVariableForm(forms.ModelForm):
class Meta:
model = EnvironmentVariable
- fields = ('name', 'value', 'public', 'project')
+ fields = ("name", "value", "public", "project")
def __init__(self, *args, **kwargs):
- self.project = kwargs.pop('project', None)
+ self.project = kwargs.pop("project", None)
super().__init__(*args, **kwargs)
# Remove the nullable option from the form.
# TODO: remove after migration.
- self.fields['public'].widget = forms.CheckboxInput()
- self.fields['public'].empty_value = False
+ self.fields["public"].widget = forms.CheckboxInput()
+ self.fields["public"].empty_value = False
def clean_project(self):
return self.project
def clean_name(self):
"""Validate environment variable name chosen."""
- name = self.cleaned_data['name']
- if name.startswith('__'):
+ name = self.cleaned_data["name"]
+ if name.startswith("__"):
raise forms.ValidationError(
_("Variable name can't start with __ (double underscore)"),
)
- if name.startswith('READTHEDOCS'):
+ if name.startswith("READTHEDOCS"):
raise forms.ValidationError(
_("Variable name can't start with READTHEDOCS"),
)
if self.project.environmentvariable_set.filter(name=name).exists():
raise forms.ValidationError(
_(
- 'There is already a variable with this name for this project',
+ "There is already a variable with this name for this project",
),
)
- if ' ' in name:
+ if " " in name:
raise forms.ValidationError(
_("Variable name can't contain spaces"),
)
- if not fullmatch('[a-zA-Z0-9_]+', name):
+ if not fullmatch("[a-zA-Z0-9_]+", name):
raise forms.ValidationError(
- _('Only letters, numbers and underscore are allowed'),
+ _("Only letters, numbers and underscore are allowed"),
)
return name
diff --git a/readthedocs/projects/migrations/0116_mark_fields_as_null.py b/readthedocs/projects/migrations/0116_mark_fields_as_null.py
new file mode 100644
index 00000000000..e867d9278f4
--- /dev/null
+++ b/readthedocs/projects/migrations/0116_mark_fields_as_null.py
@@ -0,0 +1,213 @@
+# Generated by Django 4.2.10 on 2024-02-19 11:16
+
+from django.db import migrations, models
+from django_safemigrate import Safe
+
+
+class Migration(migrations.Migration):
+ safe = Safe.before_deploy
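+ # Marking these columns nullable is backward compatible with the running
+ # release; the columns themselves are only dropped in 0117 after deploy.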
+
+ dependencies = [
+ ("projects", "0115_add_addonsconfig_history"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="conf_py_file",
+ field=models.CharField(
+ blank=True,
+ default="",
+ help_text="Path from project root to conf.py
file (ex. docs/conf.py
). Leave blank if you want us to find it for you.",
+ max_length=255,
+ null=True,
+ verbose_name="Python configuration file",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="documentation_type",
+ field=models.CharField(
+ choices=[
+ ("sphinx", "Sphinx Html"),
+ ("mkdocs", "Mkdocs"),
+ ("sphinx_htmldir", "Sphinx HtmlDir"),
+ ("sphinx_singlehtml", "Sphinx Single Page HTML"),
+ ],
+ default=None,
+ null=True,
+ blank=True,
+ help_text='Type of documentation you are building. More info on sphinx builders.',
+ max_length=20,
+ verbose_name="Documentation type",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="enable_epub_build",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Create a EPUB version of your documentation with each build.",
+ null=True,
+ verbose_name="Enable EPUB build",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="enable_pdf_build",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Create a PDF version of your documentation with each build.",
+ null=True,
+ verbose_name="Enable PDF build",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="install_project",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Install your project inside a virtualenv using setup.py install
",
+ null=True,
+ verbose_name="Install Project",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="path",
+ field=models.CharField(
+ blank=True,
+ editable=False,
+ help_text="The directory where conf.py
lives",
+ max_length=255,
+ null=True,
+ verbose_name="Path",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="python_interpreter",
+ field=models.CharField(
+ blank=True,
+ choices=[("python", "CPython 2.x"), ("python3", "CPython 3.x")],
+ default="python3",
+ help_text="The Python interpreter used to create the virtual environment.",
+ max_length=20,
+ null=True,
+ verbose_name="Python Interpreter",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="historicalproject",
+ name="use_system_packages",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Give the virtual environment access to the global site-packages dir.",
+ null=True,
+ verbose_name="Use system packages",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="conf_py_file",
+ field=models.CharField(
+ blank=True,
+ default="",
+ help_text="Path from project root to conf.py
file (ex. docs/conf.py
). Leave blank if you want us to find it for you.",
+ max_length=255,
+ null=True,
+ verbose_name="Python configuration file",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="documentation_type",
+ field=models.CharField(
+ choices=[
+ ("sphinx", "Sphinx Html"),
+ ("mkdocs", "Mkdocs"),
+ ("sphinx_htmldir", "Sphinx HtmlDir"),
+ ("sphinx_singlehtml", "Sphinx Single Page HTML"),
+ ],
+ default=None,
+ null=True,
+ blank=True,
+ help_text='Type of documentation you are building. More info on sphinx builders.',
+ max_length=20,
+ verbose_name="Documentation type",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="enable_epub_build",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Create a EPUB version of your documentation with each build.",
+ null=True,
+ verbose_name="Enable EPUB build",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="enable_pdf_build",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Create a PDF version of your documentation with each build.",
+ null=True,
+ verbose_name="Enable PDF build",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="install_project",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Install your project inside a virtualenv using setup.py install
",
+ null=True,
+ verbose_name="Install Project",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="path",
+ field=models.CharField(
+ blank=True,
+ editable=False,
+ help_text="The directory where conf.py
lives",
+ max_length=255,
+ null=True,
+ verbose_name="Path",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="python_interpreter",
+ field=models.CharField(
+ blank=True,
+ choices=[("python", "CPython 2.x"), ("python3", "CPython 3.x")],
+ default="python3",
+ help_text="The Python interpreter used to create the virtual environment.",
+ max_length=20,
+ null=True,
+ verbose_name="Python Interpreter",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="project",
+ name="use_system_packages",
+ field=models.BooleanField(
+ blank=True,
+ default=False,
+ help_text="Give the virtual environment access to the global site-packages dir.",
+ null=True,
+ verbose_name="Use system packages",
+ ),
+ ),
+ ]
diff --git a/readthedocs/projects/migrations/0117_remove_old_fields.py b/readthedocs/projects/migrations/0117_remove_old_fields.py
new file mode 100644
index 00000000000..3fb69bfc6fd
--- /dev/null
+++ b/readthedocs/projects/migrations/0117_remove_old_fields.py
@@ -0,0 +1,71 @@
+# Generated by Django 4.2.10 on 2024-02-19 11:17
+
+from django.db import migrations
+from django_safemigrate import Safe
+
+
+class Migration(migrations.Migration):
+ safe = Safe.after_deploy
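+ # Dropping the columns runs after deploy, so the previous release (which
+ # may still reference these fields) keeps working during the rollout.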
+
+ dependencies = [
+ ("projects", "0116_mark_fields_as_null"),
+ ]
+
+ operations = [
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="conf_py_file",
+ ),
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="enable_epub_build",
+ ),
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="enable_pdf_build",
+ ),
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="install_project",
+ ),
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="path",
+ ),
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="python_interpreter",
+ ),
+ migrations.RemoveField(
+ model_name="historicalproject",
+ name="use_system_packages",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="conf_py_file",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="enable_epub_build",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="enable_pdf_build",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="install_project",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="path",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="python_interpreter",
+ ),
+ migrations.RemoveField(
+ model_name="project",
+ name="use_system_packages",
+ ),
+ ]
diff --git a/readthedocs/projects/models.py b/readthedocs/projects/models.py
index 0457ab9f9cd..14015566e60 100644
--- a/readthedocs/projects/models.py
+++ b/readthedocs/projects/models.py
@@ -315,17 +315,6 @@ class Project(models.Model):
"Path from the root of your project.",
),
)
- documentation_type = models.CharField(
- _("Documentation type"),
- max_length=20,
- choices=constants.DOCUMENTATION_CHOICES,
- default="sphinx",
- help_text=_(
- 'Type of documentation you are building. More info on sphinx builders.',
- ),
- )
custom_prefix = models.CharField(
_("Custom path prefix"),
@@ -557,99 +546,23 @@ class Project(models.Model):
object_id_field="attached_to_id",
)
- # TODO: remove the following fields since they all are going to be ignored
- # by the application when we start requiring a ``.readthedocs.yaml`` file.
- # These fields are:
- # - requirements_file
- # - documentation_type
- # - enable_epub_build
- # - enable_pdf_build
- # - path
- # - conf_py_file
- # - install_project
- # - python_interpreter
- # - use_system_packages
- requirements_file = models.CharField(
- _("Requirements file"),
- max_length=255,
- default=None,
- null=True,
- blank=True,
- help_text=_(
- "A '
- "pip requirements file needed to build your documentation. "
- "Path from the root of your project.",
- ),
- )
+ # TODO: remove field ``documentation_type`` when possible.
+ # This field is not used anymore in the application.
+ # However, the APIv3 project details endpoint returns it,
+ # and some tests and similar code still depend on it.
documentation_type = models.CharField(
_("Documentation type"),
max_length=20,
choices=constants.DOCUMENTATION_CHOICES,
- default="sphinx",
+ default=None,
+ null=True,
+ blank=True,
help_text=_(
'Type of documentation you are building. More info on sphinx builders.',
),
)
- enable_epub_build = models.BooleanField(
- _("Enable EPUB build"),
- default=False,
- help_text=_(
- "Create a EPUB version of your documentation with each build.",
- ),
- )
- enable_pdf_build = models.BooleanField(
- _("Enable PDF build"),
- default=False,
- help_text=_(
- "Create a PDF version of your documentation with each build.",
- ),
- )
- path = models.CharField(
- _("Path"),
- max_length=255,
- editable=False,
- help_text=_(
- "The directory where conf.py
lives",
- ),
- )
- conf_py_file = models.CharField(
- _("Python configuration file"),
- max_length=255,
- default="",
- blank=True,
- help_text=_(
- "Path from project root to conf.py
file "
- "(ex. docs/conf.py
). "
- "Leave blank if you want us to find it for you.",
- ),
- )
- install_project = models.BooleanField(
- _("Install Project"),
- help_text=_(
- "Install your project inside a virtualenv using setup.py "
- "install
",
- ),
- default=False,
- )
- python_interpreter = models.CharField(
- _("Python Interpreter"),
- max_length=20,
- choices=constants.PYTHON_CHOICES,
- default="python3",
- help_text=_(
- "The Python interpreter used to create the virtual environment.",
- ),
- )
- use_system_packages = models.BooleanField(
- _("Use system packages"),
- help_text=_(
- "Give the virtual environment access to the global site-packages dir.",
- ),
- default=False,
- )
# Property used for storing the latest build for a project when prefetching
LATEST_BUILD_CACHE = "_latest_build"
@@ -961,18 +874,6 @@ def artifact_path(self, type_, version=LATEST):
def conf_file(self, version=LATEST):
"""Find a Sphinx ``conf.py`` file in the project checkout."""
- if self.conf_py_file:
- conf_path = os.path.join(
- self.checkout_path(version),
- self.conf_py_file,
- )
-
- if os.path.exists(conf_path):
- log.info("Inserting conf.py file path from model")
- return conf_path
-
- log.warning("Conf file specified on model doesn't exist")
-
files = self.find("conf.py", version)
if not files:
files = self.full_find("conf.py", version)
diff --git a/readthedocs/projects/tests/test_build_tasks.py b/readthedocs/projects/tests/test_build_tasks.py
index abecffaa0da..cf62eadd114 100644
--- a/readthedocs/projects/tests/test_build_tasks.py
+++ b/readthedocs/projects/tests/test_build_tasks.py
@@ -28,7 +28,6 @@
@pytest.mark.django_db(databases="__all__")
class BuildEnvironmentBase:
-
# NOTE: `load_yaml_config` may be moved to the setup and assigned to self.
@pytest.fixture(autouse=True)
@@ -61,8 +60,6 @@ def _get_project(self):
return fixture.get(
Project,
slug="project",
- enable_epub_build=True,
- enable_pdf_build=True,
)
def _trigger_update_docs_task(self):
@@ -74,8 +71,8 @@ def _trigger_update_docs_task(self):
build_commit=self.build.commit,
)
-class TestCustomConfigFile(BuildEnvironmentBase):
+class TestCustomConfigFile(BuildEnvironmentBase):
# Relative path to where a custom config file is assumed to exist in repo
config_file_name = "unique.yaml"
@@ -83,8 +80,6 @@ def _get_project(self):
return fixture.get(
Project,
slug="project",
- enable_epub_build=False,
- enable_pdf_build=False,
readthedocs_yaml_path=self.config_file_name,
)
@@ -155,6 +150,7 @@ def test_config_file_is_loaded(
# Assert that we are building a PDF, since that is what our custom config file says
build_docs_class.assert_called_with("sphinx_pdf")
+
class TestBuildTask(BuildEnvironmentBase):
@pytest.mark.parametrize(
"formats,builders",
@@ -641,10 +637,9 @@ def test_failed_build(
self._trigger_update_docs_task()
# It has to be called twice, ``before_start`` and ``after_return``
- clean_build.assert_has_calls([
- mock.call(mock.ANY), # the argument is an APIVersion
- mock.call(mock.ANY)
- ])
+ clean_build.assert_has_calls(
+ [mock.call(mock.ANY), mock.call(mock.ANY)] # the argument is an APIVersion
+ )
send_notifications.assert_called_once_with(
self.version.pk,
@@ -1894,27 +1889,27 @@ class TestSyncRepositoryTask(BuildEnvironmentBase):
def _trigger_sync_repository_task(self):
sync_repository_task.delay(self.version.pk, build_api_key="1234")
- @mock.patch('readthedocs.projects.tasks.builds.clean_build')
+ @mock.patch("readthedocs.projects.tasks.builds.clean_build")
def test_clean_build_after_sync_repository(self, clean_build):
self._trigger_sync_repository_task()
clean_build.assert_called_once()
- @mock.patch('readthedocs.projects.tasks.builds.SyncRepositoryTask.execute')
- @mock.patch('readthedocs.projects.tasks.builds.clean_build')
+ @mock.patch("readthedocs.projects.tasks.builds.SyncRepositoryTask.execute")
+ @mock.patch("readthedocs.projects.tasks.builds.clean_build")
def test_clean_build_after_failure_in_sync_repository(self, clean_build, execute):
- execute.side_effect = Exception('Something weird happen')
+ execute.side_effect = Exception("Something weird happen")
self._trigger_sync_repository_task()
clean_build.assert_called_once()
@pytest.mark.parametrize(
- 'verbose_name',
+ "verbose_name",
[
- 'stable',
- 'latest',
+ "stable",
+ "latest",
],
)
- @mock.patch('readthedocs.projects.tasks.builds.SyncRepositoryTask.on_failure')
+ @mock.patch("readthedocs.projects.tasks.builds.SyncRepositoryTask.on_failure")
def test_check_duplicate_reserved_version_latest(self, on_failure, verbose_name):
# `repository.tags` and `repository.branch` both will return a tag/branch named `latest/stable`
with mock.patch(
diff --git a/readthedocs/rtd_tests/tests/test_api.py b/readthedocs/rtd_tests/tests/test_api.py
index cb65719f6ae..027cc53607d 100644
--- a/readthedocs/rtd_tests/tests/test_api.py
+++ b/readthedocs/rtd_tests/tests/test_api.py
@@ -84,10 +84,10 @@ def get_signature(integration, payload):
@override_settings(PUBLIC_DOMAIN="readthedocs.io")
class APIBuildTests(TestCase):
- fixtures = ['eric.json', 'test_data.json']
+ fixtures = ["eric.json", "test_data.json"]
def setUp(self):
- self.user = User.objects.get(username='eric')
+ self.user = User.objects.get(username="eric")
self.project = get(Project, users=[self.user])
self.version = self.project.versions.get(slug=LATEST)
@@ -98,10 +98,10 @@ def test_reset_build(self):
version=self.version,
state=BUILD_STATE_CLONING,
success=False,
- output='Output',
- error='Error',
+ output="Output",
+ error="Error",
exit_code=9,
- builder='Builder',
+ builder="Builder",
cold_storage=True,
)
command = get(
@@ -122,35 +122,34 @@ def test_reset_build(self):
_, build_api_key = BuildAPIKey.objects.create_key(self.project)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- r = client.post(reverse('build-reset', args=(build.pk,)))
+ r = client.post(reverse("build-reset", args=(build.pk,)))
self.assertEqual(r.status_code, 204)
build.refresh_from_db()
self.assertEqual(build.project, self.project)
self.assertEqual(build.version, self.version)
self.assertEqual(build.state, BUILD_STATE_TRIGGERED)
- self.assertEqual(build.status, '')
+ self.assertEqual(build.status, "")
self.assertTrue(build.success)
- self.assertEqual(build.output, '')
- self.assertEqual(build.error, '')
+ self.assertEqual(build.output, "")
+ self.assertEqual(build.error, "")
self.assertIsNone(build.exit_code)
- self.assertEqual(build.builder, '')
+ self.assertEqual(build.builder, "")
self.assertFalse(build.cold_storage)
self.assertEqual(build.commands.count(), 0)
self.assertEqual(build.notifications.count(), 0)
-
def test_api_does_not_have_private_config_key_superuser(self):
client = APIClient()
- client.login(username='super', password='test')
+ client.login(username="super", password="test")
project = Project.objects.get(pk=1)
version = project.versions.first()
build = Build.objects.create(project=project, version=version)
- resp = client.get('/api/v2/build/{}/'.format(build.pk))
+ resp = client.get("/api/v2/build/{}/".format(build.pk))
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertIn('config', resp.data)
- self.assertNotIn('_config', resp.data)
+ self.assertIn("config", resp.data)
+ self.assertNotIn("_config", resp.data)
def test_api_does_not_have_private_config_key_normal_user(self):
client = APIClient()
@@ -158,10 +157,10 @@ def test_api_does_not_have_private_config_key_normal_user(self):
version = project.versions.first()
build = Build.objects.create(project=project, version=version)
- resp = client.get('/api/v2/build/{}/'.format(build.pk))
+ resp = client.get("/api/v2/build/{}/".format(build.pk))
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertIn('config', resp.data)
- self.assertNotIn('_config', resp.data)
+ self.assertIn("config", resp.data)
+ self.assertNotIn("_config", resp.data)
def test_save_same_config_using_patch(self):
project = Project.objects.get(pk=1)
@@ -173,30 +172,30 @@ def test_save_same_config_using_patch(self):
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
resp = client.patch(
- '/api/v2/build/{}/'.format(build_one.pk),
- {'config': {'one': 'two'}},
- format='json',
+ "/api/v2/build/{}/".format(build_one.pk),
+ {"config": {"one": "two"}},
+ format="json",
)
- self.assertEqual(resp.data['config'], {'one': 'two'})
+ self.assertEqual(resp.data["config"], {"one": "two"})
build_two = Build.objects.create(project=project, version=version)
resp = client.patch(
- '/api/v2/build/{}/'.format(build_two.pk),
- {'config': {'one': 'two'}},
- format='json',
+ "/api/v2/build/{}/".format(build_two.pk),
+ {"config": {"one": "two"}},
+ format="json",
)
- self.assertEqual(resp.data['config'], {'one': 'two'})
+ self.assertEqual(resp.data["config"], {"one": "two"})
- resp = client.get('/api/v2/build/{}/'.format(build_one.pk))
+ resp = client.get("/api/v2/build/{}/".format(build_one.pk))
self.assertEqual(resp.status_code, status.HTTP_200_OK)
build = resp.data
- self.assertEqual(build['config'], {'one': 'two'})
+ self.assertEqual(build["config"], {"one": "two"})
# Checking the values from the db, just to be sure the
# api isn't lying.
self.assertEqual(
Build.objects.get(pk=build_one.pk)._config,
- {'one': 'two'},
+ {"one": "two"},
)
self.assertEqual(
Build.objects.get(pk=build_two.pk)._config,
@@ -206,10 +205,10 @@ def test_save_same_config_using_patch(self):
def test_response_building(self):
"""The ``view docs`` attr should return a link to the dashboard."""
client = APIClient()
- client.login(username='super', password='test')
+ client.login(username="super", password="test")
project = get(
Project,
- language='en',
+ language="en",
main_language_project=None,
)
version = get(
@@ -222,17 +221,17 @@ def test_response_building(self):
Build,
project=project,
version=version,
- state='cloning',
+ state="cloning",
exit_code=0,
)
- resp = client.get('/api/v2/build/{build}/'.format(build=build.pk))
+ resp = client.get("/api/v2/build/{build}/".format(build=build.pk))
self.assertEqual(resp.status_code, 200)
dashboard_url = reverse(
- 'project_version_detail',
+ "project_version_detail",
kwargs={
- 'project_slug': project.slug,
- 'version_slug': version.slug,
+ "project_slug": project.slug,
+ "version_slug": version.slug,
},
)
@@ -248,7 +247,7 @@ def test_response_building(self):
def test_response_finished_and_success(self):
"""The ``view docs`` attr should return a link to the docs."""
client = APIClient()
- client.login(username='super', password='test')
+ client.login(username="super", password="test")
project = get(
Project,
language="en",
@@ -266,7 +265,7 @@ def test_response_finished_and_success(self):
Build,
project=project,
version=version,
- state='finished',
+ state="finished",
exit_code=0,
)
buildcommandresult = get(
@@ -275,7 +274,7 @@ def test_response_finished_and_success(self):
command="python -m pip install --upgrade --no-cache-dir pip setuptools<58.3.0",
exit_code=0,
)
- resp = client.get('/api/v2/build/{build}/'.format(build=build.pk))
+ resp = client.get("/api/v2/build/{build}/".format(build=build.pk))
self.assertEqual(resp.status_code, 200)
build = resp.data
docs_url = f"http://{project.slug}.readthedocs.io/en/{version.slug}/"
@@ -293,10 +292,10 @@ def test_response_finished_and_success(self):
def test_response_finished_and_fail(self):
"""The ``view docs`` attr should return a link to the dashboard."""
client = APIClient()
- client.login(username='super', password='test')
+ client.login(username="super", password="test")
project = get(
Project,
- language='en',
+ language="en",
main_language_project=None,
)
version = get(
@@ -309,19 +308,19 @@ def test_response_finished_and_fail(self):
Build,
project=project,
version=version,
- state='finished',
+ state="finished",
success=False,
exit_code=1,
)
- resp = client.get('/api/v2/build/{build}/'.format(build=build.pk))
+ resp = client.get("/api/v2/build/{build}/".format(build=build.pk))
self.assertEqual(resp.status_code, 200)
dashboard_url = reverse(
- 'project_version_detail',
+ "project_version_detail",
kwargs={
- 'project_slug': project.slug,
- 'version_slug': version.slug,
+ "project_slug": project.slug,
+ "version_slug": version.slug,
},
)
build = resp.data
@@ -338,21 +337,21 @@ def test_make_build_without_permission(self):
def _try_post():
resp = client.post(
- '/api/v2/build/',
+ "/api/v2/build/",
{
- 'project': 1,
- 'version': 1,
- 'success': True,
- 'output': 'Test Output',
- 'error': 'Test Error',
+ "project": 1,
+ "version": 1,
+ "success": True,
+ "output": "Test Output",
+ "error": "Test Error",
},
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, 403)
_try_post()
- api_user = get(User, is_staff=False, password='test')
+ api_user = get(User, is_staff=False, password="test")
assert api_user.is_staff is False
client.force_authenticate(user=api_user)
_try_post()
@@ -360,19 +359,19 @@ def _try_post():
def test_update_build_without_permission(self):
"""Ensure anonymous/non-staff users cannot update build endpoints."""
client = APIClient()
- api_user = get(User, is_staff=False, password='test')
+ api_user = get(User, is_staff=False, password="test")
client.force_authenticate(user=api_user)
project = Project.objects.get(pk=1)
version = project.versions.first()
- build = get(Build, project=project, version=version, state='cloning')
+ build = get(Build, project=project, version=version, state="cloning")
resp = client.put(
- '/api/v2/build/{}/'.format(build.pk),
+ "/api/v2/build/{}/".format(build.pk),
{
- 'project': 1,
- 'version': 1,
- 'state': 'finished',
+ "project": 1,
+ "version": 1,
+ "state": "finished",
},
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, 403)
@@ -385,19 +384,19 @@ def test_make_build_protected_fields(self):
"""
project = Project.objects.get(pk=1)
version = project.versions.first()
- build = get(Build, project=project, version=version, builder='foo')
+ build = get(Build, project=project, version=version, builder="foo")
client = APIClient()
- api_user = get(User, is_staff=False, password='test')
+ api_user = get(User, is_staff=False, password="test")
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{}/'.format(build.pk), format='json')
+ resp = client.get("/api/v2/build/{}/".format(build.pk), format="json")
self.assertEqual(resp.status_code, 200)
_, build_api_key = BuildAPIKey.objects.create_key(project)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- resp = client.get('/api/v2/build/{}/'.format(build.pk), format='json')
+ resp = client.get("/api/v2/build/{}/".format(build.pk), format="json")
self.assertEqual(resp.status_code, 200)
- self.assertIn('builder', resp.data)
+ self.assertIn("builder", resp.data)
def test_make_build_commands(self):
"""Create build commands."""
@@ -410,7 +409,7 @@ def test_make_build_commands(self):
start_time = now - datetime.timedelta(seconds=5)
end_time = now
resp = client.post(
- '/api/v2/command/',
+ "/api/v2/command/",
{
"build": build.pk,
"command": "$CONDA_ENVS_PATH/$CONDA_DEFAULT_ENV/bin/python -m sphinx",
@@ -431,7 +430,7 @@ def test_make_build_commands(self):
"start_time": start_time + datetime.timedelta(seconds=1),
"end_time": end_time,
},
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
resp = client.get(f"/api/v2/build/{build.pk}/")
@@ -482,20 +481,20 @@ def test_get_raw_log_success(self):
api_user = get(User)
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{}.txt'.format(build.pk))
+ resp = client.get("/api/v2/build/{}.txt".format(build.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn('Read the Docs build information', resp.content.decode())
- self.assertIn('Build id: {}'.format(build.id), resp.content.decode())
- self.assertIn('Project: {}'.format(build.project.slug), resp.content.decode())
- self.assertIn('Version: {}'.format(build.version.slug), resp.content.decode())
- self.assertIn('Commit: {}'.format(build.commit), resp.content.decode())
- self.assertIn('Date: ', resp.content.decode())
- self.assertIn('State: finished', resp.content.decode())
- self.assertIn('Success: True', resp.content.decode())
- self.assertIn('[rtd-command-info]', resp.content.decode())
+ self.assertIn("Read the Docs build information", resp.content.decode())
+ self.assertIn("Build id: {}".format(build.id), resp.content.decode())
+ self.assertIn("Project: {}".format(build.project.slug), resp.content.decode())
+ self.assertIn("Version: {}".format(build.version.slug), resp.content.decode())
+ self.assertIn("Commit: {}".format(build.commit), resp.content.decode())
+ self.assertIn("Date: ", resp.content.decode())
+ self.assertIn("State: finished", resp.content.decode())
+ self.assertIn("Success: True", resp.content.decode())
+ self.assertIn("[rtd-command-info]", resp.content.decode())
self.assertIn(
- 'python setup.py install\nInstalling dependencies...',
+ "python setup.py install\nInstalling dependencies...",
resp.content.decode(),
)
self.assertIn(
@@ -507,41 +506,45 @@ def test_get_raw_log_building(self):
project = Project.objects.get(pk=1)
version = project.versions.first()
build = get(
- Build, project=project, version=version,
- builder='foo', success=False,
- exit_code=1, state='building',
+ Build,
+ project=project,
+ version=version,
+ builder="foo",
+ success=False,
+ exit_code=1,
+ state="building",
)
get(
BuildCommandResult,
build=build,
- command='python setup.py install',
- output='Installing dependencies...',
+ command="python setup.py install",
+ output="Installing dependencies...",
exit_code=1,
)
get(
BuildCommandResult,
build=build,
- command='git checkout master',
+ command="git checkout master",
output='Switched to branch "master"',
)
client = APIClient()
api_user = get(User)
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{}.txt'.format(build.pk))
+ resp = client.get("/api/v2/build/{}.txt".format(build.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn('Read the Docs build information', resp.content.decode())
- self.assertIn('Build id: {}'.format(build.id), resp.content.decode())
- self.assertIn('Project: {}'.format(build.project.slug), resp.content.decode())
- self.assertIn('Version: {}'.format(build.version.slug), resp.content.decode())
- self.assertIn('Commit: {}'.format(build.commit), resp.content.decode())
- self.assertIn('Date: ', resp.content.decode())
- self.assertIn('State: building', resp.content.decode())
- self.assertIn('Success: Unknow', resp.content.decode())
- self.assertIn('[rtd-command-info]', resp.content.decode())
+ self.assertIn("Read the Docs build information", resp.content.decode())
+ self.assertIn("Build id: {}".format(build.id), resp.content.decode())
+ self.assertIn("Project: {}".format(build.project.slug), resp.content.decode())
+ self.assertIn("Version: {}".format(build.version.slug), resp.content.decode())
+ self.assertIn("Commit: {}".format(build.commit), resp.content.decode())
+ self.assertIn("Date: ", resp.content.decode())
+ self.assertIn("State: building", resp.content.decode())
+ self.assertIn("Success: Unknow", resp.content.decode())
+ self.assertIn("[rtd-command-info]", resp.content.decode())
self.assertIn(
- 'python setup.py install\nInstalling dependencies...',
+ "python setup.py install\nInstalling dependencies...",
resp.content.decode(),
)
self.assertIn(
@@ -571,27 +574,27 @@ def test_get_raw_log_failure(self):
get(
BuildCommandResult,
build=build,
- command='git checkout master',
+ command="git checkout master",
output='Switched to branch "master"',
)
client = APIClient()
api_user = get(User)
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{}.txt'.format(build.pk))
+ resp = client.get("/api/v2/build/{}.txt".format(build.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn('Read the Docs build information', resp.content.decode())
- self.assertIn('Build id: {}'.format(build.id), resp.content.decode())
- self.assertIn('Project: {}'.format(build.project.slug), resp.content.decode())
- self.assertIn('Version: {}'.format(build.version.slug), resp.content.decode())
- self.assertIn('Commit: {}'.format(build.commit), resp.content.decode())
- self.assertIn('Date: ', resp.content.decode())
- self.assertIn('State: finished', resp.content.decode())
- self.assertIn('Success: False', resp.content.decode())
- self.assertIn('[rtd-command-info]', resp.content.decode())
+ self.assertIn("Read the Docs build information", resp.content.decode())
+ self.assertIn("Build id: {}".format(build.id), resp.content.decode())
+ self.assertIn("Project: {}".format(build.project.slug), resp.content.decode())
+ self.assertIn("Version: {}".format(build.version.slug), resp.content.decode())
+ self.assertIn("Commit: {}".format(build.commit), resp.content.decode())
+ self.assertIn("Date: ", resp.content.decode())
+ self.assertIn("State: finished", resp.content.decode())
+ self.assertIn("Success: False", resp.content.decode())
+ self.assertIn("[rtd-command-info]", resp.content.decode())
self.assertIn(
- 'python setup.py install\nInstalling dependencies...',
+ "python setup.py install\nInstalling dependencies...",
resp.content.decode(),
)
self.assertIn(
@@ -604,7 +607,7 @@ def test_get_invalid_raw_log(self):
api_user = get(User)
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/{}.txt'.format(404))
+ resp = client.get("/api/v2/build/{}.txt".format(404))
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
def test_build_filter_by_commit(self):
@@ -617,19 +620,19 @@ def test_build_filter_by_commit(self):
project2 = Project.objects.get(pk=2)
version1 = project1.versions.first()
version2 = project2.versions.first()
- get(Build, project=project1, version=version1, builder='foo', commit='test')
- get(Build, project=project2, version=version2, builder='foo', commit='other')
+ get(Build, project=project1, version=version1, builder="foo", commit="test")
+ get(Build, project=project2, version=version2, builder="foo", commit="other")
client = APIClient()
- api_user = get(User, is_staff=False, password='test')
+ api_user = get(User, is_staff=False, password="test")
client.force_authenticate(user=api_user)
- resp = client.get('/api/v2/build/', {'commit': 'test'}, format='json')
+ resp = client.get("/api/v2/build/", {"commit": "test"}, format="json")
self.assertEqual(resp.status_code, 200)
build = resp.data
- self.assertEqual(len(build['results']), 1)
+ self.assertEqual(len(build["results"]), 1)
class APITests(TestCase):
- fixtures = ['eric.json', 'test_data.json']
+ fixtures = ["eric.json", "test_data.json"]
def test_create_key_for_project_with_long_slug(self):
user = get(User)
@@ -669,7 +672,6 @@ def test_user_doesnt_get_full_api_return(self):
project = get(
Project,
main_language_project=None,
- conf_py_file="foo",
readthedocs_yaml_path="bar",
)
client = APIClient()
@@ -678,16 +680,13 @@ def test_user_doesnt_get_full_api_return(self):
client.force_authenticate(user=user)
resp = client.get("/api/v2/project/%s/" % (project.pk))
self.assertEqual(resp.status_code, 200)
- self.assertNotIn("conf_py_file", resp.data)
self.assertNotIn("readthedocs_yaml_path", resp.data)
_, build_api_key = BuildAPIKey.objects.create_key(project)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- resp = client.get('/api/v2/project/%s/' % (project.pk))
+ resp = client.get("/api/v2/project/%s/" % (project.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn("conf_py_file", resp.data)
- self.assertEqual(resp.data["conf_py_file"], "foo")
self.assertIn("readthedocs_yaml_path", resp.data)
self.assertEqual(resp.data["readthedocs_yaml_path"], "bar")
@@ -1395,11 +1394,11 @@ def test_project_features(self):
_, build_api_key = BuildAPIKey.objects.create_key(project)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- resp = client.get('/api/v2/project/%s/' % (project.pk))
+ resp = client.get("/api/v2/project/%s/" % (project.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn('features', resp.data)
+ self.assertIn("features", resp.data)
self.assertCountEqual(
- resp.data['features'],
+ resp.data["features"],
[feature1.feature_id, feature2.feature_id],
)
@@ -1411,13 +1410,13 @@ def test_project_features_multiple_projects(self):
_, build_api_key = BuildAPIKey.objects.create_key(project1)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- resp = client.get('/api/v2/project/%s/' % (project1.pk))
+ resp = client.get("/api/v2/project/%s/" % (project1.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn('features', resp.data)
- self.assertEqual(resp.data['features'], [feature.feature_id])
+ self.assertIn("features", resp.data)
+ self.assertEqual(resp.data["features"], [feature.feature_id])
def test_remote_repository_pagination(self):
- account = get(SocialAccount, provider='github')
+ account = get(SocialAccount, provider="github")
user = get(User)
for _ in range(20):
@@ -1426,19 +1425,19 @@ def test_remote_repository_pagination(self):
RemoteRepositoryRelation,
remote_repository=repo,
user=user,
- account=account
+ account=account,
)
client = APIClient()
client.force_authenticate(user=user)
- resp = client.get('/api/v2/remote/repo/')
+ resp = client.get("/api/v2/remote/repo/")
self.assertEqual(resp.status_code, 200)
- self.assertEqual(len(resp.data['results']), 15) # page_size
- self.assertIn('?page=2', resp.data['next'])
+ self.assertEqual(len(resp.data["results"]), 15) # page_size
+ self.assertIn("?page=2", resp.data["next"])
def test_remote_organization_pagination(self):
- account = get(SocialAccount, provider='github')
+ account = get(SocialAccount, provider="github")
user = get(User)
for _ in range(30):
org = get(RemoteOrganization)
@@ -1446,23 +1445,23 @@ def test_remote_organization_pagination(self):
RemoteOrganizationRelation,
remote_organization=org,
user=user,
- account=account
+ account=account,
)
client = APIClient()
client.force_authenticate(user=user)
- resp = client.get('/api/v2/remote/org/')
+ resp = client.get("/api/v2/remote/org/")
self.assertEqual(resp.status_code, 200)
- self.assertEqual(len(resp.data['results']), 25) # page_size
- self.assertIn('?page=2', resp.data['next'])
+ self.assertEqual(len(resp.data["results"]), 25) # page_size
+ self.assertIn("?page=2", resp.data["next"])
def test_project_environment_variables(self):
project = get(Project, main_language_project=None)
get(
EnvironmentVariable,
- name='TOKEN',
- value='a1b2c3',
+ name="TOKEN",
+ value="a1b2c3",
project=project,
)
@@ -1470,46 +1469,46 @@ def test_project_environment_variables(self):
_, build_api_key = BuildAPIKey.objects.create_key(project)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- resp = client.get('/api/v2/project/%s/' % (project.pk))
+ resp = client.get("/api/v2/project/%s/" % (project.pk))
self.assertEqual(resp.status_code, 200)
- self.assertIn('environment_variables', resp.data)
+ self.assertIn("environment_variables", resp.data)
self.assertEqual(
- resp.data['environment_variables'],
- {'TOKEN': dict(value='a1b2c3', public=False)},
+ resp.data["environment_variables"],
+ {"TOKEN": dict(value="a1b2c3", public=False)},
)
def test_init_api_project(self):
project_data = {
- 'name': 'Test Project',
- 'slug': 'test-project',
- 'show_advertising': True,
+ "name": "Test Project",
+ "slug": "test-project",
+ "show_advertising": True,
}
api_project = APIProject(**project_data)
- self.assertEqual(api_project.slug, 'test-project')
+ self.assertEqual(api_project.slug, "test-project")
self.assertEqual(api_project.features, [])
self.assertFalse(api_project.ad_free)
self.assertTrue(api_project.show_advertising)
self.assertEqual(api_project.environment_variables(public_only=False), {})
self.assertEqual(api_project.environment_variables(public_only=True), {})
- project_data['features'] = ['test-feature']
- project_data['show_advertising'] = False
- project_data['environment_variables'] = {
- 'TOKEN': dict(value='a1b2c3', public=False),
- 'RELEASE': dict(value='prod', public=True),
+ project_data["features"] = ["test-feature"]
+ project_data["show_advertising"] = False
+ project_data["environment_variables"] = {
+ "TOKEN": dict(value="a1b2c3", public=False),
+ "RELEASE": dict(value="prod", public=True),
}
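+ # With advertising disabled the project is treated as ad-free, and public_only=True filters out the private variables.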
api_project = APIProject(**project_data)
- self.assertEqual(api_project.features, ['test-feature'])
+ self.assertEqual(api_project.features, ["test-feature"])
self.assertTrue(api_project.ad_free)
self.assertFalse(api_project.show_advertising)
self.assertEqual(
api_project.environment_variables(public_only=False),
- {'TOKEN': 'a1b2c3', 'RELEASE': 'prod'},
+ {"TOKEN": "a1b2c3", "RELEASE": "prod"},
)
self.assertEqual(
api_project.environment_variables(public_only=True),
- {'RELEASE': 'prod'},
+ {"RELEASE": "prod"},
)
def test_invalid_attributes_api_project(self):
@@ -1542,9 +1541,9 @@ def test_invalid_attributes_api_version(self):
)
def test_concurrent_builds(self):
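+ # Expected payload from the concurrency endpoint: two builds running against a maximum of four, so the limit is not reached.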
expected = {
- 'limit_reached': False,
- 'concurrent': 2,
- 'max_concurrent': 4,
+ "limit_reached": False,
+ "concurrent": 2,
+ "max_concurrent": 4,
}
project = get(
Project,
@@ -1562,7 +1561,9 @@ def test_concurrent_builds(self):
_, build_api_key = BuildAPIKey.objects.create_key(project)
client.credentials(HTTP_AUTHORIZATION=f"Token {build_api_key}")
- resp = client.get(f'/api/v2/build/concurrent/', data={'project__slug': project.slug})
+ resp = client.get(
+ f"/api/v2/build/concurrent/", data={"project__slug": project.slug}
+ )
self.assertEqual(resp.status_code, 200)
self.assertDictEqual(expected, resp.data)
@@ -1571,24 +1572,24 @@ class APIImportTests(TestCase):
"""Import API endpoint tests."""
- fixtures = ['eric.json', 'test_data.json']
+ fixtures = ["eric.json", "test_data.json"]
def test_permissions(self):
"""Ensure user repositories aren't leaked to other users."""
client = APIClient()
- account_a = get(SocialAccount, provider='github')
- account_b = get(SocialAccount, provider='github')
- account_c = get(SocialAccount, provider='github')
- user_a = get(User, password='test')
- user_b = get(User, password='test')
- user_c = get(User, password='test')
+ account_a = get(SocialAccount, provider="github")
+ account_b = get(SocialAccount, provider="github")
+ account_c = get(SocialAccount, provider="github")
+ user_a = get(User, password="test")
+ user_b = get(User, password="test")
+ user_c = get(User, password="test")
org_a = get(RemoteOrganization)
get(
RemoteOrganizationRelation,
remote_organization=org_a,
user=user_a,
- account=account_a
+ account=account_a,
)
repo_a = get(
RemoteRepository,
@@ -1598,7 +1599,7 @@ def test_permissions(self):
RemoteRepositoryRelation,
remote_repository=repo_a,
user=user_a,
- account=account_a
+ account=account_a,
)
repo_b = get(
@@ -1609,44 +1610,44 @@ def test_permissions(self):
RemoteRepositoryRelation,
remote_repository=repo_b,
user=user_b,
- account=account_b
+ account=account_b,
)
client.force_authenticate(user=user_a)
- resp = client.get('/api/v2/remote/repo/', format='json')
+ resp = client.get("/api/v2/remote/repo/", format="json")
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- repos = resp.data['results']
- self.assertEqual(repos[0]['id'], repo_a.id)
- self.assertEqual(repos[0]['organization']['id'], org_a.id)
+ repos = resp.data["results"]
+ self.assertEqual(repos[0]["id"], repo_a.id)
+ self.assertEqual(repos[0]["organization"]["id"], org_a.id)
self.assertEqual(len(repos), 1)
- resp = client.get('/api/v2/remote/org/', format='json')
+ resp = client.get("/api/v2/remote/org/", format="json")
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- orgs = resp.data['results']
- self.assertEqual(orgs[0]['id'], org_a.id)
+ orgs = resp.data["results"]
+ self.assertEqual(orgs[0]["id"], org_a.id)
self.assertEqual(len(orgs), 1)
client.force_authenticate(user=user_b)
- resp = client.get('/api/v2/remote/repo/', format='json')
+ resp = client.get("/api/v2/remote/repo/", format="json")
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- repos = resp.data['results']
- self.assertEqual(repos[0]['id'], repo_b.id)
- self.assertEqual(repos[0]['organization'], None)
+ repos = resp.data["results"]
+ self.assertEqual(repos[0]["id"], repo_b.id)
+ self.assertEqual(repos[0]["organization"], None)
self.assertEqual(len(repos), 1)
client.force_authenticate(user=user_c)
- resp = client.get('/api/v2/remote/repo/', format='json')
+ resp = client.get("/api/v2/remote/repo/", format="json")
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- repos = resp.data['results']
+ repos = resp.data["results"]
self.assertEqual(len(repos), 0)
-@mock.patch('readthedocs.core.views.hooks.trigger_build')
+@mock.patch("readthedocs.core.views.hooks.trigger_build")
class IntegrationsTests(TestCase):
"""Integration for webhooks, etc."""
- fixtures = ['eric.json', 'test_data.json']
+ fixtures = ["eric.json", "test_data.json"]
def setUp(self):
self.project = get(
@@ -1656,15 +1657,21 @@ def setUp(self):
default_branch="master",
)
self.version = get(
- Version, slug='master', verbose_name='master',
- active=True, project=self.project,
+ Version,
+ slug="master",
+ verbose_name="master",
+ active=True,
+ project=self.project,
)
self.version_tag = get(
- Version, slug='v1.0', verbose_name='v1.0',
- active=True, project=self.project,
+ Version,
+ slug="v1.0",
+ verbose_name="v1.0",
+ active=True,
+ project=self.project,
)
self.github_payload = {
- 'ref': 'master',
+ "ref": "master",
}
self.commit = "ec26de721c3235aad62de7213c562f8c821"
self.github_pull_request_payload = {
@@ -1683,33 +1690,33 @@ def setUp(self):
self.gitlab_merge_request_payload = {
"object_kind": GITLAB_MERGE_REQUEST,
"object_attributes": {
- "iid": '2',
- "last_commit": {
- "id": self.commit
- },
+ "iid": "2",
+ "last_commit": {"id": self.commit},
"action": "open",
"source_branch": "source_branch",
"target_branch": "master",
},
}
self.gitlab_payload = {
- 'object_kind': GITLAB_PUSH,
- 'ref': 'master',
- 'before': '95790bf891e76fee5e1747ab589903a6a1f80f22',
- 'after': '95790bf891e76fee5e1747ab589903a6a1f80f23',
+ "object_kind": GITLAB_PUSH,
+ "ref": "master",
+ "before": "95790bf891e76fee5e1747ab589903a6a1f80f22",
+ "after": "95790bf891e76fee5e1747ab589903a6a1f80f23",
}
self.bitbucket_payload = {
- 'push': {
- 'changes': [{
- 'new': {
- 'type': 'branch',
- 'name': 'master',
- },
- 'old': {
- 'type': 'branch',
- 'name': 'master',
- },
- }],
+ "push": {
+ "changes": [
+ {
+ "new": {
+ "type": "branch",
+ "name": "master",
+ },
+ "old": {
+ "type": "branch",
+ "name": "master",
+ },
+ }
+ ],
},
}
@@ -1740,7 +1747,7 @@ def test_webhook_skipped_project(self, trigger_build):
self.project.save()
response = client.post(
- '/api/v2/webhook/github/{}/'.format(
+ "/api/v2/webhook/github/{}/".format(
self.project.slug,
),
self.github_payload,
@@ -1751,14 +1758,18 @@ def test_webhook_skipped_project(self, trigger_build):
),
},
)
- self.assertDictEqual(response.data, {'detail': 'This project is currently disabled'})
+ self.assertDictEqual(
+ response.data, {"detail": "This project is currently disabled"}
+ )
self.assertEqual(response.status_code, status.HTTP_406_NOT_ACCEPTABLE)
self.assertFalse(trigger_build.called)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
- def test_sync_repository_custom_project_queue(self, sync_repository_task, trigger_build):
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
+ def test_sync_repository_custom_project_queue(
+ self, sync_repository_task, trigger_build
+ ):
client = APIClient()
- self.project.build_queue = 'specific-build-queue'
+ self.project.build_queue = "specific-build-queue"
self.project.save()
headers = {
@@ -1768,16 +1779,16 @@ def test_sync_repository_custom_project_queue(self, sync_repository_task, trigge
),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
self.github_payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
- self.assertTrue(resp.data['versions_synced'])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
+ self.assertTrue(resp.data["versions_synced"])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
@@ -1787,7 +1798,7 @@ def test_sync_repository_custom_project_queue(self, sync_repository_task, trigge
kwargs={
"build_api_key": mock.ANY,
},
- queue='specific-build-queue',
+ queue="specific-build-queue",
)
def test_github_webhook_for_branches(self, trigger_build):
@@ -1796,7 +1807,7 @@ def test_github_webhook_for_branches(self, trigger_build):
data = {"ref": "master"}
client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
data,
format="json",
headers={
@@ -1809,7 +1820,7 @@ def test_github_webhook_for_branches(self, trigger_build):
data = {"ref": "non-existent"}
client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
data,
format="json",
headers={
@@ -1822,7 +1833,7 @@ def test_github_webhook_for_branches(self, trigger_build):
data = {"ref": "refs/heads/master"}
client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
data,
format="json",
headers={
@@ -1839,7 +1850,7 @@ def test_github_webhook_for_tags(self, trigger_build):
data = {"ref": "v1.0"}
client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
data,
format="json",
headers={
@@ -1852,9 +1863,9 @@ def test_github_webhook_for_tags(self, trigger_build):
data = {"ref": "refs/heads/non-existent"}
client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
data,
- format='json',
+ format="json",
headers={
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, data),
},
@@ -1865,9 +1876,9 @@ def test_github_webhook_for_tags(self, trigger_build):
data = {"ref": "refs/tags/v1.0"}
client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
data,
- format='json',
+ format="json",
headers={
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, data),
},
@@ -1876,25 +1887,27 @@ def test_github_webhook_for_tags(self, trigger_build):
[mock.call(version=self.version_tag, project=self.project)],
)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
- def test_github_webhook_no_build_on_delete(self, sync_repository_task, trigger_build):
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
+ def test_github_webhook_no_build_on_delete(
+ self, sync_repository_task, trigger_build
+ ):
client = APIClient()
- payload = {'ref': 'master', 'deleted': True}
+ payload = {"ref": "master", "deleted": True}
headers = {
GITHUB_EVENT_HEADER: GITHUB_PUSH,
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
@@ -1922,7 +1935,7 @@ def test_github_ping_event(self, sync_repository_task, trigger_build):
trigger_build.assert_not_called()
sync_repository_task.assert_not_called()
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_github_create_event(self, sync_repository_task, trigger_build):
client = APIClient()
@@ -1933,22 +1946,22 @@ def test_github_create_event(self, sync_repository_task, trigger_build):
),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
self.github_payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
args=[latest_version.pk], kwargs={"build_api_key": mock.ANY}
)
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_github_pull_request_opened_event(self, trigger_build, core_trigger_build):
client = APIClient()
@@ -1959,32 +1972,31 @@ def test_github_pull_request_opened_event(self, trigger_build, core_trigger_buil
),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
self.github_pull_request_payload,
- format='json',
+ format="json",
headers=headers,
)
# get the created external version
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name='2')
+ external_version = self.project.versions(manager=EXTERNAL).get(verbose_name="2")
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [external_version.verbose_name])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [external_version.verbose_name])
core_trigger_build.assert_called_once_with(
- project=self.project,
- version=external_version, commit=self.commit
+ project=self.project, version=external_version, commit=self.commit
)
self.assertTrue(external_version)
- @mock.patch('readthedocs.core.utils.trigger_build')
- def test_github_pull_request_reopened_event(self, trigger_build, core_trigger_build):
+ @mock.patch("readthedocs.core.utils.trigger_build")
+ def test_github_pull_request_reopened_event(
+ self, trigger_build, core_trigger_build
+ ):
client = APIClient()
# Update the payload for `reopened` webhook event
- pull_request_number = '5'
+ pull_request_number = "5"
payload = self.github_pull_request_payload
payload["action"] = GITHUB_PULL_REQUEST_REOPENED
payload["number"] = pull_request_number
@@ -1994,32 +2006,33 @@ def test_github_pull_request_reopened_event(self, trigger_build, core_trigger_bu
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
# get the created external version
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=pull_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=pull_request_number
+ )
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [external_version.verbose_name])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [external_version.verbose_name])
core_trigger_build.assert_called_once_with(
- project=self.project,
- version=external_version, commit=self.commit
+ project=self.project, version=external_version, commit=self.commit
)
self.assertTrue(external_version)
- @mock.patch('readthedocs.core.utils.trigger_build')
- def test_github_pull_request_synchronize_event(self, trigger_build, core_trigger_build):
+ @mock.patch("readthedocs.core.utils.trigger_build")
+ def test_github_pull_request_synchronize_event(
+ self, trigger_build, core_trigger_build
+ ):
client = APIClient()
- pull_request_number = '6'
- prev_identifier = '95790bf891e76fee5e1747ab589903a6a1f80f23'
+ pull_request_number = "6"
+ prev_identifier = "95790bf891e76fee5e1747ab589903a6a1f80f23"
# create an existing external version for pull request
version = get(
Version,
@@ -2029,7 +2042,7 @@ def test_github_pull_request_synchronize_event(self, trigger_build, core_trigger
uploaded=True,
active=True,
verbose_name=pull_request_number,
- identifier=prev_identifier
+ identifier=prev_identifier,
)
# Update the payload for `synchronize` webhook event
@@ -2042,33 +2055,32 @@ def test_github_pull_request_synchronize_event(self, trigger_build, core_trigger
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
# get updated external version
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=pull_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=pull_request_number
+ )
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [external_version.verbose_name])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [external_version.verbose_name])
core_trigger_build.assert_called_once_with(
- project=self.project,
- version=external_version, commit=self.commit
+ project=self.project, version=external_version, commit=self.commit
)
# `synchronize` webhook event updated the identifier (commit hash)
self.assertNotEqual(prev_identifier, external_version.identifier)
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_github_pull_request_closed_event(self, trigger_build, core_trigger_build):
client = APIClient()
- pull_request_number = '7'
- identifier = '95790bf891e76fee5e1747ab589903a6a1f80f23'
+ pull_request_number = "7"
+ identifier = "95790bf891e76fee5e1747ab589903a6a1f80f23"
# create an existing external version for pull request
version = get(
Version,
@@ -2078,7 +2090,7 @@ def test_github_pull_request_closed_event(self, trigger_build, core_trigger_buil
uploaded=True,
active=True,
verbose_name=pull_request_number,
- identifier=identifier
+ identifier=identifier,
)
# Update the payload for `closed` webhook event
@@ -2092,14 +2104,14 @@ def test_github_pull_request_closed_event(self, trigger_build, core_trigger_buil
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=pull_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=pull_request_number
+ )
self.assertTrue(external_version.active)
self.assertEqual(external_version.state, EXTERNAL_VERSION_STATE_CLOSED)
@@ -2114,24 +2126,20 @@ def test_github_pull_request_no_action(self, trigger_build):
payload = {
"number": 2,
- "pull_request": {
- "head": {
- "sha": "ec26de721c3235aad62de7213c562f8c821"
- }
- }
+ "pull_request": {"head": {"sha": "ec26de721c3235aad62de7213c562f8c821"}},
}
headers = {
GITHUB_EVENT_HEADER: GITHUB_PULL_REQUEST,
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['detail'], 'Unhandled webhook event')
+ self.assertEqual(resp.data["detail"], "Unhandled webhook event")
def test_github_pull_request_opened_event_invalid_payload(self, trigger_build):
client = APIClient()
@@ -2142,9 +2150,9 @@ def test_github_pull_request_opened_event_invalid_payload(self, trigger_build):
}
headers = {GITHUB_EVENT_HEADER: GITHUB_PULL_REQUEST}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
@@ -2159,15 +2167,15 @@ def test_github_pull_request_closed_event_invalid_payload(self, trigger_build):
}
headers = {GITHUB_EVENT_HEADER: GITHUB_PULL_REQUEST}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 400)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_github_delete_event(self, sync_repository_task, trigger_build):
client = APIClient()
@@ -2178,15 +2186,15 @@ def test_github_delete_event(self, sync_repository_task, trigger_build):
),
}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
self.github_payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
@@ -2196,21 +2204,21 @@ def test_github_delete_event(self, sync_repository_task, trigger_build):
def test_github_parse_ref(self, trigger_build):
wh = GitHubWebhookView()
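+ # _normalize_ref() strips the "refs/heads/" and "refs/tags/" prefixes while keeping any remaining path segments.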
- self.assertEqual(wh._normalize_ref('refs/heads/master'), 'master')
- self.assertEqual(wh._normalize_ref('refs/heads/v0.1'), 'v0.1')
- self.assertEqual(wh._normalize_ref('refs/tags/v0.1'), 'v0.1')
- self.assertEqual(wh._normalize_ref('refs/tags/tag'), 'tag')
- self.assertEqual(wh._normalize_ref('refs/heads/stable/2018'), 'stable/2018')
- self.assertEqual(wh._normalize_ref('refs/tags/tag/v0.1'), 'tag/v0.1')
+ self.assertEqual(wh._normalize_ref("refs/heads/master"), "master")
+ self.assertEqual(wh._normalize_ref("refs/heads/v0.1"), "v0.1")
+ self.assertEqual(wh._normalize_ref("refs/tags/v0.1"), "v0.1")
+ self.assertEqual(wh._normalize_ref("refs/tags/tag"), "tag")
+ self.assertEqual(wh._normalize_ref("refs/heads/stable/2018"), "stable/2018")
+ self.assertEqual(wh._normalize_ref("refs/tags/tag/v0.1"), "tag/v0.1")
def test_github_invalid_webhook(self, trigger_build):
"""GitHub webhook unhandled event."""
client = APIClient()
payload = {"foo": "bar"}
resp = client.post(
- '/api/v2/webhook/github/{}/'.format(self.project.slug),
+ "/api/v2/webhook/github/{}/".format(self.project.slug),
payload,
- format='json',
+ format="json",
headers={
GITHUB_EVENT_HEADER: "issues",
GITHUB_SIGNATURE_HEADER: get_signature(
@@ -2219,30 +2227,24 @@ def test_github_invalid_webhook(self, trigger_build):
},
)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['detail'], 'Unhandled webhook event')
+ self.assertEqual(resp.data["detail"], "Unhandled webhook event")
def test_github_invalid_payload(self, trigger_build):
client = APIClient()
- wrong_signature = '1234'
+ wrong_signature = "1234"
self.assertNotEqual(self.github_integration.secret, wrong_signature)
headers = {
GITHUB_EVENT_HEADER: GITHUB_PUSH,
GITHUB_SIGNATURE_HEADER: wrong_signature,
}
resp = client.post(
- reverse(
- 'api_webhook_github',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_github", kwargs={"project_slug": self.project.slug}),
self.github_payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 400)
- self.assertEqual(
- resp.data['detail'],
- GitHubWebhookView.invalid_payload_msg
- )
+ self.assertEqual(resp.data["detail"], GitHubWebhookView.invalid_payload_msg)
def test_github_valid_payload(self, trigger_build):
client = APIClient()
@@ -2256,12 +2258,9 @@ def test_github_valid_payload(self, trigger_build):
GITHUB_SIGNATURE_HEADER: signature,
}
resp = client.post(
- reverse(
- 'api_webhook_github',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_github", kwargs={"project_slug": self.project.slug}),
json.loads(payload),
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 200)
@@ -2270,24 +2269,18 @@ def test_github_empty_signature(self, trigger_build):
client = APIClient()
headers = {
GITHUB_EVENT_HEADER: GITHUB_PUSH,
- GITHUB_SIGNATURE_HEADER: '',
+ GITHUB_SIGNATURE_HEADER: "",
}
resp = client.post(
- reverse(
- 'api_webhook_github',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_github", kwargs={"project_slug": self.project.slug}),
self.github_payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 400)
- self.assertEqual(
- resp.data['detail'],
- GitHubWebhookView.invalid_payload_msg
- )
+ self.assertEqual(resp.data["detail"], GitHubWebhookView.invalid_payload_msg)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task', mock.MagicMock())
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task", mock.MagicMock())
def test_github_sync_on_push_event(self, trigger_build):
"""Sync if the webhook doesn't have the create/delete events, but we receive a push event with created/deleted."""
self.github_integration.provider_data = {
@@ -2298,26 +2291,23 @@ def test_github_sync_on_push_event(self, trigger_build):
client = APIClient()
payload = {
- 'ref': 'master',
- 'created': True,
- 'deleted': False,
+ "ref": "master",
+ "created": True,
+ "deleted": False,
}
headers = {
GITHUB_EVENT_HEADER: GITHUB_PUSH,
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- reverse(
- 'api_webhook_github',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_github", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers=headers,
)
- self.assertTrue(resp.json()['versions_synced'])
+ self.assertTrue(resp.json()["versions_synced"])
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task', mock.MagicMock())
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task", mock.MagicMock())
def test_github_dont_trigger_double_sync(self, trigger_build):
"""Don't trigger a sync twice if the webhook has the create/delete events."""
self.github_integration.provider_data = {
@@ -2331,24 +2321,21 @@ def test_github_dont_trigger_double_sync(self, trigger_build):
client = APIClient()
payload = {
- 'ref': 'master',
- 'created': True,
- 'deleted': False,
+ "ref": "master",
+ "created": True,
+ "deleted": False,
}
headers = {
GITHUB_EVENT_HEADER: GITHUB_PUSH,
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- reverse(
- 'api_webhook_github',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_github", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers=headers,
)
- self.assertFalse(resp.json()['versions_synced'])
+ self.assertFalse(resp.json()["versions_synced"])
payload = {"ref": "master"}
headers = {
@@ -2356,15 +2343,12 @@ def test_github_dont_trigger_double_sync(self, trigger_build):
GITHUB_SIGNATURE_HEADER: get_signature(self.github_integration, payload),
}
resp = client.post(
- reverse(
- 'api_webhook_github',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_github", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers=headers,
)
- self.assertTrue(resp.json()['versions_synced'])
+ self.assertTrue(resp.json()["versions_synced"])
def test_github_get_external_version_data(self, trigger_build):
view = GitHubWebhookView(data=self.github_pull_request_payload)
@@ -2381,23 +2365,24 @@ def test_gitlab_webhook_for_branches(self, trigger_build):
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
}
client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers=headers,
)
trigger_build.assert_called_with(
- version=mock.ANY, project=self.project,
+ version=mock.ANY,
+ project=self.project,
)
trigger_build.reset_mock()
self.gitlab_payload.update(
- ref='non-existent',
+ ref="non-existent",
)
client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
)
trigger_build.assert_not_called()
@@ -2405,151 +2390,161 @@ def test_gitlab_webhook_for_tags(self, trigger_build):
client = APIClient()
self.gitlab_payload.update(
object_kind=GITLAB_TAG_PUSH,
- ref='v1.0',
+ ref="v1.0",
)
headers = {
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
}
client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers=headers,
)
trigger_build.assert_called_with(
- version=self.version_tag, project=self.project,
+ version=self.version_tag,
+ project=self.project,
)
trigger_build.reset_mock()
self.gitlab_payload.update(
- ref='refs/tags/v1.0',
+ ref="refs/tags/v1.0",
)
client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers=headers,
)
trigger_build.assert_called_with(
- version=self.version_tag, project=self.project,
+ version=self.version_tag,
+ project=self.project,
)
trigger_build.reset_mock()
self.gitlab_payload.update(
- ref='refs/heads/non-existent',
+ ref="refs/heads/non-existent",
)
client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers=headers,
)
trigger_build.assert_not_called()
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_gitlab_push_hook_creation(
- self, sync_repository_task, trigger_build,
+ self,
+ sync_repository_task,
+ trigger_build,
):
client = APIClient()
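+ # A null "before" hash marks a newly created branch, so the hook should sync versions instead of triggering a build.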
self.gitlab_payload.update(
before=GITLAB_NULL_HASH,
- after='95790bf891e76fee5e1747ab589903a6a1f80f22',
+ after="95790bf891e76fee5e1747ab589903a6a1f80f22",
)
resp = client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
args=[latest_version.pk], kwargs={"build_api_key": mock.ANY}
)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_gitlab_push_hook_deletion(
- self, sync_repository_task, trigger_build,
+ self,
+ sync_repository_task,
+ trigger_build,
):
client = APIClient()
self.gitlab_payload.update(
- before='95790bf891e76fee5e1747ab589903a6a1f80f22',
+ before="95790bf891e76fee5e1747ab589903a6a1f80f22",
after=GITLAB_NULL_HASH,
)
resp = client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
args=[latest_version.pk], kwargs={"build_api_key": mock.ANY}
)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_gitlab_tag_push_hook_creation(
- self, sync_repository_task, trigger_build,
+ self,
+ sync_repository_task,
+ trigger_build,
):
client = APIClient()
self.gitlab_payload.update(
object_kind=GITLAB_TAG_PUSH,
before=GITLAB_NULL_HASH,
- after='95790bf891e76fee5e1747ab589903a6a1f80f22',
+ after="95790bf891e76fee5e1747ab589903a6a1f80f22",
)
resp = client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
args=[latest_version.pk], kwargs={"build_api_key": mock.ANY}
)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_gitlab_tag_push_hook_deletion(
- self, sync_repository_task, trigger_build,
+ self,
+ sync_repository_task,
+ trigger_build,
):
client = APIClient()
self.gitlab_payload.update(
object_kind=GITLAB_TAG_PUSH,
- before='95790bf891e76fee5e1747ab589903a6a1f80f22',
+ before="95790bf891e76fee5e1747ab589903a6a1f80f22",
after=GITLAB_NULL_HASH,
)
resp = client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
self.gitlab_payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
@@ -2560,37 +2555,31 @@ def test_gitlab_invalid_webhook(self, trigger_build):
"""GitLab webhook unhandled event."""
client = APIClient()
resp = client.post(
- '/api/v2/webhook/gitlab/{}/'.format(self.project.slug),
- {'object_kind': 'pull_request'},
- format='json',
+ "/api/v2/webhook/gitlab/{}/".format(self.project.slug),
+ {"object_kind": "pull_request"},
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['detail'], 'Unhandled webhook event')
+ self.assertEqual(resp.data["detail"], "Unhandled webhook event")
def test_gitlab_invalid_payload(self, trigger_build):
client = APIClient()
- wrong_secret = '1234'
+ wrong_secret = "1234"
self.assertNotEqual(self.gitlab_integration.secret, wrong_secret)
headers = {
GITLAB_TOKEN_HEADER: wrong_secret,
}
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
self.gitlab_payload,
- format='json',
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 400)
- self.assertEqual(
- resp.data['detail'],
- GitLabWebhookView.invalid_payload_msg
- )
+ self.assertEqual(resp.data["detail"], GitLabWebhookView.invalid_payload_msg)
def test_gitlab_valid_payload(self, trigger_build):
client = APIClient()
@@ -2598,12 +2587,9 @@ def test_gitlab_valid_payload(self, trigger_build):
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
}
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
- {'object_kind': 'pull_request'},
- format='json',
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
+ {"object_kind": "pull_request"},
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 200)
@@ -2611,95 +2597,79 @@ def test_gitlab_valid_payload(self, trigger_build):
def test_gitlab_empty_token(self, trigger_build):
client = APIClient()
headers = {
- GITLAB_TOKEN_HEADER: '',
+ GITLAB_TOKEN_HEADER: "",
}
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
- {'object_kind': 'pull_request'},
- format='json',
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
+ {"object_kind": "pull_request"},
+ format="json",
headers=headers,
)
self.assertEqual(resp.status_code, 400)
- self.assertEqual(
- resp.data['detail'],
- GitLabWebhookView.invalid_payload_msg
- )
+ self.assertEqual(resp.data["detail"], GitLabWebhookView.invalid_payload_msg)
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_gitlab_merge_request_open_event(self, trigger_build, core_trigger_build):
client = APIClient()
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
self.gitlab_merge_request_payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
# get the created external version
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name='2')
+ external_version = self.project.versions(manager=EXTERNAL).get(verbose_name="2")
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [external_version.verbose_name])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [external_version.verbose_name])
core_trigger_build.assert_called_once_with(
- project=self.project,
- version=external_version, commit=self.commit
+ project=self.project, version=external_version, commit=self.commit
)
self.assertTrue(external_version)
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_gitlab_merge_request_reopen_event(self, trigger_build, core_trigger_build):
client = APIClient()
# Update the payload for `reopen` webhook event
- merge_request_number = '5'
+ merge_request_number = "5"
payload = self.gitlab_merge_request_payload
payload["object_attributes"]["action"] = GITLAB_MERGE_REQUEST_REOPEN
payload["object_attributes"]["iid"] = merge_request_number
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
# get the created external version
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=merge_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=merge_request_number
+ )
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [external_version.verbose_name])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [external_version.verbose_name])
core_trigger_build.assert_called_once_with(
- project=self.project,
- version=external_version, commit=self.commit
+ project=self.project, version=external_version, commit=self.commit
)
self.assertTrue(external_version)
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_gitlab_merge_request_update_event(self, trigger_build, core_trigger_build):
client = APIClient()
- merge_request_number = '6'
- prev_identifier = '95790bf891e76fee5e1747ab589903a6a1f80f23'
+ merge_request_number = "6"
+ prev_identifier = "95790bf891e76fee5e1747ab589903a6a1f80f23"
# create an existing external version for merge request
version = get(
Version,
@@ -2709,7 +2679,7 @@ def test_gitlab_merge_request_update_event(self, trigger_build, core_trigger_bui
uploaded=True,
active=True,
verbose_name=merge_request_number,
- identifier=prev_identifier
+ identifier=prev_identifier,
)
# Update the payload for merge request `update` webhook event
@@ -2718,38 +2688,34 @@ def test_gitlab_merge_request_update_event(self, trigger_build, core_trigger_bui
payload["object_attributes"]["iid"] = merge_request_number
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
# get updated external version
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=merge_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=merge_request_number
+ )
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [external_version.verbose_name])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [external_version.verbose_name])
core_trigger_build.assert_called_once_with(
- project=self.project,
- version=external_version, commit=self.commit
+ project=self.project, version=external_version, commit=self.commit
)
# `update` webhook event updated the identifier (commit hash)
self.assertNotEqual(prev_identifier, external_version.identifier)
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_gitlab_merge_request_close_event(self, trigger_build, core_trigger_build):
client = APIClient()
- merge_request_number = '7'
- identifier = '95790bf891e76fee5e1747ab589903a6a1f80f23'
+ merge_request_number = "7"
+ identifier = "95790bf891e76fee5e1747ab589903a6a1f80f23"
# create an existing external version for merge request
version = get(
Version,
@@ -2759,7 +2725,7 @@ def test_gitlab_merge_request_close_event(self, trigger_build, core_trigger_buil
uploaded=True,
active=True,
verbose_name=merge_request_number,
- identifier=identifier
+ identifier=identifier,
)
# Update the payload for `closed` webhook event
@@ -2769,19 +2735,16 @@ def test_gitlab_merge_request_close_event(self, trigger_build, core_trigger_buil
payload["object_attributes"]["last_commit"]["id"] = identifier
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=merge_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=merge_request_number
+ )
self.assertTrue(external_version.active)
self.assertEqual(external_version.state, EXTERNAL_VERSION_STATE_CLOSED)
@@ -2791,12 +2754,12 @@ def test_gitlab_merge_request_close_event(self, trigger_build, core_trigger_buil
self.assertEqual(resp.data["versions"], [version.verbose_name])
core_trigger_build.assert_not_called()
- @mock.patch('readthedocs.core.utils.trigger_build')
+ @mock.patch("readthedocs.core.utils.trigger_build")
def test_gitlab_merge_request_merge_event(self, trigger_build, core_trigger_build):
client = APIClient()
- merge_request_number = '8'
- identifier = '95790bf891e76fee5e1747ab589903a6a1f80f23'
+ merge_request_number = "8"
+ identifier = "95790bf891e76fee5e1747ab589903a6a1f80f23"
# create an existing external version for merge request
version = get(
Version,
@@ -2806,7 +2769,7 @@ def test_gitlab_merge_request_merge_event(self, trigger_build, core_trigger_buil
uploaded=True,
active=True,
verbose_name=merge_request_number,
- identifier=identifier
+ identifier=identifier,
)
# Update the payload for `merge` webhook event
@@ -2816,19 +2779,16 @@ def test_gitlab_merge_request_merge_event(self, trigger_build, core_trigger_buil
payload["object_attributes"]["last_commit"]["id"] = identifier
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
- external_version = self.project.versions(
- manager=EXTERNAL
- ).get(verbose_name=merge_request_number)
+ external_version = self.project.versions(manager=EXTERNAL).get(
+ verbose_name=merge_request_number
+ )
# external version is kept active rather than deleted
self.assertTrue(external_version.active)
@@ -2846,42 +2806,32 @@ def test_gitlab_merge_request_no_action(self, trigger_build):
"object_kind": GITLAB_MERGE_REQUEST,
"object_attributes": {
"iid": 2,
- "last_commit": {
- "id": self.commit
- },
+ "last_commit": {"id": self.commit},
},
}
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
headers={
GITLAB_TOKEN_HEADER: self.gitlab_integration.secret,
},
)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['detail'], 'Unhandled webhook event')
+ self.assertEqual(resp.data["detail"], "Unhandled webhook event")
def test_gitlab_merge_request_open_event_invalid_payload(self, trigger_build):
client = APIClient()
payload = {
"object_kind": GITLAB_MERGE_REQUEST,
- "object_attributes": {
- "action": GITLAB_MERGE_REQUEST_CLOSE
- },
+ "object_attributes": {"action": GITLAB_MERGE_REQUEST_CLOSE},
}
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, 400)
@@ -2891,18 +2841,13 @@ def test_gitlab_merge_request_close_event_invalid_payload(self, trigger_build):
payload = {
"object_kind": GITLAB_MERGE_REQUEST,
- "object_attributes": {
- "action": GITLAB_MERGE_REQUEST_CLOSE
- },
+ "object_attributes": {"action": GITLAB_MERGE_REQUEST_CLOSE},
}
resp = client.post(
- reverse(
- 'api_webhook_gitlab',
- kwargs={'project_slug': self.project.slug}
- ),
+ reverse("api_webhook_gitlab", kwargs={"project_slug": self.project.slug}),
payload,
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, 400)
@@ -2919,9 +2864,9 @@ def test_bitbucket_webhook(self, trigger_build):
"""Bitbucket webhook API."""
client = APIClient()
client.post(
- '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ "/api/v2/webhook/bitbucket/{}/".format(self.project.slug),
self.bitbucket_payload,
- format='json',
+ format="json",
headers={
BITBUCKET_SIGNATURE_HEADER: get_signature(
self.bitbucket_integration, self.bitbucket_payload
@@ -2932,18 +2877,18 @@ def test_bitbucket_webhook(self, trigger_build):
[mock.call(version=mock.ANY, project=self.project)],
)
client.post(
- '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ "/api/v2/webhook/bitbucket/{}/".format(self.project.slug),
{
- 'push': {
- 'changes': [
+ "push": {
+ "changes": [
{
- 'new': {'name': 'non-existent'},
- 'old': {'name': 'master'},
+ "new": {"name": "non-existent"},
+ "old": {"name": "master"},
},
],
},
},
- format='json',
+ format="json",
)
trigger_build.assert_has_calls(
[mock.call(version=mock.ANY, project=self.project)],
@@ -2951,30 +2896,32 @@ def test_bitbucket_webhook(self, trigger_build):
trigger_build_call_count = trigger_build.call_count
client.post(
- '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ "/api/v2/webhook/bitbucket/{}/".format(self.project.slug),
{
- 'push': {
- 'changes': [
+ "push": {
+ "changes": [
{
- 'new': None,
+ "new": None,
},
],
},
},
- format='json',
+ format="json",
)
self.assertEqual(trigger_build_call_count, trigger_build.call_count)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_bitbucket_push_hook_creation(
- self, sync_repository_task, trigger_build,
+ self,
+ sync_repository_task,
+ trigger_build,
):
client = APIClient()
- self.bitbucket_payload['push']['changes'][0]['old'] = None
+ self.bitbucket_payload["push"]["changes"][0]["old"] = None
resp = client.post(
- '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ "/api/v2/webhook/bitbucket/{}/".format(self.project.slug),
self.bitbucket_payload,
- format='json',
+ format="json",
headers={
BITBUCKET_SIGNATURE_HEADER: get_signature(
self.bitbucket_integration, self.bitbucket_payload
@@ -2982,25 +2929,27 @@ def test_bitbucket_push_hook_creation(
},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
args=[latest_version.pk], kwargs={"build_api_key": mock.ANY}
)
- @mock.patch('readthedocs.core.views.hooks.sync_repository_task')
+ @mock.patch("readthedocs.core.views.hooks.sync_repository_task")
def test_bitbucket_push_hook_deletion(
- self, sync_repository_task, trigger_build,
+ self,
+ sync_repository_task,
+ trigger_build,
):
client = APIClient()
- self.bitbucket_payload['push']['changes'][0]['new'] = None
+ self.bitbucket_payload["push"]["changes"][0]["new"] = None
resp = client.post(
- '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ "/api/v2/webhook/bitbucket/{}/".format(self.project.slug),
self.bitbucket_payload,
- format='json',
+ format="json",
headers={
BITBUCKET_SIGNATURE_HEADER: get_signature(
self.bitbucket_integration, self.bitbucket_payload
@@ -3008,9 +2957,9 @@ def test_bitbucket_push_hook_deletion(
},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
- self.assertFalse(resp.data['build_triggered'])
- self.assertEqual(resp.data['project'], self.project.slug)
- self.assertEqual(resp.data['versions'], [LATEST])
+ self.assertFalse(resp.data["build_triggered"])
+ self.assertEqual(resp.data["project"], self.project.slug)
+ self.assertEqual(resp.data["versions"], [LATEST])
trigger_build.assert_not_called()
latest_version = self.project.versions.get(slug=LATEST)
sync_repository_task.apply_async.assert_called_with(
@@ -3022,7 +2971,7 @@ def test_bitbucket_invalid_webhook(self, trigger_build):
client = APIClient()
payload = {"foo": "bar"}
resp = client.post(
- '/api/v2/webhook/bitbucket/{}/'.format(self.project.slug),
+ "/api/v2/webhook/bitbucket/{}/".format(self.project.slug),
payload,
format="json",
headers={
@@ -3033,26 +2982,26 @@ def test_bitbucket_invalid_webhook(self, trigger_build):
},
)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['detail'], 'Unhandled webhook event')
+ self.assertEqual(resp.data["detail"], "Unhandled webhook event")
def test_generic_api_fails_without_auth(self, trigger_build):
client = APIClient()
resp = client.post(
- '/api/v2/webhook/generic/{}/'.format(self.project.slug),
+ "/api/v2/webhook/generic/{}/".format(self.project.slug),
{},
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, 403)
self.assertEqual(
- resp.data['detail'],
- 'Authentication credentials were not provided.',
+ resp.data["detail"],
+ "Authentication credentials were not provided.",
)
def test_generic_api_respects_token_auth(self, trigger_build):
client = APIClient()
self.assertIsNotNone(self.generic_integration.token)
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
self.generic_integration.pk,
),
@@ -3060,10 +3009,10 @@ def test_generic_api_respects_token_auth(self, trigger_build):
format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertTrue(resp.data['build_triggered'])
+ self.assertTrue(resp.data["build_triggered"])
# Test nonexistent branch
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
self.generic_integration.pk,
),
@@ -3071,7 +3020,7 @@ def test_generic_api_respects_token_auth(self, trigger_build):
format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertFalse(resp.data['build_triggered'])
+ self.assertFalse(resp.data["build_triggered"])
def test_generic_api_respects_basic_auth(self, trigger_build):
client = APIClient()
@@ -3079,31 +3028,32 @@ def test_generic_api_respects_basic_auth(self, trigger_build):
self.project.users.add(user)
client.force_authenticate(user=user)
resp = client.post(
- '/api/v2/webhook/generic/{}/'.format(self.project.slug),
+ "/api/v2/webhook/generic/{}/".format(self.project.slug),
{},
- format='json',
+ format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertTrue(resp.data['build_triggered'])
+ self.assertTrue(resp.data["build_triggered"])
def test_generic_api_falls_back_to_token_auth(self, trigger_build):
client = APIClient()
user = get(User)
client.force_authenticate(user=user)
integration = Integration.objects.create(
- project=self.project, integration_type=Integration.API_WEBHOOK,
+ project=self.project,
+ integration_type=Integration.API_WEBHOOK,
)
self.assertIsNotNone(integration.token)
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
integration.pk,
),
- {'token': integration.token},
- format='json',
+ {"token": integration.token},
+ format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertTrue(resp.data['build_triggered'])
+ self.assertTrue(resp.data["build_triggered"])
def test_webhook_doesnt_build_latest_if_is_deactivated(self, trigger_build):
client = APIClient()
@@ -3116,20 +3066,20 @@ def test_webhook_doesnt_build_latest_if_is_deactivated(self, trigger_build):
latest_version.active = False
latest_version.save()
- default_branch = self.project.versions.get(slug='master')
+ default_branch = self.project.versions.get(slug="master")
default_branch.active = False
default_branch.save()
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
integration.pk,
),
- {'token': integration.token, 'branches': default_branch.slug},
- format='json',
+ {"token": integration.token, "branches": default_branch.slug},
+ format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertFalse(resp.data['build_triggered'])
+ self.assertFalse(resp.data["build_triggered"])
trigger_build.assert_not_called()
def test_webhook_builds_only_master(self, trigger_build):
@@ -3143,22 +3093,22 @@ def test_webhook_builds_only_master(self, trigger_build):
latest_version.active = False
latest_version.save()
- default_branch = self.project.versions.get(slug='master')
+ default_branch = self.project.versions.get(slug="master")
self.assertFalse(latest_version.active)
self.assertTrue(default_branch.active)
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
integration.pk,
),
- {'token': integration.token, 'branches': default_branch.slug},
- format='json',
+ {"token": integration.token, "branches": default_branch.slug},
+ format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['versions'], ['master'])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["versions"], ["master"])
def test_webhook_build_latest_and_master(self, trigger_build):
client = APIClient()
@@ -3168,13 +3118,13 @@ def test_webhook_build_latest_and_master(self, trigger_build):
)
latest_version = self.project.versions.get(slug=LATEST)
- default_branch = self.project.versions.get(slug='master')
+ default_branch = self.project.versions.get(slug="master")
self.assertTrue(latest_version.active)
self.assertTrue(default_branch.active)
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
integration.pk,
),
@@ -3186,8 +3136,8 @@ def test_webhook_build_latest_and_master(self, trigger_build):
format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(set(resp.data['versions']), {'latest', 'master'})
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(set(resp.data["versions"]), {"latest", "master"})
def test_webhook_build_another_branch(self, trigger_build):
client = APIClient()
@@ -3196,21 +3146,21 @@ def test_webhook_build_another_branch(self, trigger_build):
integration_type=Integration.API_WEBHOOK,
)
- version_v1 = self.project.versions.get(slug='v1.0')
+ version_v1 = self.project.versions.get(slug="v1.0")
self.assertTrue(version_v1.active)
resp = client.post(
- '/api/v2/webhook/{}/{}/'.format(
+ "/api/v2/webhook/{}/{}/".format(
self.project.slug,
integration.pk,
),
- {'token': integration.token, 'branches': version_v1.slug},
- format='json',
+ {"token": integration.token, "branches": version_v1.slug},
+ format="json",
)
self.assertEqual(resp.status_code, 200)
- self.assertTrue(resp.data['build_triggered'])
- self.assertEqual(resp.data['versions'], ['v1.0'])
+ self.assertTrue(resp.data["build_triggered"])
+ self.assertEqual(resp.data["versions"], ["v1.0"])
def test_dont_allow_webhooks_without_a_secret(self, trigger_build):
client = APIClient()
@@ -3240,7 +3190,7 @@ def test_dont_allow_webhooks_without_a_secret(self, trigger_build):
@override_settings(PUBLIC_DOMAIN="readthedocs.io")
class APIVersionTests(TestCase):
- fixtures = ['eric', 'test_data']
+ fixtures = ["eric", "test_data"]
maxDiff = None # So we get an actual diff when it fails
def test_get_version_by_id(self):
@@ -3250,12 +3200,12 @@ def test_get_version_by_id(self):
Allows us to notice changes in the fields returned by the endpoint
instead of letting them pass silently.
"""
- pip = Project.objects.get(slug='pip')
- version = pip.versions.get(slug='0.8')
+ pip = Project.objects.get(slug="pip")
+ version = pip.versions.get(slug="0.8")
_, build_api_key = BuildAPIKey.objects.create_key(pip)
data = {
- 'pk': version.pk,
+ "pk": version.pk,
}
resp = self.client.get(
reverse("version-detail", kwargs=data),
@@ -3276,7 +3226,6 @@ def test_get_version_by_id(self):
"analytics_disabled": False,
"canonical_url": "http://pip.readthedocs.io/en/latest/",
"cdn_enabled": False,
- "conf_py_file": "",
"container_image": None,
"container_mem_limit": None,
"container_time_limit": None,
@@ -3285,18 +3234,14 @@ def test_get_version_by_id(self):
"description": "",
"documentation_type": "sphinx",
"environment_variables": {},
- "enable_epub_build": True,
- "enable_pdf_build": True,
"features": [],
"has_valid_clone": False,
"has_valid_webhook": False,
"id": 6,
- "install_project": False,
"language": "en",
"max_concurrent_builds": None,
"name": "Pip",
"programming_language": "words",
- "python_interpreter": "python3",
"repo": "https://github.com/pypa/pip",
"repo_type": "git",
"requirements_file": None,
@@ -3326,28 +3271,30 @@ def test_get_version_by_id(self):
def test_get_active_versions(self):
"""Test the full response of
``/api/v2/version/?project__slug=pip&active=true``"""
- pip = Project.objects.get(slug='pip')
+ pip = Project.objects.get(slug="pip")
get(Version, project=pip, active=False, privacy_level=PUBLIC)
data = {
- 'project__slug': pip.slug,
- 'active': 'true',
+ "project__slug": pip.slug,
+ "active": "true",
}
url = reverse("version-list")
with self.assertNumQueries(5):
resp = self.client.get(url, data)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['count'], pip.versions.filter(active=True).count())
+ self.assertEqual(resp.data["count"], pip.versions.filter(active=True).count())
# Do the same thing for inactive versions
- data.update({
- 'active': 'false',
- })
+ data.update(
+ {
+ "active": "false",
+ }
+ )
with self.assertNumQueries(5):
resp = self.client.get(url, data)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['count'], pip.versions.filter(active=False).count())
+ self.assertEqual(resp.data["count"], pip.versions.filter(active=False).count())
def test_project_get_active_versions(self):
pip = Project.objects.get(slug="pip")
@@ -3359,12 +3306,12 @@ def test_project_get_active_versions(self):
)
def test_modify_version(self):
- pip = Project.objects.get(slug='pip')
- version = pip.versions.get(slug='0.8')
+ pip = Project.objects.get(slug="pip")
+ version = pip.versions.get(slug="0.8")
_, build_api_key = BuildAPIKey.objects.create_key(pip)
data = {
- 'pk': version.pk,
+ "pk": version.pk,
}
resp = self.client.patch(
reverse("version-detail", kwargs=data),
@@ -3373,7 +3320,7 @@ def test_modify_version(self):
headers={"authorization": f"Token {build_api_key}"},
)
self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp.data['built'], False)
- self.assertEqual(resp.data['has_pdf'], True)
- self.assertEqual(resp.data['has_epub'], False)
- self.assertEqual(resp.data['has_htmlzip'], False)
+ self.assertEqual(resp.data["built"], False)
+ self.assertEqual(resp.data["has_pdf"], True)
+ self.assertEqual(resp.data["has_epub"], False)
+ self.assertEqual(resp.data["has_htmlzip"], False)
diff --git a/readthedocs/rtd_tests/tests/test_privacy.py b/readthedocs/rtd_tests/tests/test_privacy.py
index 4014bf08ac2..e45afbf4da6 100644
--- a/readthedocs/rtd_tests/tests/test_privacy.py
+++ b/readthedocs/rtd_tests/tests/test_privacy.py
@@ -1,6 +1,6 @@
-import structlog
from unittest import mock
+import structlog
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
@@ -48,7 +48,6 @@ def _create_kong(
default_branch="",
project_url="http://django-kong.rtfd.org",
default_version=LATEST,
- python_interpreter="python",
description="OOHHH AH AH AH KONG SMASH",
documentation_type="sphinx",
)
diff --git a/readthedocs/rtd_tests/tests/test_project_forms.py b/readthedocs/rtd_tests/tests/test_project_forms.py
index df4d57c813d..9572df57a29 100644
--- a/readthedocs/rtd_tests/tests/test_project_forms.py
+++ b/readthedocs/rtd_tests/tests/test_project_forms.py
@@ -245,7 +245,6 @@ def test_cant_update_privacy_level(self):
{
"default_version": LATEST,
"documentation_type": SPHINX,
- "python_interpreter": "python3",
"privacy_level": PRIVATE,
"versioning_scheme": MULTIPLE_VERSIONS_WITH_TRANSLATIONS,
},
@@ -261,7 +260,6 @@ def test_can_update_privacy_level(self):
{
"default_version": LATEST,
"documentation_type": SPHINX,
- "python_interpreter": "python3",
"privacy_level": PRIVATE,
"external_builds_privacy_level": PRIVATE,
"versioning_scheme": MULTIPLE_VERSIONS_WITH_TRANSLATIONS,
@@ -279,7 +277,6 @@ def test_custom_readthedocs_yaml(self, update_docs_task):
{
"default_version": LATEST,
"documentation_type": SPHINX,
- "python_interpreter": "python3",
"privacy_level": PRIVATE,
"readthedocs_yaml_path": custom_readthedocs_yaml_path,
"versioning_scheme": MULTIPLE_VERSIONS_WITH_TRANSLATIONS,
diff --git a/readthedocs/rtd_tests/tests/test_project_views.py b/readthedocs/rtd_tests/tests/test_project_views.py
index 339ac9de311..d539a0c3774 100644
--- a/readthedocs/rtd_tests/tests/test_project_views.py
+++ b/readthedocs/rtd_tests/tests/test_project_views.py
@@ -121,7 +121,7 @@ def test_form_pass(self):
self.assertIsNotNone(proj)
for key, val in list(self.step_data["basics"].items()):
self.assertEqual(getattr(proj, key), val)
- self.assertEqual(proj.documentation_type, "sphinx")
+ self.assertIsNone(proj.documentation_type)
def test_remote_repository_is_added(self):
remote_repo = get(RemoteRepository, default_branch="default-branch")
diff --git a/readthedocs/rtd_tests/tests/test_redirects.py b/readthedocs/rtd_tests/tests/test_redirects.py
index 804d46589ef..da78fd3b1a9 100644
--- a/readthedocs/rtd_tests/tests/test_redirects.py
+++ b/readthedocs/rtd_tests/tests/test_redirects.py
@@ -116,7 +116,6 @@ def setUp(self):
Project,
slug="project-1",
documentation_type="sphinx",
- conf_py_file="test_conf.py",
versions=[fixture()],
)
self.version = self.project.versions.all()[0]