Skip to content

Remove usage of project.documentation_type in tasks #4896

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions readthedocs/core/management/commands/reindex_elasticsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

from __future__ import absolute_import
import logging
from optparse import make_option

from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
Expand Down Expand Up @@ -52,7 +51,11 @@ def handle(self, *args, **options):
commit = None

try:
update_search(version.pk, commit,
delete_non_commit_files=False)
update_search(
version.pk,
commit,
doctype=version.project.documentation_type,
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@agjohnson this is the only place where I'm worried about passing a config object; we don't have one here.

delete_non_commit_files=False
)
except Exception as e:
log.exception('Reindex failed for %s, %s', version, e)
114 changes: 59 additions & 55 deletions readthedocs/projects/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -676,6 +676,7 @@ def update_app_instances(self, html=False, localmedia=False, search=False,
args=[
self.project.pk,
self.version.pk,
self.config.doctype,
],
kwargs=dict(
hostname=socket.gethostname(),
Expand All @@ -688,6 +689,7 @@ def update_app_instances(self, html=False, localmedia=False, search=False,
callback=sync_callback.s(
version_pk=self.version.pk,
commit=self.build['commit'],
doctype=self.config.doctype,
),
)

Expand Down Expand Up @@ -759,7 +761,9 @@ def build_docs_html(self):
broadcast(
type='app',
task=move_files,
args=[self.version.pk, socket.gethostname()],
args=[
self.version.pk, socket.gethostname(), self.config.doctype
],
kwargs=dict(html=True)
)
except socket.error:
Expand Down Expand Up @@ -828,7 +832,7 @@ def is_type_sphinx(self):

# Web tasks
@app.task(queue='web')
def sync_files(project_pk, version_pk, hostname=None, html=False,
def sync_files(project_pk, version_pk, doctype, hostname=None, html=False,
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm passing only the doctype here rather than the whole config object, since we only use the doctype; but if someone thinks it would be useful to pass the config, I can change it.

localmedia=False, search=False, pdf=False, epub=False):
"""
Sync build artifacts to application instances.
Expand Down Expand Up @@ -857,6 +861,7 @@ def sync_files(project_pk, version_pk, hostname=None, html=False,
move_files(
version_pk,
hostname,
doctype,
html=html,
localmedia=localmedia,
search=search,
Expand All @@ -872,7 +877,7 @@ def sync_files(project_pk, version_pk, hostname=None, html=False,


@app.task(queue='web')
def move_files(version_pk, hostname, html=False, localmedia=False,
def move_files(version_pk, hostname, doctype, html=False, localmedia=False,
search=False, pdf=False, epub=False):
"""
Task to move built documentation to web servers.
Expand Down Expand Up @@ -902,63 +907,62 @@ def move_files(version_pk, hostname, html=False, localmedia=False,
if html:
from_path = version.project.artifact_path(
version=version.slug,
type_=version.project.documentation_type,
type_=doctype,
)
target = version.project.rtd_build_path(version.slug)
Syncer.copy(from_path, target, host=hostname)

if 'sphinx' in version.project.documentation_type:
if search:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_search',
)
to_path = version.project.get_production_media_path(
type_='json',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)
if search:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_search',
)
to_path = version.project.get_production_media_path(
type_='json',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)

if localmedia:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_localmedia',
)
to_path = version.project.get_production_media_path(
type_='htmlzip',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)
if localmedia:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_localmedia',
)
to_path = version.project.get_production_media_path(
type_='htmlzip',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)

# Always move PDF's because the return code lies.
if pdf:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_pdf',
)
to_path = version.project.get_production_media_path(
type_='pdf',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)
if epub:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_epub',
)
to_path = version.project.get_production_media_path(
type_='epub',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)
# Always move PDF's because the return code lies.
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This comment looks outdated

if pdf:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_pdf',
)
to_path = version.project.get_production_media_path(
type_='pdf',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)
if epub:
from_path = version.project.artifact_path(
version=version.slug,
type_='sphinx_epub',
)
to_path = version.project.get_production_media_path(
type_='epub',
version_slug=version.slug,
include_file=False,
)
Syncer.copy(from_path, to_path, host=hostname)


@app.task(queue='web')
def update_search(version_pk, commit, delete_non_commit_files=True):
def update_search(version_pk, commit, doctype, delete_non_commit_files=True):
"""
Task to update search indexes.

Expand All @@ -968,12 +972,12 @@ def update_search(version_pk, commit, delete_non_commit_files=True):
"""
version = Version.objects.get(pk=version_pk)

if 'sphinx' in version.project.documentation_type:
if 'sphinx' in doctype:
page_list = process_all_json_files(version, build_dir=False)
else:
log.debug(
'Unknown documentation type: %s',
version.project.documentation_type
doctype
)
return

Expand Down Expand Up @@ -1310,14 +1314,14 @@ def clear_artifacts(paths):


@app.task(queue='web')
def sync_callback(_, version_pk, commit, *args, **kwargs):
def sync_callback(_, version_pk, commit, doctype, *args, **kwargs):
"""
Called once the sync_files tasks are done.

The first argument is the result from previous tasks, which we discard.
"""
fileify(version_pk, commit=commit)
update_search(version_pk, commit=commit)
update_search(version_pk, commit=commit, doctype=doctype)


@app.task()
Expand Down
115 changes: 115 additions & 0 deletions readthedocs/rtd_tests/tests/test_projects_tasks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
from __future__ import division, print_function, unicode_literals

from django.test import TestCase
from django_dynamic_fixture import get
from mock import patch

from readthedocs.builds.models import Version
from readthedocs.projects.models import Project
from readthedocs.projects.tasks import sync_files


class SyncFilesTests(TestCase):

    """
    Tests for the ``sync_files`` task.

    Each test triggers the sync of a single artifact type and verifies that
    (a) the stale production media for the other types is removed and
    (b) the built artifact is copied to the expected destination.
    """

    def setUp(self):
        self.project = get(Project)
        self.version = get(Version, project=self.project)

    def _assert_cleaned(self, rmtree, *media_types):
        """Assert ``rmtree`` removed exactly *media_types*, in order."""
        self.assertEqual(len(rmtree.call_args_list), len(media_types))
        for call, media_type in zip(rmtree.call_args_list, media_types):
            args, _ = call
            self.assertIn(media_type, args[0])

    def _assert_copied(self, copy, artifact_type, destination):
        """Assert one copy from the *artifact_type* build dir to *destination*."""
        copy.assert_called_once()
        args, _ = copy.call_args
        self.assertIn('artifacts', args[0])
        self.assertIn(artifact_type, args[0])
        self.assertIn(destination, args[1])

    @patch('readthedocs.builds.syncers.Syncer.copy')
    @patch('readthedocs.projects.tasks.shutil.rmtree')
    def test_sync_files_clean_old_artifacts(self, rmtree, copy):
        sync_files(self.project.pk, self.version.pk, 'sphinx', html=True)

        # pdf and epub are cleaned
        self._assert_cleaned(rmtree, 'pdf', 'epub')

        # Artifacts are copied to the rtd-builds directory
        self._assert_copied(copy, 'sphinx', 'rtd-builds')

    @patch('readthedocs.builds.syncers.Syncer.copy')
    @patch('readthedocs.projects.tasks.shutil.rmtree')
    def test_sync_files_pdf(self, rmtree, copy):
        sync_files(
            self.project.pk, self.version.pk, 'sphinx', pdf=True
        )

        # epub is cleaned
        self._assert_cleaned(rmtree, 'epub')

        # Artifacts are copied to the media directory
        self._assert_copied(copy, 'sphinx_pdf', 'media/pdf')

    @patch('readthedocs.builds.syncers.Syncer.copy')
    @patch('readthedocs.projects.tasks.shutil.rmtree')
    def test_sync_files_epub(self, rmtree, copy):
        sync_files(
            self.project.pk, self.version.pk, 'sphinx', epub=True
        )

        # pdf is cleaned
        self._assert_cleaned(rmtree, 'pdf')

        # Artifacts are copied to the media directory
        self._assert_copied(copy, 'sphinx_epub', 'media/epub')

    @patch('readthedocs.builds.syncers.Syncer.copy')
    @patch('readthedocs.projects.tasks.shutil.rmtree')
    def test_sync_files_localmedia(self, rmtree, copy):
        sync_files(
            self.project.pk, self.version.pk, 'sphinx', localmedia=True
        )

        # pdf and epub are cleaned
        self._assert_cleaned(rmtree, 'pdf', 'epub')

        # Artifacts are copied to the media directory
        self._assert_copied(copy, 'sphinx_localmedia', 'media/htmlzip')

    @patch('readthedocs.builds.syncers.Syncer.copy')
    @patch('readthedocs.projects.tasks.shutil.rmtree')
    def test_sync_files_search(self, rmtree, copy):
        sync_files(
            self.project.pk, self.version.pk, 'sphinx', search=True
        )

        # pdf and epub are cleaned
        self._assert_cleaned(rmtree, 'pdf', 'epub')

        # Artifacts are copied to the media directory
        self._assert_copied(copy, 'sphinx_search', 'media/json')