Search: generate full link from the server side #7070

Merged (8 commits) on May 14, 2020
Changes from 3 commits
15 changes: 9 additions & 6 deletions readthedocs/search/api.py
@@ -45,8 +45,10 @@ def get_link(self, obj):
         # Generate an appropriate link for the doctypes that use htmldir,
         # and always end it with / so it goes directly to proxito.
         if doctype in {SPHINX_HTMLDIR, MKDOCS}:
-            new_path = re.sub('(^|/)index.html$', '', path)
-            path = path.rstrip('/') + '/'
+            new_path = re.sub('(^|/)index.html$', '/', path)
+            # docs_url already ends with /,
+            # make sure to not include it twice.
+            path = new_path.lstrip('/')

         return docs_url + path
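For clarity, here is a minimal standalone sketch of the new path handling above, with hypothetical paths and a hypothetical docs_url (none of these values come from the PR itself). Other doctypes fall through to return docs_url + path unchanged in this hunk.

import re

def htmldir_link(docs_url, path):
    # Mirrors the new logic: collapse a trailing 'index.html' into '/',
    # then drop the leading '/' because docs_url already ends with one.
    new_path = re.sub('(^|/)index.html$', '/', path)
    return docs_url + new_path.lstrip('/')

docs_url = 'https://docs.example.com/en/latest/'   # hypothetical
htmldir_link(docs_url, 'index.html')         # 'https://docs.example.com/en/latest/'
htmldir_link(docs_url, 'guides/index.html')  # 'https://docs.example.com/en/latest/guides/'
htmldir_link(docs_url, 'support.html')       # 'https://docs.example.com/en/latest/support.html'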

@@ -221,17 +223,18 @@ def get_all_projects_data(self):
"""
all_projects = self.get_all_projects()
version_slug = self._get_version().slug
projects_url = {}
project_urls = {}
for project in all_projects:
projects_url[project.slug] = project.get_docs_url(version_slug=version_slug)
project_urls[project.slug] = project.get_docs_url(version_slug=version_slug)

versions_doctype = (
Version.objects
.filter(project__slug__in=projects_url.keys(), slug=version_slug)
.filter(project__slug__in=project_urls.keys(), slug=version_slug)
.values_list('project__slug', 'documentation_type')
)

projects_data = {
project_slug: (projects_url[project_slug], doctype)
project_slug: (project_urls[project_slug], doctype)
for project_slug, doctype in versions_doctype
}
return projects_data
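The rename from projects_url to project_urls above does not change behaviour; the method still returns a mapping of project slug to a (docs URL, doctype) tuple. A hypothetical example of the returned shape, with slugs, URLs, and doctypes invented for illustration:

projects_data = {
    'docs': ('https://docs.example.com/en/latest/', 'sphinx_htmldir'),
    'kuma': ('https://kuma.example.com/en/latest/', 'sphinx'),
}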
8 changes: 7 additions & 1 deletion readthedocs/search/tests/conftest.py
@@ -41,7 +41,13 @@ def all_projects(es_index, mock_processed_json, db, settings):
             # file_basename in config are without extension so add html extension
             file_name = file_basename + '.html'
             version = project.versions.all()[0]
-            html_file = G(HTMLFile, project=project, version=version, name=file_name)
+            html_file = G(
+                HTMLFile,
+                project=project,
+                version=version,
+                name=file_name,
+                path=file_name,
+            )

             # creating sphinx domain test objects
             file_path = get_json_file_path(project.slug, file_basename)
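The functional change in this fixture is that path is now set alongside name, so the indexed test files carry a realistic path, including any subdirectory. A short illustration with one hypothetical basename from the test data:

file_basename = 'guides/index'          # hypothetical value from PROJECT_DATA_FILES
file_name = file_basename + '.html'     # 'guides/index.html'
# Both name and path are set to this value, which is the path the
# link-generation change in api.py is exercised with in the tests below.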
13 changes: 13 additions & 0 deletions readthedocs/search/tests/data/docs/guides/index.json
@@ -0,0 +1,13 @@
+{
+    "path": "guides/index",
+    "title": "Guides",
+    "sections": [
+        {
+            "id": "guides",
+            "title": "Guides",
+            "content": "Content from guides/index"
+        }
+    ],
+    "domains": [],
+    "domain_data": {}
+}
13 changes: 13 additions & 0 deletions readthedocs/search/tests/data/docs/index.json
@@ -0,0 +1,13 @@
+{
+    "path": "index",
+    "title": "Index",
+    "sections": [
+        {
+            "id": "title",
+            "title": "Title",
+            "content": "Some content from index"
+        }
+    ],
+    "domains": [],
+    "domain_data": {}
+}
2 changes: 1 addition & 1 deletion readthedocs/search/tests/dummy_data.py
@@ -1,7 +1,7 @@
 PROJECT_DATA_FILES = {
     'pipeline': ['installation', 'signals'],
     'kuma': ['documentation', 'docker'],
-    'docs': ['support', 'wiping'],
+    'docs': ['support', 'wiping', 'index', 'guides/index'],
 }

 ALL_PROJECTS = PROJECT_DATA_FILES.keys()
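The two new basenames correspond to the JSON fixtures added above. A hypothetical helper mirroring get_json_file_path from conftest.py shows how a basename resolves to a test data file; the real helper's signature is not shown in this diff, only the layout of the added files is.

def json_path(project_slug, file_basename):
    # Assumed layout, matching the files added in this PR.
    return f'readthedocs/search/tests/data/{project_slug}/{file_basename}.json'

json_path('docs', 'guides/index')
# 'readthedocs/search/tests/data/docs/guides/index.json'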
105 changes: 104 additions & 1 deletion readthedocs/search/tests/test_api.py
@@ -6,7 +6,14 @@
 from django_dynamic_fixture import G

 from readthedocs.builds.models import Version
-from readthedocs.projects.constants import PUBLIC
+from readthedocs.projects.constants import (
+    MKDOCS,
+    MKDOCS_HTML,
+    PUBLIC,
+    SPHINX,
+    SPHINX_HTMLDIR,
+    SPHINX_SINGLEHTML,
+)
 from readthedocs.projects.models import HTMLFile, Project
 from readthedocs.search.api import PageSearchAPIView
 from readthedocs.search.documents import PageDocument
@@ -324,6 +331,102 @@ def test_doc_search_hidden_versions(self, api_client, all_projects):
         first_result = data[0]
         assert first_result['project'] == subproject.slug

+    @pytest.mark.parametrize('doctype', [SPHINX, SPHINX_SINGLEHTML, MKDOCS_HTML])
+    def test_search_correct_link_html_projects(self, api_client, doctype):
+        project = Project.objects.get(slug='docs')
+        project.versions.update(documentation_type=doctype)
+        version = project.versions.all().first()
+
+        # Check for a normal page.
+        search_params = {
+            'project': project.slug,
+            'version': version.slug,
+            'q': 'Support',
+        }
+        resp = self.get_search(api_client, search_params)
+        assert resp.status_code == 200
+
+        result = resp.data['results'][0]
+
+        assert result['project'] == project.slug
+        assert result['link'].endswith('en/latest/support.html')
+
+        # Check the main index page.
+        search_params = {
+            'project': project.slug,
+            'version': version.slug,
+            'q': 'Some content from index',
+        }
+        resp = self.get_search(api_client, search_params)
+        assert resp.status_code == 200
+
+        result = resp.data['results'][0]
+
+        assert result['project'] == project.slug
+        assert result['link'].endswith('en/latest/index.html')
+
+        # Check the index page of a subdirectory.
+        search_params = {
+            'project': project.slug,
+            'version': version.slug,
+            'q': 'Some content from guides/index',
+        }
+        resp = self.get_search(api_client, search_params)
+        assert resp.status_code == 200
+
+        result = resp.data['results'][0]
+
+        assert result['project'] == project.slug
+        assert result['link'].endswith('en/latest/guides/index.html')
+
+    @pytest.mark.parametrize('doctype', [SPHINX_HTMLDIR, MKDOCS])
+    def test_search_correct_link_htmldir_projects(self, api_client, doctype):
+        project = Project.objects.get(slug='docs')
+        project.versions.update(documentation_type=doctype)
+        version = project.versions.all().first()
+
+        # Check for a normal page.
+        search_params = {
+            'project': project.slug,
+            'version': version.slug,
+            'q': 'Support',
+        }
+        resp = self.get_search(api_client, search_params)
+        assert resp.status_code == 200
+
+        result = resp.data['results'][0]
+
+        assert result['project'] == project.slug
+        assert result['link'].endswith('en/latest/support.html')
+
+        # Check the main index page.
+        search_params = {
+            'project': project.slug,
+            'version': version.slug,
+            'q': 'Some content from index',
+        }
+        resp = self.get_search(api_client, search_params)
+        assert resp.status_code == 200
+
+        result = resp.data['results'][0]
+
+        assert result['project'] == project.slug
+        assert result['link'].endswith('en/latest/')
+
+        # Check the index page of a subdirectory.
+        search_params = {
+            'project': project.slug,
+            'version': version.slug,
+            'q': 'Some content from guides/index',
+        }
+        resp = self.get_search(api_client, search_params)
+        assert resp.status_code == 200
+
+        result = resp.data['results'][0]
+
+        assert result['project'] == project.slug
+        assert result['link'].endswith('en/latest/guides/')
+

 class TestDocumentSearch(BaseTestDocumentSearch):

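Taken together, the new tests pin down the link format per doctype: .html paths for SPHINX, SPHINX_SINGLEHTML, and MKDOCS_HTML, while for SPHINX_HTMLDIR and MKDOCS the index pages collapse to directory-style URLs ending in / and regular pages keep their .html path. As a rough end-to-end sketch of what a client would see, where the endpoint path and the example URL are assumptions rather than values taken from this PR:

resp = api_client.get(
    '/api/v2/search/',   # assumed route for PageSearchAPIView
    {'project': 'docs', 'version': 'latest', 'q': 'Support'},
)
assert resp.status_code == 200
result = resp.data['results'][0]
# The 'link' field is now a full URL generated server-side, e.g.
# 'https://docs.example.com/en/latest/support.html' (hypothetical domain).
assert result['link'].startswith('http')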