Reduce logging of common redirects and expected items #10497

Merged · 2 commits · Jun 29, 2023
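This PR lowers the level of log lines for common, expected events (canonical redirects, self-redirect guards, a known search workaround) from INFO/WARNING to DEBUG. The calls use keyword-argument structured logging in structlog's style, so under a production configuration that filters at INFO, the demoted events disappear from the logs entirely. A minimal sketch of that effect, assuming structlog's filtering bound logger (the configuration shown is illustrative, not Read the Docs' actual logging setup):

```python
import logging

import structlog

# Filter at INFO, as a production deployment typically would.
structlog.configure(
    wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
)

log = structlog.get_logger()

# Dropped by the INFO filter: the common, expected events this PR demotes.
log.debug("Canonical Redirect.", from_url="/old/", to_url="/new/")

# Still emitted: events operators should actually see.
log.warning("Unexpected error serving docs.", path="/broken/")
```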
readthedocs/proxito/redirects.py (2 additions, 2 deletions)
@@ -76,13 +76,13 @@ def canonical_redirect(request, project, redirect_type, external_version_slug=None):
     if from_url == to:
         # check that we do have a response and avoid infinite redirect
-        log.warning(
+        log.debug(
             "Infinite Redirect: FROM URL is the same than TO URL.",
             url=to,
         )
         raise InfiniteRedirectException()

-    log.info(
+    log.debug(
         "Canonical Redirect.", host=request.get_host(), from_url=from_url, to_url=to
     )
     resp = HttpResponseRedirect(to)
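For context, the guard whose level changes here is a loop breaker: when the computed redirect target equals the source URL, the view raises instead of redirecting, and since self-redirects turn out to be common and already handled, DEBUG is the appropriate level. A minimal standalone sketch of the pattern (a simplified re-creation, not the project's exact code; `InfiniteRedirectException` is the project's own exception class):

```python
class InfiniteRedirectException(Exception):
    """Raised when a redirect would point back at itself."""


def checked_redirect_target(from_url: str, to_url: str) -> str:
    # A self-redirect would bounce the client forever; refuse it and let
    # the caller turn the exception into an error response.
    if from_url == to_url:
        raise InfiniteRedirectException()
    return to_url
```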
readthedocs/proxito/views/mixins.py (2 additions, 2 deletions)
@@ -378,7 +378,7 @@ def get_redirect_response(self, request, redirect_path, proxito_path, http_status):
         # Redirects shouldn't change the domain, version or language.
         # However, if the new_path is already an absolute URI, just use it
         new_path = request.build_absolute_uri(new_path)
-        log.info(
+        log.debug(
             'Redirecting...',
             from_url=request.build_absolute_uri(proxito_path),
             to_url=new_path,
@@ -394,7 +394,7 @@ def get_redirect_response(self, request, redirect_path, proxito_path, http_status):
             and new_path_parsed.path == old_path_parsed.path
         ):
             # check that we do have a response and avoid infinite redirect
-            log.warning(
+            log.debug(
                 'Infinite Redirect: FROM URL is the same than TO URL.',
                 url=new_path,
             )
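The second hunk in this file guards a stricter variant of the same check: rather than comparing raw strings, it compares the parsed old and new URLs on their path component. A hedged sketch of the visible comparison (the full condition in the view includes parts not shown in this hunk):

```python
from urllib.parse import urlparse


def looks_like_infinite_redirect(old_url: str, new_url: str) -> bool:
    old_path_parsed = urlparse(old_url)
    new_path_parsed = urlparse(new_url)
    # Mirrors the visible context line: identical parsed paths mean the
    # redirect would land back where it started.
    return new_path_parsed.path == old_path_parsed.path
```

For example, `looks_like_infinite_redirect("https://docs.example.com/en/latest/", "https://docs.example.com/en/latest/?q=1")` is True: only the query string differs, so the paths match.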
4 changes: 1 addition & 3 deletions readthedocs/proxito/views/serve.py
@@ -308,7 +308,6 @@ def _get_canonical_redirect_type(self, request):
             .exists()
         )
         # For .com we need to check if the project supports custom domains.
-        # pylint: disable=protected-access
         if canonical_domain and resolver._use_cname(project):
             log.debug(
                 "Proxito Public Domain -> Canonical Domain Redirect.",
@@ -525,7 +524,6 @@ def get(self, request, proxito_path, template_name="404.html"):
         with the default version and finally, if none of them are found, the Read
         the Docs default page (Maze Found) is rendered by Django and served.
         """
-        # pylint: disable=too-many-locals
         log.bind(proxito_path=proxito_path)
         log.debug('Executing 404 handler.')
@@ -691,7 +689,7 @@ def _get_custom_404_page(self, request, project, version=None):
                 storage_root_path, tryfile
             )
             if build_media_storage.exists(storage_filename_path):
-                log.info(
+                log.debug(
                     "Serving custom 404.html page.",
                     version_slug_404=version_404.slug,
                     storage_filename_path=storage_filename_path,
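The last hunk in this file belongs to the custom-404 lookup: candidate filenames are joined onto a per-version storage root, and the first candidate that exists in build media storage is served. A minimal sketch of that lookup against Django's storage API (the candidate filenames and helper signature are assumptions based on the visible context, not the exact implementation):

```python
import posixpath


def find_custom_404_page(storage, storage_root_path):
    """Return the storage path of the first existing custom 404 page, or None."""
    # The candidate names here are an assumption; the diff only shows a
    # single ``tryfile`` variable being joined onto the storage root.
    for tryfile in ("404.html", "404/index.html"):
        storage_filename_path = posixpath.join(storage_root_path, tryfile)
        # ``exists`` is part of Django's Storage API, as in the diff above.
        if storage.exists(storage_filename_path):
            return storage_filename_path
    return None
```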
readthedocs/search/documents.py (2 additions, 2 deletions)
@@ -22,8 +22,8 @@ def update(self, *args, **kwargs):
         # Hack a fix to our broken connection pooling
         # This creates a new connection on every request,
         # but actually works :)
-        log.info('Hacking Elastic indexing to fix connection pooling')
-        self.using = Elasticsearch(**settings.ELASTICSEARCH_DSL['default'])
+        log.debug("Hacking Elastic indexing to fix connection pooling")
+        self.using = Elasticsearch(**settings.ELASTICSEARCH_DSL["default"])
         super().update(*args, **kwargs)


readthedocs/search/faceted_search.py (2 additions, 2 deletions)
@@ -69,8 +69,8 @@ def __init__(
         # Hack a fix to our broken connection pooling
         # This creates a new connection on every request,
         # but actually works :)
-        log.info('Hacking Elastic to fix search connection pooling')
-        self.using = Elasticsearch(**settings.ELASTICSEARCH_DSL['default'])
+        log.debug("Hacking Elastic to fix search connection pooling")
+        self.using = Elasticsearch(**settings.ELASTICSEARCH_DSL["default"])

         filters = filters or {}

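Both search hunks quiet the log line around the same documented workaround: instead of reusing a client whose pooled connections have gone stale, a fresh `Elasticsearch` client is built from Django settings on each call, and that now happens at DEBUG rather than INFO. A sketch of the pattern (the settings shape follows django-elasticsearch-dsl's `ELASTICSEARCH_DSL` dict; the host value is illustrative):

```python
from django.conf import settings
from elasticsearch import Elasticsearch

# e.g. ELASTICSEARCH_DSL = {"default": {"hosts": "localhost:9200"}}


def fresh_search_client() -> Elasticsearch:
    # Deliberately bypasses any shared client: a brand-new connection per
    # request sidesteps the broken pooling described in the comments,
    # trading connection-setup cost for reliability.
    return Elasticsearch(**settings.ELASTICSEARCH_DSL["default"])
```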