Resolving lint warnings and errors #1522

Merged: 17 commits, Jul 31, 2015

2 changes: 2 additions & 0 deletions prospector.yml
@@ -22,6 +22,8 @@ pep8:

pylint:
max-line-length: 100
disable:
- logging-format-interpolation

mccabe:
run: false
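
For context, the logging-format-interpolation check being disabled here is pylint's warning against passing a message that was already built with str.format() into a logging call, instead of a format string plus arguments that the logging module interpolates lazily (only when the record is actually emitted). A minimal illustrative sketch, not code from this PR:

import logging

log = logging.getLogger(__name__)
slug = 'example-project'  # hypothetical value, for illustration only

# Flagged by logging-format-interpolation: the message is formatted eagerly,
# even when the DEBUG level is disabled.
log.debug('CNAME cached: {0}'.format(slug))

# What the check prefers: pass arguments and let logging interpolate lazily.
log.debug('CNAME cached: %s', slug)

Disabling the check presumably keeps the existing LOG_TEMPLATE.format(...) logging style used elsewhere in this codebase (see the middleware changes below) from failing the lint run.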
12 changes: 6 additions & 6 deletions readthedocs/api/base.py
@@ -32,7 +32,7 @@
class ProjectResource(ModelResource, SearchMixin):
users = fields.ToManyField('readthedocs.api.base.UserResource', 'users')

class Meta:
class Meta(object):
Member:
This feels un-djangoy -- have they suggested doing this now, or is this just to make the linter happy?

Member:
Also, it seems there are a bunch of other places where we're doing it and it isn't alerting. Is it special-casing Django models and forms, but not the API stuff?

Contributor Author:
The linter complains if Python old-style classes are used. Django has made the Meta classes into new-style classes automatically since this ticket: https://code.djangoproject.com/ticket/7342

The REST framework, however, does not, which is why we made them new-style classes here by hand. I think of it as "future-proofing": all classes will be new-style classes in Python 3, so making the switch now gives us some testing in that regard.
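
As an illustrative aside (a minimal Python 2 sketch, not code from this PR), the distinction the old-style-classes warning is about:

# Python 2: a class with no explicit base is an old-style class; inheriting
# from object makes it a new-style class, the only kind that exists in Python 3.
class OldMeta:
    queryset = None

class NewMeta(object):
    queryset = None

print type(OldMeta())   # <type 'instance'>          (old-style)
print type(NewMeta())   # <class '__main__.NewMeta'> (new-style)

New-style classes get the unified type model (consistent type(), descriptors, super() support), which is what the lint warning is guarding against; in Python 3 the explicit object base is redundant but harmless.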

include_absolute_url = True
allowed_methods = ['get', 'post', 'put']
queryset = Project.objects.api()
@@ -121,7 +121,7 @@ def override_urls(self):
class VersionResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project', full=True)

class Meta:
class Meta(object):
allowed_methods = ['get', 'put', 'post']
always_return_data = True
queryset = Version.objects.api()
@@ -212,7 +212,7 @@ class BuildResource(ModelResource):
project = fields.ForeignKey('readthedocs.api.base.ProjectResource', 'project')
version = fields.ForeignKey('readthedocs.api.base.VersionResource', 'version')

class Meta:
class Meta(object):
always_return_data = True
include_absolute_url = True
allowed_methods = ['get', 'post', 'put']
@@ -246,7 +246,7 @@ def override_urls(self):
class FileResource(ModelResource, SearchMixin):
project = fields.ForeignKey(ProjectResource, 'project', full=True)

class Meta:
class Meta(object):
allowed_methods = ['get', 'post']
queryset = ImportedFile.objects.all()
excludes = ['md5', 'slug']
@@ -278,7 +278,7 @@ def get_anchor(self, request, **kwargs):

query = request.GET.get('q', '')
redis_data = redis_client.keys("*redirects:v4*%s*" % query)
#-2 because http:
# -2 because http:
urls = [''.join(data.split(':')[6:]) for data in redis_data
if 'http://' in data]
object_list = {'objects': urls}
@@ -289,7 +289,7 @@ def get_anchor(self, request, **kwargs):

class UserResource(ModelResource):

class Meta:
class Meta(object):
allowed_methods = ['get']
queryset = User.objects.all()
fields = ['username', 'first_name', 'last_name', 'last_login', 'id']
4 changes: 1 addition & 3 deletions readthedocs/api/utils.py
@@ -168,9 +168,7 @@ def obj_get_list(self, request=None, *args, **kwargs):
except ValueError, e:
raise NotFound(ugettext("Invalid resource lookup data provided "
"(mismatched type).: %(error)s")
% {
'error': e
})
% {'error': e})


class OwnerAuthorization(Authorization):
2 changes: 2 additions & 0 deletions readthedocs/bookmarks/views.py
@@ -13,10 +13,12 @@
from readthedocs.bookmarks.models import Bookmark
from readthedocs.projects.models import Project


# These views are CSRF exempt because of Django's CSRF middleware failing here
# https://github.com/django/django/blob/stable/1.6.x/django/middleware/csrf.py#L135-L159
# We don't have a valid referrer because we're on a subdomain


class BookmarkExistsView(View):

@method_decorator(csrf_exempt)
10 changes: 7 additions & 3 deletions readthedocs/comments/models.py
@@ -23,13 +23,17 @@ def create(self, *args, **kwargs):
return node

def from_hash(self, version_slug, page, node_hash, project_slug=None):
snapshots = NodeSnapshot.objects.filter(hash=node_hash, node__version__slug=version_slug, node__page=page)
snapshots = NodeSnapshot.objects.filter(hash=node_hash,
node__version__slug=version_slug,
node__page=page)

if project_slug:
snapshots = snapshots.filter(node__project__slug=project_slug)

if len(snapshots) == 0:
raise DocumentNode.DoesNotExist("No node exists on %s with a current hash of %s" % (page, node_hash))
raise DocumentNode.DoesNotExist(
"No node exists on %s with a current hash of %s" % (
page, node_hash))

if len(snapshots) == 1:
# If we have found only one snapshot, we know we have the correct node.
@@ -176,7 +180,7 @@ def has_been_approved_since_most_recent_node_change(self):
return False

def is_orphaned(self):
self.node # TODO
raise NotImplementedError('TODO')


class DocumentCommentSerializer(serializers.ModelSerializer):
3 changes: 2 additions & 1 deletion readthedocs/comments/session.py
@@ -1,12 +1,13 @@
from rest_framework.authentication import SessionAuthentication


class UnsafeSessionAuthentication(SessionAuthentication):

def authenticate(self, request):
http_request = request._request
user = getattr(http_request, 'user', None)

if not user or not user.is_active:
return None
return None

return (user, None)
14 changes: 9 additions & 5 deletions readthedocs/comments/views.py
@@ -21,8 +21,9 @@
from rest_framework.viewsets import ModelViewSet
from sphinx.websupport import WebSupport

from readthedocs.comments.models import DocumentComment, DocumentNode, NodeSnapshot, DocumentCommentSerializer,\
DocumentNodeSerializer, ModerationActionSerializer
from readthedocs.comments.models import (
DocumentComment, DocumentNode, NodeSnapshot, DocumentCommentSerializer,
DocumentNodeSerializer, ModerationActionSerializer)
from readthedocs.privacy.backend import AdminNotAuthorized
from readthedocs.projects.models import Project
from readthedocs.restapi.permissions import IsOwner, CommentModeratorOrReadOnly
@@ -79,7 +80,6 @@ def attach_comment(request):
comment.node = snapshot.node

serialized_comment = DocumentCommentSerializer(comment)
serialized_comment.data
return Response(serialized_comment.data)


@@ -144,7 +144,9 @@ def update_node(request):
version = post_data['version']
page = post_data['page']

node = DocumentNode.objects.from_hash(node_hash=old_hash, project_slug=project, version_slug=version, page=page)
node = DocumentNode.objects.from_hash(
node_hash=old_hash, project_slug=project, version_slug=version,
page=page)

node.update_hash(new_hash, commit)
return Response(DocumentNodeSerializer(node).data)
@@ -168,7 +170,9 @@ def get_queryset(self):
queryset = DocumentComment.objects.filter(node=node)

except KeyError:
raise ParseError('To get comments by node, you must also provide page, version, and project.')
raise ParseError(
'To get comments by node, you must also provide page, '
'version, and project.')
except DocumentNode.DoesNotExist:
queryset = DocumentComment.objects.none()
elif qp.get('project'):
6 changes: 3 additions & 3 deletions readthedocs/core/hacks.py
@@ -7,7 +7,7 @@ def find_module(self, name, path):
try:
return imp.find_module(name, path)
except ImportError:
#raise
# raise
return FreeLoader()


@@ -25,7 +25,7 @@ def __call__(self, *args, **kwargs):
return Mock()


class FreeLoader:
class FreeLoader(object):
def load_module(self, fullname):
return Mock()

@@ -37,4 +37,4 @@ def patch_meta_path():

def unpatch_meta_path():
sys.meta_path.remove(FreeLoader._class)
#sys.meta_path = []
# sys.meta_path = []
1 change: 1 addition & 0 deletions readthedocs/core/management/commands/archive.py
@@ -9,6 +9,7 @@

log = logging.getLogger(__name__)


class Command(BaseCommand):
"""Custom management command to rebuild documentation for all projects on
the site. Invoked via ``./manage.py update_repos``.
3 changes: 2 additions & 1 deletion readthedocs/core/management/commands/build_files.py
@@ -29,7 +29,8 @@ def handle(self, *args, **options):
Build/index all versions or a single project's version
'''
# Delete all existing as a cleanup for any deleted projects.
#ImportedFile.objects.all().delete()

# ImportedFile.objects.all().delete()
project = options['project']

if project:
1 change: 0 additions & 1 deletion readthedocs/core/management/commands/clean_builds.py
@@ -45,7 +45,6 @@ def handle(self, *args, **options):
if latest_build.date > max_date:
log.warn('{0} is newer than {1}'.format(
latest_build, max_date))
next
path = version.get_build_path()
if path is not None:
log.info(
1 change: 1 addition & 0 deletions readthedocs/core/management/commands/pull.py
@@ -10,6 +10,7 @@

log = logging.getLogger(__name__)


class Command(BaseCommand):
def handle(self, *args, **options):
if len(args):
@@ -43,6 +43,8 @@ def handle(self, *args, **options):
commit = None
try:
page_list = parse_json.process_all_json_files(version, build_dir=False)
index_search_request(version=version, page_list=page_list, commit=commit, project_scale=0, page_scale=0, section=False, delete=False)
index_search_request(
version=version, page_list=page_list, commit=commit,
project_scale=0, page_scale=0, section=False, delete=False)
except Exception:
log.error('Build failed for %s' % version, exc_info=True)
1 change: 1 addition & 0 deletions readthedocs/core/management/commands/symlink.py
@@ -9,6 +9,7 @@

log = logging.getLogger(__name__)


class Command(BaseCommand):
def handle(self, *args, **options):
if len(args):
13 changes: 9 additions & 4 deletions readthedocs/core/middleware.py
@@ -48,7 +48,8 @@ def process_request(self, request):
request.slug = request.META['HTTP_X_RTD_SLUG'].lower()
request.urlconf = 'core.subdomain_urls'
request.rtdheader = True
log.debug(LOG_TEMPLATE.format(msg='X-RTD-Slug header detetected: %s' % request.slug, **log_kwargs))
log.debug(LOG_TEMPLATE.format(
msg='X-RTD-Slug header detetected: %s' % request.slug, **log_kwargs))
except KeyError:
# Try header first, then DNS
try:
@@ -62,10 +63,14 @@
cache.set(host, slug, 60 * 60)
# Cache the slug -> host mapping permanently.
redis_conn.sadd("rtd_slug:v1:%s" % slug, host)
log.debug(LOG_TEMPLATE.format(msg='CNAME cached: %s->%s' % (slug, host), **log_kwargs))
log.debug(LOG_TEMPLATE.format(
msg='CNAME cached: %s->%s' % (slug, host),
**log_kwargs))
request.slug = slug
request.urlconf = 'core.subdomain_urls'
log.debug(LOG_TEMPLATE.format(msg='CNAME detetected: %s' % request.slug, **log_kwargs))
log.debug(LOG_TEMPLATE.format(
msg='CNAME detetected: %s' % request.slug,
**log_kwargs))
except:
# Some crazy person is CNAMEing to us. 404.
log.exception(LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs))
@@ -78,7 +83,7 @@ def process_request(self, request):
log.debug(LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs))
raise Http404(_('Invalid hostname'))
log.debug(LOG_TEMPLATE.format(msg='Allowing long domain name', **log_kwargs))
#raise Http404(_('Invalid hostname'))
# raise Http404(_('Invalid hostname'))
# Normal request.
return None

1 change: 1 addition & 0 deletions readthedocs/core/models.py
@@ -10,6 +10,7 @@

log = logging.getLogger(__name__)


class UserProfile (models.Model):
"""Additional information about a User.
"""
6 changes: 4 additions & 2 deletions readthedocs/core/utils.py
@@ -75,10 +75,12 @@ def trigger_build(project, version=None, record=True, force=False, basic=False):
state='triggered',
success=True,
)
update_docs.delay(pk=project.pk, version_pk=version.pk, record=record, force=force, basic=basic, build_pk=build.pk)
update_docs.delay(pk=project.pk, version_pk=version.pk, record=record,
force=force, basic=basic, build_pk=build.pk)
else:
build = None
update_docs.delay(pk=project.pk, version_pk=version.pk, record=record, force=force, basic=basic)
update_docs.delay(pk=project.pk, version_pk=version.pk, record=record,
force=force, basic=basic)

return build
