Autolint cleanup for #3821 #3822

Merged 2 commits on Mar 20, 2018

.isort.cfg: 2 changes (1 addition, 1 deletion)
@@ -4,6 +4,6 @@ indent=' '
multi_line_output=4
default_section=THIRDPARTY
known_first_party=readthedocs,readthedocsinc
known_third_party=mock
known_third_party=mock,builtins
sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
add_imports=from __future__ import division, from __future__ import print_function, from __future__ import unicode_literals
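
With builtins listed in known_third_party, isort sorts the from builtins import ... lines (they come from the third-party future package) into the third-party group alongside six, and add_imports injects the full set of __future__ imports into every file it touches. A minimal sketch, assuming this configuration, of the import ordering it produces (the module names are the ones appearing elsewhere in this diff):

from __future__ import (
    absolute_import, division, print_function, unicode_literals)

import logging
import os

from builtins import object
from six import PY2, StringIO

from readthedocs.projects.exceptions import RepositoryError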
readthedocs/vcs_support/backends/git.py: 55 changes (35 additions, 20 deletions)
@@ -1,19 +1,19 @@
# -*- coding: utf-8 -*-
"""Git-related utilities."""

from __future__ import absolute_import
from __future__ import (
absolute_import, division, print_function, unicode_literals)

import csv
import logging
import os
import re

from builtins import str
from six import StringIO
from six import PY2, StringIO

from readthedocs.projects.exceptions import RepositoryError
from readthedocs.vcs_support.base import BaseVCS, VCSVersion


log = logging.getLogger(__name__)


@@ -39,9 +39,8 @@ def _get_clone_url(self):
clone_url = 'https://%s@%s' % (self.token, hacked_url)
return clone_url
# Don't edit URL because all hosts aren't the same

# else:
# clone_url = 'git://%s' % (hacked_url)
# clone_url = 'git://%s' % (hacked_url)
return self.repo_url

def set_remote_url(self, url):
@@ -69,22 +68,24 @@ def checkout_revision(self, revision=None):
branch = self.default_branch or self.fallback_branch
revision = 'origin/%s' % branch

code, out, err = self.run(
'git', 'checkout', '--force', revision)
code, out, err = self.run('git', 'checkout', '--force', revision)
if code != 0:
log.warning("Failed to checkout revision '%s': %s",
revision, code)
log.warning("Failed to checkout revision '%s': %s", revision, code)
return [code, out, err]

def clone(self):
code, _, _ = self.run(
'git', 'clone', '--recursive', self.repo_url, '.')
code, _, _ = self.run('git', 'clone', '--recursive', self.repo_url, '.')
if code != 0:
raise RepositoryError

@property
def tags(self):
retcode, stdout, _ = self.run('git', 'show-ref', '--tags', record_as_success=True)
retcode, stdout, _ = self.run(
'git',
'show-ref',
'--tags',
record_as_success=True,
)
# error (or no tags found)
if retcode != 0:
return []
@@ -108,7 +109,8 @@ def parse_tags(self, data):
# StringIO below is expecting Unicode data, so ensure that it gets it.
if not isinstance(data, str):
data = str(data)
raw_tags = csv.reader(StringIO(data), delimiter=' ')
delimiter = str(' ').encode('utf-8') if PY2 else str(' ')
raw_tags = csv.reader(StringIO(data), delimiter=delimiter)
vcs_tags = []
for row in raw_tags:
row = [f for f in row if f != '']
@@ -122,15 +124,20 @@ def parse_tags(self, data):
@property
def branches(self):
# Only show remote branches
retcode, stdout, _ = self.run('git', 'branch', '-r', record_as_success=True)
retcode, stdout, _ = self.run(
'git',
'branch',
'-r',
record_as_success=True,
)
# error (or no branches found)
if retcode != 0:
return []
return self.parse_branches(stdout)

def parse_branches(self, data):
"""
Parse output of git branch -r
Parse output of git branch -r.

e.g.:

@@ -145,7 +152,8 @@ def parse_branches(self, data):
# StringIO below is expecting Unicode data, so ensure that it gets it.
if not isinstance(data, str):
data = str(data)
raw_branches = csv.reader(StringIO(data), delimiter=' ')
delimiter = str(' ').encode('utf-8') if PY2 else str(' ')
raw_branches = csv.reader(StringIO(data), delimiter=delimiter)
for branch in raw_branches:
branch = [f for f in branch if f != '' and f != '*']
# Handle empty branches
@@ -155,7 +163,8 @@ def parse_branches(self, data):
verbose_name = branch.replace('origin/', '')
if verbose_name in ['HEAD']:
continue
clean_branches.append(VCSVersion(self, branch, verbose_name))
clean_branches.append(
VCSVersion(self, branch, verbose_name))
else:
clean_branches.append(VCSVersion(self, branch, branch))
return clean_branches
@@ -193,8 +202,14 @@ def checkout(self, identifier=None):
# Update submodules
if self.submodules_exists():
self.run('git', 'submodule', 'sync')
self.run('git', 'submodule', 'update',
'--init', '--recursive', '--force')
self.run(
'git',
'submodule',
'update',
'--init',
'--recursive',
'--force',
)

return code, out, err

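The delimiter juggling added to parse_tags and parse_branches exists because, with unicode_literals in effect, the ' ' literal is a unicode string, and Python 2's csv module only accepts a native byte-string delimiter while Python 3's expects text. A minimal standalone sketch of the same pattern (the sample line is made up and only imitates git show-ref --tags output):

from __future__ import unicode_literals

import csv

from six import PY2, StringIO

# Python 2's csv.reader needs a byte-string delimiter; Python 3's needs text.
delimiter = str(' ').encode('utf-8') if PY2 else str(' ')

data = '3b32c5d8 refs/tags/v1.0\n'  # fabricated show-ref style line
for row in csv.reader(StringIO(data), delimiter=delimiter):
    print([field for field in row if field != ''])
# -> ['3b32c5d8', 'refs/tags/v1.0']
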
readthedocs/vcs_support/base.py: 13 changes (7 additions, 6 deletions)
@@ -1,12 +1,13 @@
# -*- coding: utf-8 -*-

"""Base classes for VCS backends."""
from __future__ import absolute_import
from builtins import object
from __future__ import (
absolute_import, division, print_function, unicode_literals)

import logging
import os
import shutil

from builtins import object

log = logging.getLogger(__name__)

@@ -28,8 +29,8 @@ def __init__(self, repository, identifier, verbose_name):
self.verbose_name = verbose_name

def __repr__(self):
return "<VCSVersion: %s:%s" % (self.repository.repo_url,
self.verbose_name)
return '<VCSVersion: %s:%s' % (
self.repository.repo_url, self.verbose_name)


class BaseVCS(object):
@@ -66,7 +67,7 @@ def check_working_dir(self):
os.makedirs(self.working_dir)

def make_clean_working_dir(self):
"""Ensures that the working dir exists and is empty"""
"""Ensures that the working dir exists and is empty."""
shutil.rmtree(self.working_dir, ignore_errors=True)
self.check_working_dir()
