From 3e1be0ff23976c034263b6806c7f7d2b200929e4 Mon Sep 17 00:00:00 2001 From: Manuel Kaufmann Date: Tue, 9 Jan 2024 11:21:23 +0100 Subject: [PATCH] Black: run black over all the code base (Part 2) Continues with the idea started in https://github.com/readthedocs/readthedocs.org/pull/10619 --- readthedocs/config/models.py | 34 +-- readthedocs/doc_builder/backends/mkdocs.py | 44 ++-- readthedocs/doc_builder/config.py | 6 +- readthedocs/doc_builder/constants.py | 1 - .../doc_builder/python_environments.py | 98 ++++---- readthedocs/integrations/utils.py | 1 + .../rtd_tests/tests/test_doc_building.py | 214 +++++++++--------- readthedocs/rtd_tests/tests/test_footer.py | 3 +- readthedocs/rtd_tests/tests/test_resolver.py | 2 + readthedocs/search/apps.py | 2 +- 10 files changed, 202 insertions(+), 203 deletions(-) diff --git a/readthedocs/config/models.py b/readthedocs/config/models.py index 237c00f388e..16b0ad58750 100644 --- a/readthedocs/config/models.py +++ b/readthedocs/config/models.py @@ -20,15 +20,11 @@ def __init__(self, **kwargs): setattr(self, name, kwargs[name]) def as_dict(self): - return { - name: to_dict(getattr(self, name)) - for name in self.__slots__ - } + return {name: to_dict(getattr(self, name)) for name in self.__slots__} # TODO: rename this class to `Build` class BuildWithOs(Base): - __slots__ = ("os", "tools", "jobs", "apt_packages", "commands") def __init__(self, **kwargs): @@ -38,8 +34,7 @@ def __init__(self, **kwargs): class BuildTool(Base): - - __slots__ = ('version', 'full_version') + __slots__ = ("version", "full_version") class BuildJobs(Base): @@ -76,39 +71,32 @@ class Python(Base): class PythonInstallRequirements(Base): - - __slots__ = ('requirements',) + __slots__ = ("requirements",) class PythonInstall(Base): - __slots__ = ( - 'path', - 'method', - 'extra_requirements', + "path", + "method", + "extra_requirements", ) class Conda(Base): - - __slots__ = ('environment',) + __slots__ = ("environment",) class Sphinx(Base): - - __slots__ = ('builder', 'configuration', 'fail_on_warning') + __slots__ = ("builder", "configuration", "fail_on_warning") class Mkdocs(Base): - - __slots__ = ('configuration', 'fail_on_warning') + __slots__ = ("configuration", "fail_on_warning") class Submodules(Base): - - __slots__ = ('include', 'exclude', 'recursive') + __slots__ = ("include", "exclude", "recursive") class Search(Base): - - __slots__ = ('ranking', 'ignore') + __slots__ = ("ranking", "ignore") diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py index 6fef3009818..570b1a93404 100644 --- a/readthedocs/doc_builder/backends/mkdocs.py +++ b/readthedocs/doc_builder/backends/mkdocs.py @@ -41,7 +41,7 @@ class BaseMkdocs(BaseBuilder): """Mkdocs builder.""" # The default theme for mkdocs is the 'mkdocs' theme - DEFAULT_THEME_NAME = 'mkdocs' + DEFAULT_THEME_NAME = "mkdocs" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -57,13 +57,13 @@ def __init__(self, *args, **kwargs): # for these project that were building with MkDocs in the Corporate # site. 
if self.project.has_feature(Feature.MKDOCS_THEME_RTD): - self.DEFAULT_THEME_NAME = 'readthedocs' + self.DEFAULT_THEME_NAME = "readthedocs" log.warning( "Project using readthedocs theme as default for MkDocs.", project_slug=self.project.slug, ) else: - self.DEFAULT_THEME_NAME = 'mkdocs' + self.DEFAULT_THEME_NAME = "mkdocs" def get_final_doctype(self): """ @@ -119,10 +119,10 @@ def load_yaml_config(self): except IOError: raise MkDocsYAMLParseError(MkDocsYAMLParseError.NOT_FOUND) except yaml.YAMLError as exc: - note = '' - if hasattr(exc, 'problem_mark'): + note = "" + if hasattr(exc, "problem_mark"): mark = exc.problem_mark - note = ' (line %d, column %d)' % ( + note = " (line %d, column %d)" % ( mark.line + 1, mark.column + 1, ) @@ -146,7 +146,7 @@ def append_conf(self): MkDocsYAMLParseError.INVALID_DOCS_DIR_CONFIG, ) - user_config['docs_dir'] = docs_dir + user_config["docs_dir"] = docs_dir static_url = self.project.proxied_static_path # Set mkdocs config values. @@ -201,14 +201,14 @@ def append_conf(self): # Use Read the Docs' analytics setup rather than mkdocs' # This supports using RTD's privacy improvements around analytics - user_config['google_analytics'] = None + user_config["google_analytics"] = None # README: make MkDocs to use ``readthedocs`` theme as default if the # user didn't specify a specific theme manually if self.project.has_feature(Feature.MKDOCS_THEME_RTD): - if 'theme' not in user_config: + if "theme" not in user_config: # mkdocs<0.17 syntax - user_config['theme'] = self.DEFAULT_THEME_NAME + user_config["theme"] = self.DEFAULT_THEME_NAME # Write the modified mkdocs configuration with safe_open(self.yaml_file, "w", encoding="utf-8") as f: @@ -219,7 +219,7 @@ def append_conf(self): # Write the mkdocs.yml to the build logs self.run( - 'cat', + "cat", os.path.relpath(self.yaml_file, self.project_path), cwd=self.project_path, ) @@ -229,9 +229,9 @@ def generate_rtd_data(self, docs_dir, mkdocs_config): # Use the analytics code from mkdocs.yml # if it isn't set already by Read the Docs, analytics_code = self.version.project.analytics_code - if not analytics_code and mkdocs_config.get('google_analytics'): + if not analytics_code and mkdocs_config.get("google_analytics"): # http://www.mkdocs.org/user-guide/configuration/#google_analytics - analytics_code = mkdocs_config['google_analytics'][0] + analytics_code = mkdocs_config["google_analytics"][0] commit = ( self.version.project.vcs_repo( @@ -271,14 +271,14 @@ def generate_rtd_data(self, docs_dir, mkdocs_config): "html_theme": readthedocs_data["theme"], "pagename": None, } - tmpl = template_loader.get_template('doc_builder/data.js.tmpl') + tmpl = template_loader.get_template("doc_builder/data.js.tmpl") return tmpl.render(data_ctx) def build(self): build_command = [ - self.python_env.venv_bin(filename='python'), - '-m', - 'mkdocs', + self.python_env.venv_bin(filename="python"), + "-m", + "mkdocs", self.builder, "--clean", "--site-dir", @@ -287,7 +287,7 @@ def build(self): os.path.relpath(self.yaml_file, self.project_path), ] if self.config.mkdocs.fail_on_warning: - build_command.append('--strict') + build_command.append("--strict") cmd_ret = self.run( *build_command, cwd=self.project_path, @@ -305,19 +305,19 @@ def get_theme_name(self, mkdocs_config): :see: http://www.mkdocs.org/about/release-notes/#theme-customization-1164 :returns: the name of the theme RTD will use """ - theme_setting = mkdocs_config.get('theme') + theme_setting = mkdocs_config.get("theme") if isinstance(theme_setting, dict): # Full nested theme config (the new 
configuration) - return theme_setting.get('name') or self.DEFAULT_THEME_NAME + return theme_setting.get("name") or self.DEFAULT_THEME_NAME if theme_setting: # A string which is the name of the theme return theme_setting - theme_dir = mkdocs_config.get('theme_dir') + theme_dir = mkdocs_config.get("theme_dir") if theme_dir: # Use the name of the directory in this project's custom theme directory - return theme_dir.rstrip('/').split('/')[-1] + return theme_dir.rstrip("/").split("/")[-1] return self.DEFAULT_THEME_NAME diff --git a/readthedocs/doc_builder/config.py b/readthedocs/doc_builder/config.py index dc9adb84786..4141dad01c6 100644 --- a/readthedocs/doc_builder/config.py +++ b/readthedocs/doc_builder/config.py @@ -32,9 +32,9 @@ def load_yaml_config(version, readthedocs_yaml_path=None): def get_default_formats(project): """Get a list of the default formats for ``project``.""" - formats = ['htmlzip'] + formats = ["htmlzip"] if project.enable_epub_build: - formats += ['epub'] + formats += ["epub"] if project.enable_pdf_build: - formats += ['pdf'] + formats += ["pdf"] return formats diff --git a/readthedocs/doc_builder/constants.py b/readthedocs/doc_builder/constants.py index 73228946e28..38382b077fd 100644 --- a/readthedocs/doc_builder/constants.py +++ b/readthedocs/doc_builder/constants.py @@ -1,4 +1,3 @@ - """Doc build constants.""" import re diff --git a/readthedocs/doc_builder/python_environments.py b/readthedocs/doc_builder/python_environments.py index 0a359a4acf7..c11c0f10553 100644 --- a/readthedocs/doc_builder/python_environments.py +++ b/readthedocs/doc_builder/python_environments.py @@ -60,13 +60,11 @@ def install_package(self, install): if install.method == PIP: # Prefix ./ so pip installs from a local path rather than pypi local_path = ( - os.path.join('.', install.path) if install.path != '.' else install.path + os.path.join(".", install.path) if install.path != "." 
else install.path ) - extra_req_param = '' + extra_req_param = "" if install.extra_requirements: - extra_req_param = '[{}]'.format( - ','.join(install.extra_requirements) - ) + extra_req_param = "[{}]".format(",".join(install.extra_requirements)) self.build_env.run( self.venv_bin(filename="python"), "-m", @@ -85,10 +83,10 @@ def install_package(self, install): ) elif install.method == SETUPTOOLS: self.build_env.run( - self.venv_bin(filename='python'), - os.path.join(install.path, 'setup.py'), - 'install', - '--force', + self.venv_bin(filename="python"), + os.path.join(install.path, "setup.py"), + "install", + "--force", cwd=self.checkout_path, bin_path=self.venv_bin(), ) @@ -133,7 +131,7 @@ def setup_base(self): https://github.com/readthedocs/readthedocs.org/issues/7322 """ cli_args = [ - '-mvirtualenv', + "-mvirtualenv", # Append the positional destination argument "$READTHEDOCS_VIRTUALENV_PATH", ] @@ -150,12 +148,12 @@ def setup_base(self): def install_core_requirements(self): """Install basic Read the Docs requirements into the virtualenv.""" pip_install_cmd = [ - self.venv_bin(filename='python'), - '-m', - 'pip', - 'install', - '--upgrade', - '--no-cache-dir', + self.venv_bin(filename="python"), + "-m", + "pip", + "install", + "--upgrade", + "--no-cache-dir", ] self._install_latest_requirements(pip_install_cmd) @@ -201,17 +199,17 @@ def install_requirements_file(self, install): requirements_file_path = install.requirements if requirements_file_path: args = [ - self.venv_bin(filename='python'), - '-m', - 'pip', - 'install', + self.venv_bin(filename="python"), + "-m", + "pip", + "install", ] if self.project.has_feature(Feature.PIP_ALWAYS_UPGRADE): - args += ['--upgrade'] + args += ["--upgrade"] args += [ - '--exists-action=w', - '--no-cache-dir', - '-r', + "--exists-action=w", + "--no-cache-dir", + "-r", requirements_file_path, ] self.build_env.run( @@ -252,12 +250,12 @@ def setup_base(self): self.build_env.run( self.conda_bin_name(), - 'env', - 'create', - '--quiet', - '--name', + "env", + "create", + "--quiet", + "--name", self.version.slug, - '--file', + "--file", self.config.conda.environment, bin_path=None, # Don't use conda bin that doesn't exist yet cwd=self.checkout_path, @@ -266,7 +264,7 @@ def setup_base(self): def _show_environment_yaml(self): """Show ``environment.yml`` file in the Build output.""" self.build_env.run( - 'cat', + "cat", self.config.conda.environment, cwd=self.checkout_path, ) @@ -300,29 +298,29 @@ def _append_core_requirements(self): environment = parse_yaml(inputfile) except IOError: log.warning( - 'There was an error while reading Conda environment file.', + "There was an error while reading Conda environment file.", ) except ParseError: log.warning( - 'There was an error while parsing Conda environment file.', + "There was an error while parsing Conda environment file.", ) else: # Append conda dependencies directly to ``dependencies`` and pip # dependencies to ``dependencies.pip`` pip_requirements, conda_requirements = self._get_core_requirements() - dependencies = environment.get('dependencies', []) - pip_dependencies = {'pip': pip_requirements} + dependencies = environment.get("dependencies", []) + pip_dependencies = {"pip": pip_requirements} for item in dependencies: - if isinstance(item, dict) and 'pip' in item: + if isinstance(item, dict) and "pip" in item: # NOTE: pip can be ``None`` - pip_requirements.extend(item.get('pip') or []) + pip_requirements.extend(item.get("pip") or []) dependencies.remove(item) break 
dependencies.append(pip_dependencies) dependencies.extend(conda_requirements) - environment.update({'dependencies': dependencies}) + environment.update({"dependencies": dependencies}) try: # Allow symlinks, but only the ones that resolve inside the base directory. outputfile = safe_open( @@ -343,8 +341,8 @@ def _append_core_requirements(self): yaml.safe_dump(environment, outputfile) except IOError: log.warning( - 'There was an error while writing the new Conda ' - 'environment file.', + "There was an error while writing the new Conda " + "environment file.", ) def _get_core_requirements(self): @@ -376,10 +374,10 @@ def install_core_requirements(self): # not appended to the ``environment.yml`` file. cmd = [ self.conda_bin_name(), - 'install', - '--yes', - '--quiet', - '--name', + "install", + "--yes", + "--quiet", + "--name", self.version.slug, ] cmd.extend(conda_requirements) @@ -392,18 +390,18 @@ def install_core_requirements(self): # Install requirements via ``pip install`` pip_cmd = [ - self.venv_bin(filename='python'), - '-m', - 'pip', - 'install', - '-U', - '--no-cache-dir', + self.venv_bin(filename="python"), + "-m", + "pip", + "install", + "-U", + "--no-cache-dir", ] pip_cmd.extend(pip_requirements) self.build_env.run( *pip_cmd, bin_path=self.venv_bin(), - cwd=self.checkout_path # noqa - no comma here in py27 :/ + cwd=self.checkout_path, # noqa - no comma here in py27 :/ ) def install_requirements_file(self, install): diff --git a/readthedocs/integrations/utils.py b/readthedocs/integrations/utils.py index 98eb1851567..6acfc6eec57 100644 --- a/readthedocs/integrations/utils.py +++ b/readthedocs/integrations/utils.py @@ -1,5 +1,6 @@ """Integration utility functions.""" + def normalize_request_payload(request): """ Normalize the request body, hopefully to JSON. diff --git a/readthedocs/rtd_tests/tests/test_doc_building.py b/readthedocs/rtd_tests/tests/test_doc_building.py index 3d63ae1b7a4..128dab250d3 100644 --- a/readthedocs/rtd_tests/tests/test_doc_building.py +++ b/readthedocs/rtd_tests/tests/test_doc_building.py @@ -19,22 +19,20 @@ from readthedocs.projects.models import Project DUMMY_BUILD_ID = 123 -SAMPLE_UNICODE = 'HérÉ îß sömê ünïçó∂é' -SAMPLE_UTF8_BYTES = SAMPLE_UNICODE.encode('utf-8') +SAMPLE_UNICODE = "HérÉ îß sömê ünïçó∂é" +SAMPLE_UTF8_BYTES = SAMPLE_UNICODE.encode("utf-8") # TODO: these tests need to be re-written to make usage of the Celery handlers # properly to check not recorded/recorded as success. For now, they are # minimally updated to keep working, but they could be improved. 
class TestLocalBuildEnvironment(TestCase): - - def test_command_not_recorded(self): api_client = mock.MagicMock() build_env = LocalBuildEnvironment(api_client=api_client) with build_env: - build_env.run('true', record=False) + build_env.run("true", record=False) self.assertEqual(len(build_env.commands), 0) api_client.command.post.assert_not_called() @@ -44,7 +42,7 @@ def test_record_command_as_success(self): build_env = LocalBuildEnvironment( project=project, build={ - 'id': 1, + "id": 1, }, api_client=api_client, ) @@ -87,33 +85,34 @@ class TestDockerBuildEnvironment(TestCase): """Test docker build environment.""" - fixtures = ['test_data', 'eric'] + fixtures = ["test_data", "eric"] def setUp(self): - self.project = Project.objects.get(slug='pip') - self.version = Version(slug='foo', verbose_name='foobar') + self.project = Project.objects.get(slug="pip") + self.version = Version(slug="foo", verbose_name="foobar") self.project.versions.add(self.version, bulk=False) def test_container_already_exists(self): """Docker container already exists.""" self.mocks.configure_mock( - 'docker_client', { - 'inspect_container.return_value': {'State': {'Running': True}}, - 'exec_create.return_value': {'Id': b'container-foobar'}, - 'exec_start.return_value': b'This is the return', - 'exec_inspect.return_value': {'ExitCode': 0}, + "docker_client", + { + "inspect_container.return_value": {"State": {"Running": True}}, + "exec_create.return_value": {"Id": b"container-foobar"}, + "exec_start.return_value": b"This is the return", + "exec_inspect.return_value": {"ExitCode": 0}, }, ) build_env = DockerBuildEnvironment( version=self.version, project=self.project, - build={'id': DUMMY_BUILD_ID}, + build={"id": DUMMY_BUILD_ID}, ) def _inner(): with build_env: - build_env.run('echo', 'test', cwd='/tmp') + build_env.run("echo", "test", cwd="/tmp") self.assertRaises(BuildAppError, _inner) self.assertEqual(self.mocks.docker_client.exec_create.call_count, 0) @@ -121,48 +120,51 @@ def _inner(): # api() is not called anymore, we use api_v2 instead self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called) # The build failed before executing any command - self.assertFalse(self.mocks.mocks['api_v2.command'].post.called) - self.mocks.mocks['api_v2.build']().put.assert_called_with({ - 'id': DUMMY_BUILD_ID, - 'version': self.version.pk, - 'success': False, - 'project': self.project.pk, - 'setup_error': '', - 'exit_code': 1, - 'length': 0, - 'error': 'A build environment is currently running for this version', - 'setup': '', - 'output': '', - 'state': 'finished', - 'builder': mock.ANY, - }) + self.assertFalse(self.mocks.mocks["api_v2.command"].post.called) + self.mocks.mocks["api_v2.build"]().put.assert_called_with( + { + "id": DUMMY_BUILD_ID, + "version": self.version.pk, + "success": False, + "project": self.project.pk, + "setup_error": "", + "exit_code": 1, + "length": 0, + "error": "A build environment is currently running for this version", + "setup": "", + "output": "", + "state": "finished", + "builder": mock.ANY, + } + ) def test_container_timeout(self): """Docker container timeout and command failure.""" - response = Mock(status_code=404, reason='Container not found') + response = Mock(status_code=404, reason="Container not found") self.mocks.configure_mock( - 'docker_client', { - 'inspect_container.side_effect': [ + "docker_client", + { + "inspect_container.side_effect": [ DockerAPIError( - 'No container found', + "No container found", response, - 'No container found', + "No container found", ), - {'State': 
{'Running': False, 'ExitCode': 42}}, + {"State": {"Running": False, "ExitCode": 42}}, ], - 'exec_create.return_value': {'Id': b'container-foobar'}, - 'exec_start.return_value': b'This is the return', - 'exec_inspect.return_value': {'ExitCode': 0}, + "exec_create.return_value": {"Id": b"container-foobar"}, + "exec_start.return_value": b"This is the return", + "exec_inspect.return_value": {"ExitCode": 0}, }, ) build_env = DockerBuildEnvironment( version=self.version, project=self.project, - build={'id': DUMMY_BUILD_ID}, + build={"id": DUMMY_BUILD_ID}, ) with build_env: - build_env.run('echo', 'test', cwd='/tmp') + build_env.run("echo", "test", cwd="/tmp") self.assertEqual(self.mocks.docker_client.exec_create.call_count, 1) @@ -170,29 +172,33 @@ def test_container_timeout(self): self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called) # The command was saved command = build_env.commands[0] - self.mocks.mocks['api_v2.command'].post.assert_called_once_with({ - 'build': DUMMY_BUILD_ID, - 'command': command.get_command(), - 'description': command.description, - 'output': command.output, - 'exit_code': 0, - 'start_time': command.start_time, - 'end_time': command.end_time, - }) - self.mocks.mocks['api_v2.build']().put.assert_called_with({ - 'id': DUMMY_BUILD_ID, - 'version': self.version.pk, - 'success': False, - 'project': self.project.pk, - 'setup_error': '', - 'exit_code': 1, - 'length': 0, - 'error': 'Build exited due to time out', - 'setup': '', - 'output': '', - 'state': 'finished', - 'builder': mock.ANY, - }) + self.mocks.mocks["api_v2.command"].post.assert_called_once_with( + { + "build": DUMMY_BUILD_ID, + "command": command.get_command(), + "description": command.description, + "output": command.output, + "exit_code": 0, + "start_time": command.start_time, + "end_time": command.end_time, + } + ) + self.mocks.mocks["api_v2.build"]().put.assert_called_with( + { + "id": DUMMY_BUILD_ID, + "version": self.version.pk, + "success": False, + "project": self.project.pk, + "setup_error": "", + "exit_code": 1, + "length": 0, + "error": "Build exited due to time out", + "setup": "", + "output": "", + "state": "finished", + "builder": mock.ANY, + } + ) # NOTE: these tests should be migrated to not use `LocalBuildEnvironment` @@ -203,31 +209,31 @@ def test_container_timeout(self): # # Also note that we require a Docker setting here for the tests to pass, but we # are not using Docker at all. 
-@override_settings(RTD_DOCKER_WORKDIR='/tmp') +@override_settings(RTD_DOCKER_WORKDIR="/tmp") class TestBuildCommand(TestCase): """Test build command creation.""" def test_command_env(self): """Test build command env vars.""" - env = {'FOOBAR': 'foobar', 'BIN_PATH': 'foobar'} - cmd = BuildCommand('echo', environment=env) + env = {"FOOBAR": "foobar", "BIN_PATH": "foobar"} + cmd = BuildCommand("echo", environment=env) for key in list(env.keys()): self.assertEqual(cmd._environment[key], env[key]) def test_result(self): """Test result of output using unix true/false commands.""" - cmd = BuildCommand('true') + cmd = BuildCommand("true") cmd.run() self.assertTrue(cmd.successful) - cmd = BuildCommand('false') + cmd = BuildCommand("false") cmd.run() self.assertTrue(cmd.failed) def test_missing_command(self): """Test missing command.""" - path = os.path.join('non-existant', str(uuid.uuid4())) + path = os.path.join("non-existant", str(uuid.uuid4())) self.assertFalse(os.path.exists(path)) cmd = BuildCommand(path) cmd.run() @@ -252,7 +258,9 @@ def test_output(self): # Mock BuildCommand.sanitized_output just to count the amount of calls, # but use the original method to behaves as real original_sanitized_output = cmd.sanitize_output - with patch('readthedocs.doc_builder.environments.BuildCommand.sanitize_output') as sanitize_output: # noqa + with patch( + "readthedocs.doc_builder.environments.BuildCommand.sanitize_output" + ) as sanitize_output: # noqa sanitize_output.side_effect = original_sanitized_output cmd.run() cmd.save(api_client=api_client) @@ -273,13 +281,13 @@ def test_output(self): def test_error_output(self): """Test error output from command.""" - cmd = BuildCommand(['/bin/bash', '-c', 'echo -n FOOBAR 1>&2']) + cmd = BuildCommand(["/bin/bash", "-c", "echo -n FOOBAR 1>&2"]) cmd.run() - self.assertEqual(cmd.output, 'FOOBAR') + self.assertEqual(cmd.output, "FOOBAR") self.assertEqual(cmd.error, "") def test_sanitize_output(self): - cmd = BuildCommand(['/bin/bash', '-c', 'echo']) + cmd = BuildCommand(["/bin/bash", "-c", "echo"]) checks = ( ("Hola", "Hola"), ("H\x00i", "Hi"), @@ -288,19 +296,21 @@ def test_sanitize_output(self): for output, sanitized in checks: self.assertEqual(cmd.sanitize_output(output), sanitized) - @patch('subprocess.Popen') + @patch("subprocess.Popen") def test_unicode_output(self, mock_subprocess): """Unicode output from command.""" - mock_process = Mock(**{ - 'communicate.return_value': (SAMPLE_UTF8_BYTES, b''), - }) + mock_process = Mock( + **{ + "communicate.return_value": (SAMPLE_UTF8_BYTES, b""), + } + ) mock_subprocess.return_value = mock_process - cmd = BuildCommand(['echo', 'test'], cwd='/tmp/foobar') + cmd = BuildCommand(["echo", "test"], cwd="/tmp/foobar") cmd.run() self.assertEqual( cmd.output, - 'H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9', + "H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9", ) @@ -314,22 +324,22 @@ class TestDockerBuildCommand(TestCase): def test_wrapped_command(self): """Test shell wrapping for Docker chdir.""" cmd = DockerBuildCommand( - ['pip', 'install', 'requests'], - cwd='/tmp/foobar', + ["pip", "install", "requests"], + cwd="/tmp/foobar", ) self.assertEqual( cmd.get_wrapped_command(), "/bin/sh -c 'pip install requests'", ) cmd = DockerBuildCommand( - ['python', '/tmp/foo/pip', 'install', 'Django>1.7'], - cwd='/tmp/foobar', - bin_path='/tmp/foo', + ["python", "/tmp/foo/pip", "install", "Django>1.7"], + cwd="/tmp/foobar", + bin_path="/tmp/foo", ) self.assertEqual( cmd.get_wrapped_command(), ( - '/bin/sh -c ' + 
"/bin/sh -c " "'PATH=/tmp/foo:$PATH " r"python /tmp/foo/pip install Django\>1.7'" ), @@ -338,20 +348,21 @@ def test_wrapped_command(self): def test_unicode_output(self): """Unicode output from command.""" self.mocks.configure_mock( - 'docker_client', { - 'exec_create.return_value': {'Id': b'container-foobar'}, - 'exec_start.return_value': SAMPLE_UTF8_BYTES, - 'exec_inspect.return_value': {'ExitCode': 0}, + "docker_client", + { + "exec_create.return_value": {"Id": b"container-foobar"}, + "exec_start.return_value": SAMPLE_UTF8_BYTES, + "exec_inspect.return_value": {"ExitCode": 0}, }, ) - cmd = DockerBuildCommand(['echo', 'test'], cwd='/tmp/foobar') + cmd = DockerBuildCommand(["echo", "test"], cwd="/tmp/foobar") cmd.build_env = Mock() cmd.build_env.get_client.return_value = self.mocks.docker_client - type(cmd.build_env).container_id = PropertyMock(return_value='foo') + type(cmd.build_env).container_id = PropertyMock(return_value="foo") cmd.run() self.assertEqual( cmd.output, - 'H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9', + "H\xe9r\xc9 \xee\xdf s\xf6m\xea \xfcn\xef\xe7\xf3\u2202\xe9", ) self.assertEqual(self.mocks.docker_client.exec_start.call_count, 1) self.assertEqual(self.mocks.docker_client.exec_create.call_count, 1) @@ -360,18 +371,19 @@ def test_unicode_output(self): def test_command_oom_kill(self): """Command is OOM killed.""" self.mocks.configure_mock( - 'docker_client', { - 'exec_create.return_value': {'Id': b'container-foobar'}, - 'exec_start.return_value': b'Killed\n', - 'exec_inspect.return_value': {'ExitCode': 137}, + "docker_client", + { + "exec_create.return_value": {"Id": b"container-foobar"}, + "exec_start.return_value": b"Killed\n", + "exec_inspect.return_value": {"ExitCode": 137}, }, ) - cmd = DockerBuildCommand(['echo', 'test'], cwd='/tmp/foobar') + cmd = DockerBuildCommand(["echo", "test"], cwd="/tmp/foobar") cmd.build_env = Mock() cmd.build_env.get_client.return_value = self.mocks.docker_client - type(cmd.build_env).container_id = PropertyMock(return_value='foo') + type(cmd.build_env).container_id = PropertyMock(return_value="foo") cmd.run() self.assertIn( - 'Command killed due to timeout or excessive memory consumption\n', + "Command killed due to timeout or excessive memory consumption\n", str(cmd.output), ) diff --git a/readthedocs/rtd_tests/tests/test_footer.py b/readthedocs/rtd_tests/tests/test_footer.py index c54b1a2155a..f6cb4b91d83 100644 --- a/readthedocs/rtd_tests/tests/test_footer.py +++ b/readthedocs/rtd_tests/tests/test_footer.py @@ -320,7 +320,6 @@ def test_not_built_versions(self): class TestFooterHTML(BaseTestFooterHTML, TestCase): - pass @@ -329,7 +328,6 @@ class TestFooterHTML(BaseTestFooterHTML, TestCase): PUBLIC_DOMAIN_USES_HTTPS=True, ) class TestVersionCompareFooter(TestCase): - fixtures = ["test_data", "eric"] def setUp(self): @@ -471,6 +469,7 @@ def test_private_highest_version(self): } self.assertDictEqual(valid_data, returned_data) + @pytest.mark.proxito @override_settings( PUBLIC_DOMAIN="readthedocs.io", diff --git a/readthedocs/rtd_tests/tests/test_resolver.py b/readthedocs/rtd_tests/tests/test_resolver.py index 1212c4b71cf..b7fa0d3fa58 100644 --- a/readthedocs/rtd_tests/tests/test_resolver.py +++ b/readthedocs/rtd_tests/tests/test_resolver.py @@ -17,6 +17,7 @@ resolver = Resolver() + @override_settings( PUBLIC_DOMAIN="readthedocs.org", RTD_DEFAULT_FEATURES=dict([RTDProductFeature(type=TYPE_CNAME).to_item()]), @@ -62,6 +63,7 @@ def setUp(self): ) self.subproject.translations.add(self.subproject_translation) + class 
SmartResolverPathTests(ResolverBase): def test_resolver_filename(self): url = resolver.resolve_path(project=self.pip, filename="/foo/bar/blah.html") diff --git a/readthedocs/search/apps.py b/readthedocs/search/apps.py index fbde660a4d7..430dbc5e6bc 100644 --- a/readthedocs/search/apps.py +++ b/readthedocs/search/apps.py @@ -3,7 +3,7 @@ class SearchConfig(AppConfig): default_auto_field = "django.db.models.BigAutoField" - name = 'readthedocs.search' + name = "readthedocs.search" def ready(self): import readthedocs.search.signals # noqa
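
For reference, the changes above are what Black produces when run over the files listed in the diffstat. A minimal sketch of reproducing the pass locally (illustrative only, not part of the patch; it assumes Black is installed in the development environment):

    # Illustrative only: re-run Black over the files touched by this patch.
    import subprocess

    files = [
        "readthedocs/config/models.py",
        "readthedocs/doc_builder/backends/mkdocs.py",
        "readthedocs/doc_builder/config.py",
        "readthedocs/doc_builder/constants.py",
        "readthedocs/doc_builder/python_environments.py",
        "readthedocs/integrations/utils.py",
        "readthedocs/rtd_tests/tests/test_doc_building.py",
        "readthedocs/rtd_tests/tests/test_footer.py",
        "readthedocs/rtd_tests/tests/test_resolver.py",
        "readthedocs/search/apps.py",
    ]
    subprocess.run(["black", *files], check=True)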