diff --git a/pytest.ini b/pytest.ini
index 3dccb16..e31ea24 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -5,7 +5,6 @@
 filterwarnings =
     ignore::DeprecationWarning
     ignore::ResourceWarning
-# --capture=no - disable per-test capture
 # --tb=long sets the length of the traceback in case of failures
-addopts = --capture=no --tb=long --verbose
+addopts = --tb=long --verbose
 pythonpath = reportsizedeltas
diff --git a/reportsizedeltas/reportsizedeltas.py b/reportsizedeltas/reportsizedeltas.py
index 0aca1c4..948dfb3 100644
--- a/reportsizedeltas/reportsizedeltas.py
+++ b/reportsizedeltas/reportsizedeltas.py
@@ -630,19 +630,26 @@ def raw_http_request(self, url: str, data: bytes | None = None):
         request = urllib.request.Request(url=url, headers=headers, data=data)
 
         retry_count = 0
-        while retry_count <= maximum_urlopen_retries:
-            retry_count += 1
+        while True:
             try:
                 # The rate limit API is not subject to rate limiting
                 if url.startswith("https://api.github.com") and not url.startswith("https://api.github.com/rate_limit"):
                     self.handle_rate_limiting()
 
                 return urllib.request.urlopen(url=request)
-            except Exception as exception:
-                if not determine_urlopen_retry(exception=exception):
-                    raise exception
+            except urllib.error.HTTPError as exception:
+                if determine_urlopen_retry(exception=exception):
+                    if retry_count < maximum_urlopen_retries:
+                        retry_count += 1
+                        continue
+                    else:
+                        # Maximum retries reached without successfully opening URL
+                        print("Maximum number of URL load retries exceeded")
+
+                print(f"::error::{exception.__class__.__name__}: {exception}")
+                for line in exception.fp:
+                    print(line.decode(encoding="utf-8", errors="ignore"))
-        # Maximum retries reached without successfully opening URL
-        raise TimeoutError("Maximum number of URL load retries exceeded")
+                raise exception
 
     def handle_rate_limiting(self) -> None:
         """Check whether the GitHub API request limit has been reached.
@@ -664,7 +671,7 @@ def handle_rate_limiting(self) -> None:
             sys.exit(0)
 
 
-def determine_urlopen_retry(exception) -> bool:
+def determine_urlopen_retry(exception: urllib.error.HTTPError) -> bool:
     """Determine whether the exception warrants another attempt at opening the URL.
 
     If so, delay then return True. Otherwise, return False.
diff --git a/reportsizedeltas/tests/test_reportsizedeltas.py b/reportsizedeltas/tests/test_reportsizedeltas.py
index 3c33450..f56a387 100644
--- a/reportsizedeltas/tests/test_reportsizedeltas.py
+++ b/reportsizedeltas/tests/test_reportsizedeltas.py
@@ -901,14 +901,16 @@ def test_raw_http_request(mocker):
     urllib.request.urlopen.assert_called_once_with(url=request)
 
     # urllib.request.urlopen() has non-recoverable exception
-    urllib.request.urlopen.side_effect = Exception()
+    urllib.request.urlopen.side_effect = urllib.error.HTTPError(
+        url="http://example.com", code=404, msg="", hdrs=None, fp=None
+    )
     mocker.patch("reportsizedeltas.determine_urlopen_retry", autospec=True, return_value=False)
-    with pytest.raises(expected_exception=Exception):
+    with pytest.raises(expected_exception=urllib.error.HTTPError):
         report_size_deltas.raw_http_request(url=url, data=data)
 
     # urllib.request.urlopen() has potentially recoverable exceptions, but exceeds retry count
     reportsizedeltas.determine_urlopen_retry.return_value = True
-    with pytest.raises(expected_exception=TimeoutError):
+    with pytest.raises(expected_exception=urllib.error.HTTPError):
         report_size_deltas.raw_http_request(url=url, data=data)