Skip to content

Commit dc819ff

Browse files
committed
test: two tests for #1828
1 parent 9aaa404 commit dc819ff

File tree

3 files changed: +43 −2 lines changed

coverage/phystokens.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -57,14 +57,14 @@ def _phys_tokens(toks: TokenInfos) -> TokenInfos:
5757
if last_ttext.endswith("\\"):
5858
inject_backslash = False
5959
elif ttype == token.STRING:
60-
if last_line.endswith(last_ttext+"\\\n"):
60+
if last_line.endswith(last_ttext + "\\\n"):
6161
# Deal with special cases like such code::
6262
#
6363
# a = ["aaa",\
6464
# "bbb \
6565
# ccc"]
6666
#
67-
pass
67+
inject_backslash = True
6868
elif "\n" in ttext and ttext.split("\n", 1)[0][-1] == "\\":
6969
# It's a multi-line string and the first line ends with
7070
# a backslash, so we don't need to inject another.

tests/test_html.py

+22
Original file line number | Diff line number | Diff line change
@@ -1131,6 +1131,28 @@ def test_tabbed(self) -> None:
11311131

11321132
doesnt_contain("out/tabbed_py.html", "\t")
11331133

1134+
def test_bug_1828(self) -> None:
1135+
# https://github.com/nedbat/coveragepy/pull/1828
1136+
self.make_file("backslashes.py", """\
1137+
a = ["aaa",\\
1138+
"bbb \\
1139+
ccc"]
1140+
""")
1141+
1142+
cov = coverage.Coverage()
1143+
backslashes = self.start_import_stop(cov, "backslashes")
1144+
cov.html_report(backslashes, directory="out")
1145+
1146+
contains(
1147+
"out/backslashes_py.html",
1148+
# line 2 is `"bbb \`
1149+
r'<a id="t2" href="#t2">2</a></span>'
1150+
+ r'<span class="t"> <span class="str">"bbb \</span>',
1151+
# line 3 is `ccc"]`
1152+
r'<a id="t3" href="#t3">3</a></span>'
1153+
+ r'<span class="t"><span class="str"> ccc"</span><span class="op">]</span>',
1154+
)
1155+
11341156
def test_unicode(self) -> None:
11351157
surrogate = "\U000e0100"
11361158

tests/test_phystokens.py

+19
Original file line number | Diff line number | Diff line change
@@ -98,6 +98,24 @@ def test_tokenize_real_file(self) -> None:
9898
real_file = os.path.join(TESTS_DIR, "test_coverage.py")
9999
self.check_file_tokenization(real_file)
100100

101+
def test_1828(self) -> None:
102+
# https://github.com/nedbat/coveragepy/pull/1828
103+
tokens = list(source_token_lines(textwrap.dedent("""
104+
x = \
105+
1
106+
a = ["aaa",\\
107+
"bbb \\
108+
ccc"]
109+
""")))
110+
assert tokens == [
111+
[],
112+
[('nam', 'x'), ('ws', ' '), ('op', '='), ('ws', ' '), ('num', '1')],
113+
[('nam', 'a'), ('ws', ' '), ('op', '='), ('ws', ' '),
114+
('op', '['), ('str', '"aaa"'), ('op', ','), ('xx', '\\')],
115+
[('ws', ' '), ('str', '"bbb \\')],
116+
[('str', ' ccc"'), ('op', ']')],
117+
]
118+
101119
@pytest.mark.parametrize("fname", [
102120
"stress_phystoken.tok",
103121
"stress_phystoken_dos.tok",
@@ -113,6 +131,7 @@ def test_stress(self, fname: str) -> None:
113131
with open(stress) as fstress:
114132
assert re.search(r"(?m) $", fstress.read()), f"{stress} needs a trailing space."
115133

134+
116135
@pytest.mark.skipif(not env.PYBEHAVIOR.soft_keywords, reason="Soft keywords are new in Python 3.10")
117136
class SoftKeywordTest(CoverageTest):
118137
"""Tests the tokenizer handling soft keywords."""

0 commit comments

Comments (0)