Commit 1ed78d5

handle multiline fstrings in 3.12

1 parent f264195 commit 1ed78d5

5 files changed: +83 -53 lines changed

src/flake8/_compat.py (+11)

@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+import sys
+import tokenize
+
+if sys.version_info >= (3, 12):
+    FSTRING_START = tokenize.FSTRING_START
+    FSTRING_MIDDLE = tokenize.FSTRING_MIDDLE
+    FSTRING_END = tokenize.FSTRING_END
+else:
+    FSTRING_START = FSTRING_MIDDLE = FSTRING_END = -1

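For context: on Python 3.12 the tokenizer emits dedicated FSTRING_START, FSTRING_MIDDLE and FSTRING_END tokens for f-strings, while older interpreters produce a single STRING token, which is why the shim above falls back to -1 sentinels before 3.12. A minimal sketch (not part of the commit) to see the difference:

import io
import tokenize

# On 3.12 this prints FSTRING_START / FSTRING_MIDDLE / FSTRING_END tokens;
# on earlier versions the whole f-string arrives as one STRING token.
src = 'x = f"""\nhello {y}\n"""\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))
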
src/flake8/checker.py (+8 -7)

@@ -20,6 +20,7 @@
 from flake8 import exceptions
 from flake8 import processor
 from flake8 import utils
+from flake8._compat import FSTRING_START
 from flake8.discover_files import expand_paths
 from flake8.options.parse_args import parse_args
 from flake8.plugins.finder import Checkers
@@ -551,15 +552,17 @@ def check_physical_eol(
     ) -> None:
         """Run physical checks if and only if it is at the end of the line."""
         assert self.processor is not None
+        if token.type == FSTRING_START:  # pragma: >=3.12 cover
+            self.processor.fstring_start(token.start[0])
         # a newline token ends a single physical line.
-        if processor.is_eol_token(token):
+        elif processor.is_eol_token(token):
             # if the file does not end with a newline, the NEWLINE
             # token is inserted by the parser, but it does not contain
             # the previous physical line in `token[4]`
-            if token[4] == "":
+            if token.line == "":
                 self.run_physical_checks(prev_physical)
             else:
-                self.run_physical_checks(token[4])
+                self.run_physical_checks(token.line)
         elif processor.is_multiline_string(token):
             # Less obviously, a string that contains newlines is a
             # multiline string, either triple-quoted or with internal
@@ -572,10 +575,8 @@ def check_physical_eol(
             # - have to wind self.line_number back because initially it
             #   points to the last line of the string, and we want
             #   check_physical() to give accurate feedback
-            line_no = token[2][0]
-            with self.processor.inside_multiline(line_number=line_no):
-                for line in self.processor.split_line(token):
-                    self.run_physical_checks(line)
+            for line in self.processor.multiline_string(token):
+                self.run_physical_checks(line)
 
 
 def _try_initialize_processpool(

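The branch order above: an FSTRING_START token only records where the f-string began, end-of-line tokens keep driving per-line checks, and the FSTRING_END token is now treated as a multiline string (see processor.py below), so the string's interior lines get checked too. A simplified, self-contained replay of that dispatch, assuming only the stdlib tokenizer; the describe() helper is illustrative, not flake8 API:

import io
import sys
import tokenize

FSTRING_START = tokenize.FSTRING_START if sys.version_info >= (3, 12) else -1
FSTRING_END = tokenize.FSTRING_END if sys.version_info >= (3, 12) else -1


def describe(tok: tokenize.TokenInfo) -> str:
    # mirrors the if/elif chain in check_physical_eol, without the plugin machinery
    if tok.type == FSTRING_START:
        return f"record f-string start at line {tok.start[0]}"
    elif tok.type in (tokenize.NEWLINE, tokenize.NL):
        return f"physical check of line {tok.start[0]}"
    elif tok.type == FSTRING_END or (
        tok.type == tokenize.STRING and "\n" in tok.string
    ):
        return "physical checks for every line of the string except the last"
    else:
        return "no physical-line handling"


src = 'y = 1\nx = f"""\nhello {y}\n"""\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    print(f"{tokenize.tok_name[tok.type]:<15} {describe(tok)}")
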
src/flake8/processor.py (+26 -27)

@@ -3,9 +3,7 @@
 
 import argparse
 import ast
-import contextlib
 import logging
-import sys
 import tokenize
 from typing import Any
 from typing import Generator
@@ -14,6 +12,8 @@
 
 from flake8 import defaults
 from flake8 import utils
+from flake8._compat import FSTRING_END
+from flake8._compat import FSTRING_MIDDLE
 from flake8.plugins.finder import LoadedPlugin
 
 LOG = logging.getLogger(__name__)
@@ -117,6 +117,7 @@ def __init__(
         self._file_tokens: list[tokenize.TokenInfo] | None = None
         # map from line number to the line we'll search for `noqa` in
         self._noqa_line_mapping: dict[int, str] | None = None
+        self._fstring_start = -1
 
     @property
     def file_tokens(self) -> list[tokenize.TokenInfo]:
@@ -129,14 +130,26 @@ def file_tokens(self) -> list[tokenize.TokenInfo]:
 
         return self._file_tokens
 
-    @contextlib.contextmanager
-    def inside_multiline(
-        self, line_number: int
-    ) -> Generator[None, None, None]:
-        """Context-manager to toggle the multiline attribute."""
-        self.line_number = line_number
+    def fstring_start(self, lineno: int) -> None:
+        """Signal the beginning of an fstring."""
+        self._fstring_start = lineno
+
+    def multiline_string(
+        self, token: tokenize.TokenInfo
+    ) -> Generator[str, None, None]:
+        """Iterate through the lines of a multiline string."""
+        if token.type == FSTRING_END:
+            start = self._fstring_start
+        else:
+            start = token.start[0]
+
         self.multiline = True
-        yield
+        self.line_number = start
+        # intentionally don't include the last line, that line will be
+        # terminated later by a future end-of-line
+        for _ in range(start, token.end[0]):
+            yield self.lines[self.line_number - 1]
+            self.line_number += 1
         self.multiline = False
 
     def reset_blank_before(self) -> None:
@@ -196,10 +209,7 @@ def build_logical_line_tokens(self) -> _Logical:  # noqa: C901
                 continue
             if token_type == tokenize.STRING:
                 text = mutate_string(text)
-            elif (
-                sys.version_info >= (3, 12)
-                and token_type == tokenize.FSTRING_MIDDLE
-            ):
+            elif token_type == FSTRING_MIDDLE:
                 text = "x" * len(text)
             if previous_row:
                 (start_row, start_column) = start
@@ -231,19 +241,6 @@ def build_logical_line(self) -> tuple[str, str, _LogicalMapping]:
         self.statistics["logical lines"] += 1
         return joined_comments, self.logical_line, mapping_list
 
-    def split_line(
-        self, token: tokenize.TokenInfo
-    ) -> Generator[str, None, None]:
-        """Split a physical line's line based on new-lines.
-
-        This also auto-increments the line number for the caller.
-        """
-        # intentionally don't include the last line, that line will be
-        # terminated later by a future end-of-line
-        for line_no in range(token.start[0], token.end[0]):
-            yield self.lines[line_no - 1]
-            self.line_number += 1
-
     def keyword_arguments_for(
         self,
         parameters: dict[str, bool],
@@ -398,7 +395,9 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:
 
 def is_multiline_string(token: tokenize.TokenInfo) -> bool:
     """Check if this is a multiline string."""
-    return token[0] == tokenize.STRING and "\n" in token[1]
+    return token.type == FSTRING_END or (
+        token.type == tokenize.STRING and "\n" in token.string
+    )
 
 
 def token_is_newline(token: tokenize.TokenInfo) -> bool:

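multiline_string() folds the old inside_multiline() context manager and split_line() generator into one place: it rewinds line_number to the first line of the string (the line recorded by fstring_start() when the token is FSTRING_END, otherwise token.start[0]), yields every physical line except the last, and toggles the multiline flag around the iteration. A stand-alone sketch of that yield pattern (plain variables, not an actual FileProcessor), for an f-string spanning lines 2-4 of a 4-line file:

lines = ["y = 1\n", 'x = f"""\n', "hello {y}\n", '"""\n']
start, end = 2, 4      # start: line recorded by fstring_start(); end: FSTRING_END's end row
line_number = start    # wind the line counter back to the top of the string
multiline = True
for _ in range(start, end):                # the last line is intentionally skipped;
    print(repr(lines[line_number - 1]))    # its own end-of-line token terminates it later
    line_number += 1
multiline = False
# prints the physical lines 'x = f"""\n' and 'hello {y}\n'
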
tests/integration/test_plugins.py (+32)

@@ -199,6 +199,38 @@ def test_physical_line_plugin_multiline_string(tmpdir, capsys):
     assert out == expected
 
 
+def test_physical_line_plugin_multiline_fstring(tmpdir, capsys):
+    cfg_s = f"""\
+[flake8:local-plugins]
+extension =
+    T = {yields_physical_line.__module__}:{yields_physical_line.__name__}
+"""
+
+    cfg = tmpdir.join("tox.ini")
+    cfg.write(cfg_s)
+
+    src = '''\
+y = 1
+x = f"""
+hello {y}
+"""
+'''
+    t_py = tmpdir.join("t.py")
+    t_py.write_binary(src.encode())
+
+    with tmpdir.as_cwd():
+        assert main(("t.py", "--config", str(cfg))) == 1
+
+    expected = '''\
+t.py:1:1: T001 'y = 1\\n'
+t.py:2:1: T001 'x = f"""\\n'
+t.py:3:1: T001 'hello {y}\\n'
+t.py:4:1: T001 '"""\\n'
+'''
+    out, err = capsys.readouterr()
+    assert out == expected
+
+
 def yields_logical_line(logical_line):
     yield 0, f"T001 {logical_line!r}"
 
tests/unit/test_file_processor.py (+6 -19)

@@ -275,13 +275,15 @@ def test_processor_split_line(default_options):
         (3, 3),
         'x = """\ncontents\n"""\n',
     )
-    expected = [('x = """\n', 0), ("contents\n", 1)]
+    expected = [('x = """\n', 1, True), ("contents\n", 2, True)]
+    assert file_processor.multiline is False
     actual = [
-        (line, file_processor.line_number)
-        for line in file_processor.split_line(token)
+        (line, file_processor.line_number, file_processor.multiline)
+        for line in file_processor.multiline_string(token)
     ]
+    assert file_processor.multiline is False
     assert expected == actual
-    assert file_processor.line_number == 2
+    assert file_processor.line_number == 3
 
 
 def test_build_ast(default_options):
@@ -321,21 +323,6 @@ def test_visited_new_blank_line(default_options):
     assert file_processor.blank_lines == 1
 
 
-def test_inside_multiline(default_options):
-    """Verify we update the line number and reset multiline."""
-    file_processor = processor.FileProcessor(
-        "-", default_options, lines=["a = 1\n"]
-    )
-
-    assert file_processor.multiline is False
-    assert file_processor.line_number == 0
-    with file_processor.inside_multiline(10):
-        assert file_processor.multiline is True
-        assert file_processor.line_number == 10
-
-    assert file_processor.multiline is False
-
-
 @pytest.mark.parametrize(
     "string, expected",
     [