Merge pull request #1849 from PyCQA/handle-multiline-fstrings-in-312
handle multiline fstrings in 3.12
asottile committed Jul 29, 2023
2 parents f264195 + 1ed78d5 commit d4d1552
Showing 5 changed files with 83 additions and 53 deletions.
11 changes: 11 additions & 0 deletions src/flake8/_compat.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+import sys
+import tokenize
+
+if sys.version_info >= (3, 12):
+    FSTRING_START = tokenize.FSTRING_START
+    FSTRING_MIDDLE = tokenize.FSTRING_MIDDLE
+    FSTRING_END = tokenize.FSTRING_END
+else:
+    FSTRING_START = FSTRING_MIDDLE = FSTRING_END = -1
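
A minimal sketch (not part of the diff) of how call sites can lean on the sentinel: no real token type is ever -1, so comparing against FSTRING_START simply never matches on interpreters older than 3.12 and no sys.version_info check is needed at the point of use.

# Sketch, assuming flake8 with this commit is installed; the helper name is
# illustrative, not from the flake8 codebase.
import tokenize

from flake8._compat import FSTRING_START


def looks_like_fstring_start(token: tokenize.TokenInfo) -> bool:
    # On 3.12+ this compares against tokenize.FSTRING_START; on older
    # interpreters FSTRING_START is -1, so the comparison is always False.
    return token.type == FSTRING_START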
15 changes: 8 additions & 7 deletions src/flake8/checker.py
@@ -20,6 +20,7 @@
 from flake8 import exceptions
 from flake8 import processor
 from flake8 import utils
+from flake8._compat import FSTRING_START
 from flake8.discover_files import expand_paths
 from flake8.options.parse_args import parse_args
 from flake8.plugins.finder import Checkers
@@ -551,15 +552,17 @@ def check_physical_eol(
     ) -> None:
         """Run physical checks if and only if it is at the end of the line."""
         assert self.processor is not None
+        if token.type == FSTRING_START:  # pragma: >=3.12 cover
+            self.processor.fstring_start(token.start[0])
         # a newline token ends a single physical line.
-        if processor.is_eol_token(token):
+        elif processor.is_eol_token(token):
             # if the file does not end with a newline, the NEWLINE
             # token is inserted by the parser, but it does not contain
             # the previous physical line in `token[4]`
-            if token[4] == "":
+            if token.line == "":
                 self.run_physical_checks(prev_physical)
             else:
-                self.run_physical_checks(token[4])
+                self.run_physical_checks(token.line)
         elif processor.is_multiline_string(token):
             # Less obviously, a string that contains newlines is a
             # multiline string, either triple-quoted or with internal
@@ -572,10 +575,8 @@
             # - have to wind self.line_number back because initially it
             #   points to the last line of the string, and we want
             #   check_physical() to give accurate feedback
-            line_no = token[2][0]
-            with self.processor.inside_multiline(line_number=line_no):
-                for line in self.processor.split_line(token):
-                    self.run_physical_checks(line)
+            for line in self.processor.multiline_string(token):
+                self.run_physical_checks(line)


 def _try_initialize_processpool(
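
For context (not part of the diff): on 3.12 the tokenizer no longer emits a multiline f-string as a single STRING token, which is why the checker records the FSTRING_START line and replays it when the closing token arrives. A rough illustration:

# Rough illustration, assuming Python 3.12+: a multiline f-string is split
# into FSTRING_START / FSTRING_MIDDLE / FSTRING_END plus the tokens of any
# interpolated expressions, and only the start token knows the first line.
import io
import tokenize

src = 'x = f"""\nhello {y}\n"""\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
    print(tokenize.tok_name[tok.type], tok.start, tok.end)
# On 3.12 this includes an FSTRING_START beginning on line 1 and an
# FSTRING_END beginning on line 3; on earlier versions the same source yields
# one STRING token spanning lines 1-3, so token.start[0] was already the
# start of the string.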
53 changes: 26 additions & 27 deletions src/flake8/processor.py
@@ -3,9 +3,7 @@

 import argparse
 import ast
-import contextlib
 import logging
-import sys
 import tokenize
 from typing import Any
 from typing import Generator
@@ -14,6 +12,8 @@

 from flake8 import defaults
 from flake8 import utils
+from flake8._compat import FSTRING_END
+from flake8._compat import FSTRING_MIDDLE
 from flake8.plugins.finder import LoadedPlugin

 LOG = logging.getLogger(__name__)
@@ -117,6 +117,7 @@ def __init__(
         self._file_tokens: list[tokenize.TokenInfo] | None = None
         # map from line number to the line we'll search for `noqa` in
         self._noqa_line_mapping: dict[int, str] | None = None
+        self._fstring_start = -1

     @property
     def file_tokens(self) -> list[tokenize.TokenInfo]:
@@ -129,14 +130,26 @@ def file_tokens(self) -> list[tokenize.TokenInfo]:

         return self._file_tokens

-    @contextlib.contextmanager
-    def inside_multiline(
-        self, line_number: int
-    ) -> Generator[None, None, None]:
-        """Context-manager to toggle the multiline attribute."""
-        self.line_number = line_number
+    def fstring_start(self, lineno: int) -> None:
+        """Signal the beginning of an fstring."""
+        self._fstring_start = lineno
+
+    def multiline_string(
+        self, token: tokenize.TokenInfo
+    ) -> Generator[str, None, None]:
+        """Iterate through the lines of a multiline string."""
+        if token.type == FSTRING_END:
+            start = self._fstring_start
+        else:
+            start = token.start[0]
+
         self.multiline = True
-        yield
+        self.line_number = start
+        # intentionally don't include the last line, that line will be
+        # terminated later by a future end-of-line
+        for _ in range(start, token.end[0]):
+            yield self.lines[self.line_number - 1]
+            self.line_number += 1
         self.multiline = False

     def reset_blank_before(self) -> None:
@@ -196,10 +209,7 @@ def build_logical_line_tokens(self) -> _Logical:  # noqa: C901
                 continue
             if token_type == tokenize.STRING:
                 text = mutate_string(text)
-            elif (
-                sys.version_info >= (3, 12)
-                and token_type == tokenize.FSTRING_MIDDLE
-            ):
+            elif token_type == FSTRING_MIDDLE:
                 text = "x" * len(text)
             if previous_row:
                 (start_row, start_column) = start
@@ -231,19 +241,6 @@ def build_logical_line(self) -> tuple[str, str, _LogicalMapping]:
         self.statistics["logical lines"] += 1
         return joined_comments, self.logical_line, mapping_list

-    def split_line(
-        self, token: tokenize.TokenInfo
-    ) -> Generator[str, None, None]:
-        """Split a physical line's line based on new-lines.
-
-        This also auto-increments the line number for the caller.
-        """
-        # intentionally don't include the last line, that line will be
-        # terminated later by a future end-of-line
-        for line_no in range(token.start[0], token.end[0]):
-            yield self.lines[line_no - 1]
-            self.line_number += 1
-
     def keyword_arguments_for(
         self,
         parameters: dict[str, bool],
@@ -398,7 +395,9 @@ def is_eol_token(token: tokenize.TokenInfo) -> bool:

 def is_multiline_string(token: tokenize.TokenInfo) -> bool:
     """Check if this is a multiline string."""
-    return token[0] == tokenize.STRING and "\n" in token[1]
+    return token.type == FSTRING_END or (
+        token.type == tokenize.STRING and "\n" in token.string
+    )


 def token_is_newline(token: tokenize.TokenInfo) -> bool:
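
A small sketch (hand-built token, not from the diff) of why the updated predicate needs the FSTRING_END branch:

# Hand-constructed example: a classic triple-quoted string still arrives as
# one STRING token whose .string contains newlines, so the original check
# keeps working for it.
import tokenize

from flake8 import processor

plain = tokenize.TokenInfo(
    type=tokenize.STRING,
    string='"""\ncontents\n"""',
    start=(1, 4),
    end=(3, 3),
    line='x = """\n',
)
assert processor.is_multiline_string(plain)
# A 3.12 FSTRING_END token's .string is just '"""' with no newline in it, so
# the token type alone has to signal the multiline case -- and the start line
# must come from the fstring_start() bookkeeping rather than the token itself.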
32 changes: 32 additions & 0 deletions tests/integration/test_plugins.py
@@ -199,6 +199,38 @@ def test_physical_line_plugin_multiline_string(tmpdir, capsys):
     assert out == expected


+def test_physical_line_plugin_multiline_fstring(tmpdir, capsys):
+    cfg_s = f"""\
+[flake8:local-plugins]
+extension =
+    T = {yields_physical_line.__module__}:{yields_physical_line.__name__}
+"""
+
+    cfg = tmpdir.join("tox.ini")
+    cfg.write(cfg_s)
+
+    src = '''\
+y = 1
+x = f"""
+hello {y}
+"""
+'''
+    t_py = tmpdir.join("t.py")
+    t_py.write_binary(src.encode())
+
+    with tmpdir.as_cwd():
+        assert main(("t.py", "--config", str(cfg))) == 1
+
+    expected = '''\
+t.py:1:1: T001 'y = 1\\n'
+t.py:2:1: T001 'x = f"""\\n'
+t.py:3:1: T001 'hello {y}\\n'
+t.py:4:1: T001 '"""\\n'
+'''
+    out, err = capsys.readouterr()
+    assert out == expected
+
+
 def yields_logical_line(logical_line):
     yield 0, f"T001 {logical_line!r}"

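
The yields_physical_line helper referenced in the config is defined earlier in tests/integration/test_plugins.py and is not part of this diff; judging from the expected T001 output and the yields_logical_line helper shown above, it is presumably along these lines:

# Presumed shape of the helper; its real definition lives outside this diff.
def yields_physical_line(physical_line):
    yield 0, f"T001 {physical_line!r}"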
25 changes: 6 additions & 19 deletions tests/unit/test_file_processor.py
@@ -275,13 +275,15 @@ def test_processor_split_line(default_options):
         (3, 3),
         'x = """\ncontents\n"""\n',
     )
-    expected = [('x = """\n', 0), ("contents\n", 1)]
+    expected = [('x = """\n', 1, True), ("contents\n", 2, True)]
+    assert file_processor.multiline is False
     actual = [
-        (line, file_processor.line_number)
-        for line in file_processor.split_line(token)
+        (line, file_processor.line_number, file_processor.multiline)
+        for line in file_processor.multiline_string(token)
     ]
+    assert file_processor.multiline is False
     assert expected == actual
-    assert file_processor.line_number == 2
+    assert file_processor.line_number == 3


 def test_build_ast(default_options):
@@ -321,21 +323,6 @@ def test_visited_new_blank_line(default_options):
     assert file_processor.blank_lines == 1


-def test_inside_multiline(default_options):
-    """Verify we update the line number and reset multiline."""
-    file_processor = processor.FileProcessor(
-        "-", default_options, lines=["a = 1\n"]
-    )
-
-    assert file_processor.multiline is False
-    assert file_processor.line_number == 0
-    with file_processor.inside_multiline(10):
-        assert file_processor.multiline is True
-        assert file_processor.line_number == 10
-
-    assert file_processor.multiline is False
-
-
 @pytest.mark.parametrize(
     "string, expected",
     [
