Skip to content

Commit

Permalink
properly handle FSTRING_MIDDLE escaped braces
Browse files (browse the repository at this point in the history)
  • Loading branch information
asottile committed Aug 4, 2024
1 parent 106600d commit cb7836b
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 0 deletions.
25 changes: 25 additions & 0 deletions tests/tokenize_rt_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,31 @@ def test_src_to_tokens_multiline_string():
]


def test_src_to_tokens_fstring_with_escapes():
    """Doubled (escaped) braces inside an f-string survive a round trip.

    On 3.12+ the tokenizer emits FSTRING_* tokens and the escaped braces
    stay doubled in the FSTRING_MIDDLE token text; on older versions the
    whole f-string is a single STRING token.
    """
    src = 'f" a {{ {b} }} c"'
    tokens = src_to_tokens(src)
    if sys.version_info >= (3, 12):  # pragma: >=3.12 cover
        expected = [
            Token(name='FSTRING_START', src='f"', line=1, utf8_byte_offset=0),
            Token(name='FSTRING_MIDDLE', src=' a {{', line=1, utf8_byte_offset=2),  # noqa: E501
            Token(name='FSTRING_MIDDLE', src=' ', line=1, utf8_byte_offset=7),
            Token(name='OP', src='{', line=1, utf8_byte_offset=8),
            Token(name='NAME', src='b', line=1, utf8_byte_offset=9),
            Token(name='OP', src='}', line=1, utf8_byte_offset=10),
            Token(name='FSTRING_MIDDLE', src=' }}', line=1, utf8_byte_offset=11),  # noqa: E501
            Token(name='FSTRING_MIDDLE', src=' c', line=1, utf8_byte_offset=14),  # noqa: E501
            Token(name='FSTRING_END', src='"', line=1, utf8_byte_offset=16),
            Token(name='NEWLINE', src='', line=1, utf8_byte_offset=17),
            Token(name='ENDMARKER', src='', line=2, utf8_byte_offset=0),
        ]
    else:  # pragma: <3.12 cover
        expected = [
            Token(name='STRING', src='f" a {{ {b} }} c"', line=1, utf8_byte_offset=0),  # noqa: E501
            Token(name='NEWLINE', src='', line=1, utf8_byte_offset=17),
            Token(name='ENDMARKER', src='', line=2, utf8_byte_offset=0),
        ]
    assert tokens == expected


@pytest.mark.parametrize(
'filename',
(
Expand Down
5 changes: 5 additions & 0 deletions tokenize_rt.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,11 @@ def src_to_tokens(src: str) -> list[Token]:
end_offset += len(newtok.encode())

tok_name = tokenize.tok_name[tok_type]

if tok_name == 'FSTRING_MIDDLE': # pragma: >=3.12 cover
ecol += tok_text.count('{') + tok_text.count('}')
tok_text = tok_text.replace('{', '{{').replace('}', '}}')

tokens.append(Token(tok_name, tok_text, sline, end_offset))
last_line, last_col = eline, ecol
if sline != eline:
Expand Down

0 comments on commit cb7836b

Please sign in to comment.