Skip to content

Commit

Permalink
Merge pull request #973 from pre-commit/string-fixer-3-12
Browse files Browse the repository at this point in the history
don't rewrite string quotes inside f-strings
  • Loading branch information
asottile authored Oct 7, 2023
2 parents 6cad770 + f27ee31 commit ae9b59f
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 1 deletion.
15 changes: 14 additions & 1 deletion pre_commit_hooks/string_fixer.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,16 @@
import argparse
import io
import re
import sys
import tokenize
from typing import Sequence

# Python 3.12 added dedicated tokens for f-string boundaries (PEP 701).
# On earlier versions, fall back to sentinels that match no real token type.
if sys.version_info < (3, 12):  # pragma: <3.12 cover
    FSTRING_START = FSTRING_END = -1
else:  # pragma: >=3.12 cover
    FSTRING_START = tokenize.FSTRING_START
    FSTRING_END = tokenize.FSTRING_END

# Matches an optional alphabetic string prefix (b, r, f, ...) followed by
# a leading double quote.
START_QUOTE_RE = re.compile(r'^[a-zA-Z]*"')


Expand Down Expand Up @@ -40,11 +47,17 @@ def fix_strings(filename: str) -> int:
# Basically a mutable string
splitcontents = list(contents)

fstring_depth = 0

# Iterate in reverse so the offsets are always correct
tokens_l = list(tokenize.generate_tokens(io.StringIO(contents).readline))
tokens = reversed(tokens_l)
for token_type, token_text, (srow, scol), (erow, ecol), _ in tokens:
if token_type == tokenize.STRING:
if token_type == FSTRING_START: # pragma: >=3.12 cover
fstring_depth += 1
elif token_type == FSTRING_END: # pragma: >=3.12 cover
fstring_depth -= 1
elif fstring_depth == 0 and token_type == tokenize.STRING:
new_text = handle_match(token_text)
splitcontents[
line_offsets[srow] + scol:
Expand Down
6 changes: 6 additions & 0 deletions tests/string_fixer_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,12 @@
1,
),
('"foo""bar"', "'foo''bar'", 1),
pytest.param(
"f'hello{\"world\"}'",
"f'hello{\"world\"}'",
0,
id='ignore nested fstrings',
),
)


Expand Down

0 comments on commit ae9b59f

Please sign in to comment.