turn endprog into endprog_stack
tusharsadhwani committed Sep 10, 2023
1 parent c1ecc14 · commit 644c5cc
Showing 1 changed file with 10 additions and 2 deletions.
src/blib2to3/pgen2/tokenize.py: 10 additions & 2 deletions
@@ -482,7 +482,7 @@ def generate_tokens(
async_def_nl = False

strstart: Tuple[int, int]
endprog: Pattern[str]
endprog_stack: list[Pattern[str]] = []

while 1: # loop over lines in stream
try:
@@ -496,6 +496,7 @@ def generate_tokens(
assert contline is not None
if not line:
raise TokenError("EOF in multi-line string", strstart)
endprog = endprog_stack[-1]
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
@@ -515,6 +516,7 @@
else:
yield (FSTRING_END, token, spos, epos, tokenline)
fstring_level -= 1
endprog_stack.pop()
# TODO: contstr reliance doesn't work now because we can be inside
# an fstring and still empty contstr right here.
contstr, needcont = "", 0
@@ -605,6 +607,7 @@ def generate_tokens(

while pos < max:
if fstring_level > 0 and not inside_fstring_braces:
endprog = endprog_stack[-1]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
start, end = endmatch.span(0)
@@ -619,6 +622,7 @@
yield (FSTRING_MIDDLE, token, (lnum, 0), (lnum, 0), line)
yield (FSTRING_END, token, (lnum, 0), (lnum, 0), line)
fstring_level -= 1
endprog_stack.pop()
else:
# TODO: most of the positions are wrong
yield (FSTRING_MIDDLE, token, (lnum, 0), (lnum, 0), line)
@@ -657,11 +661,12 @@ def generate_tokens(
stashed = None
yield (COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = endprogs[token]
if token.startswith("f"):
yield (FSTRING_START, token, spos, epos, line)
fstring_level += 1
endprog_stack.append(endprog)

endprog = endprogs[token]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
if stashed:
@@ -704,6 +709,7 @@ def generate_tokens(
assert maybe_endprog is not None, f"endprog not found for {token}"
endprog = maybe_endprog
if token[-1] == "\n": # continued string
endprog_stack.append(endprog)
strstart = (lnum, start)
contstr, needcont = line[start:], 1
contline = line
@@ -727,6 +733,8 @@ def generate_tokens(
start_epos = (lnum, start + offset - 1)
yield (FSTRING_START, fstring_start, spos, start_epos, line)
fstring_level += 1
endprog = endprogs[fstring_start]
endprog_stack.append(endprog)

end_offset = pseudomatch.end() - 1
fstring_middle = line[start + offset - 1 : end_offset]
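Why a stack: with nested f-strings, a single endprog variable is overwritten as soon as an inner string opens, so the tokenizer forgets which pattern terminates the outer one. Keeping the patterns on a stack and always matching against endprog_stack[-1] preserves the enclosing string's closer, and each FSTRING_END pops back to it. The sketch below illustrates that push/pop discipline in isolation; it is a toy, not the blib2to3 tokenizer, and ENDPROGS and demo_nested_fstrings are hypothetical stand-ins for the real endprogs table and generate_tokens.

import re
from typing import Pattern

# Simplified end patterns keyed by the quote that opened the f-string. The
# real endprogs table in tokenize.py is richer (escapes, every prefix/quote
# combination); these entries are assumptions made for this demo only.
ENDPROGS: dict[str, Pattern[str]] = {
    '"""': re.compile(r'"""'),
    "'''": re.compile(r"'''"),
    '"': re.compile(r'"'),
    "'": re.compile(r"'"),
}

def demo_nested_fstrings(src: str) -> list[tuple[str, str]]:
    """Walk src, pushing an end pattern when an f-string opens and popping it
    when the matching closer is found, so an inner f-string cannot clobber
    the closer of the f-string enclosing it."""
    endprog_stack: list[Pattern[str]] = []  # mirrors endprog_stack in the diff
    events: list[tuple[str, str]] = []
    pos = 0
    while pos < len(src):
        # Does an f-string start here? (Check the longest quotes first.)
        opened = next(
            (q for q in ('"""', "'''", '"', "'") if src.startswith("f" + q, pos)),
            None,
        )
        if opened is not None:
            endprog_stack.append(ENDPROGS[opened])  # push the closer we now owe
            events.append(("FSTRING_START", "f" + opened))
            pos += 1 + len(opened)
            continue
        # Does the innermost open f-string end here? Always match the top.
        if endprog_stack:
            endmatch = endprog_stack[-1].match(src, pos)
            if endmatch:
                endprog_stack.pop()  # back to the enclosing string's closer
                events.append(("FSTRING_END", endmatch.group(0)))
                pos = endmatch.end()
                continue
        pos += 1
    return events

print(demo_nested_fstrings('''f"outer {f'inner'} tail"'''))
# [('FSTRING_START', 'f"'), ('FSTRING_START', "f'"),
#  ('FSTRING_END', "'"), ('FSTRING_END', '"')]

In the commit itself the same pattern appears as endprog_stack.append(endprog) wherever an f-string start is recognized, endprog_stack.pop() alongside each fstring_level -= 1, and endprog = endprog_stack[-1] wherever the old single variable used to be read.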
