Refactor token.matches to be centered around a positive set only. For convenience just a string is considered a set containing one element.
KOLANICH committed Dec 2, 2022
1 parent 8200cd7 commit 2706c6c
Showing 2 changed files with 16 additions and 14 deletions.
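For context before the diffs: after this commit, Token.matches always performs a positive membership test against a set of token names, and a bare string argument is treated as a one-element set. The following is a minimal standalone sketch of those semantics (the Token class here is a simplified stand-in mirroring the _tokenizer.py hunk below, not the actual packaging module), with hypothetical token values used purely for illustration:

    from typing import Set, Union

    TokenNameMatchT = Union[str, Set[str]]

    class Token:
        def __init__(self, name: str, text: str, position: int) -> None:
            self.name = name
            self.text = text
            self.position = position

        def matches(self, name: TokenNameMatchT = "") -> bool:
            # A bare string is wrapped into a singleton set, so both call
            # styles below go through the same membership check.
            if isinstance(name, str):
                name = {name}
            return self.name in name

    token = Token("OP", ">=", 0)
    print(token.matches("OP"))          # True: "OP" becomes {"OP"}
    print(token.matches({"OP", "IN"}))  # True: explicit positive set
    print(token.matches("IN"))          # False: no empty/wildcard match anymore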
packaging/_parser.py (6 changes: 3 additions & 3 deletions)
@@ -74,15 +74,15 @@ def parse_named_requirement(requirement: str) -> Requirement:
     specifier = ""
     url = ""
     if tokens.match("URL_SPEC"):
-        url = tokens.read(None).text[1:].strip()
+        url = tokens.read(tokens.rules).text[1:].strip()
     elif not tokens.match("END"):
         specifier = parse_specifier(tokens)
     if tokens.try_read("SEMICOLON"):
         marker = ""
         while not tokens.match("END"):
             # we don't validate markers here, it's done later as part of
             # packaging/requirements.py
-            marker += tokens.read(None).text
+            marker += tokens.read(tokens.rules).text
     else:
         marker = ""
     tokens.expect(
@@ -220,7 +220,7 @@ def parse_marker_op(tokens: Tokenizer) -> Op:
         tokens.read("IN", error_message="NOT token must be follewed by IN token")
         return Op("not in")
     elif tokens.match("OP"):
-        return Op(tokens.read(None).text)
+        return Op(tokens.read(tokens.rules).text)
     else:
         return tokens.raise_syntax_error(
             message='Couldn\'t parse marker operator. Expecting one of \
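A note on the caller-side change above: the old spelling tokens.read(None) meant "read whatever token comes next", but with matches reduced to a positive membership test (see the _tokenizer.py diff below) there is no None or empty-string wildcard, so "any token" is now written as tokens.read(tokens.rules), i.e. the full collection of rule names. A minimal sketch of why that works, assuming Tokenizer.rules is a mapping keyed by token name (as in packaging's tokenizer, though the mapping itself is not shown in this diff); the rule names and stand-in patterns below are illustrative only:

    # Membership on a dict tests its keys, so passing the whole rules mapping
    # satisfies the positive-set check in Token.matches for every token the
    # tokenizer can produce.
    rules = {"URL_SPEC": None, "SEMICOLON": None, "OP": None, "END": None}

    print("OP" in rules)       # True: dict membership checks the keys
    print("BOGUS" in rules)    # False: unknown names still fail the check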
packaging/_tokenizer.py (24 changes: 13 additions & 11 deletions)
@@ -1,20 +1,22 @@
 import re
-from typing import Dict, Generator, NoReturn, Optional
+from typing import Dict, Generator, NoReturn, Optional, Set, Union

 from .specifiers import Specifier

+TokenNameMatchT = Union[str, Set[str]]
+

 class Token:
     def __init__(self, name: str, text: str, position: int) -> None:
         self.name = name
         self.text = text
         self.position = position

-    def matches(self, name: str = "") -> bool:
-        if not name:
-            return True
+    def matches(self, name: TokenNameMatchT = "") -> bool:
+        if isinstance(name, str):
+            name = {name}

-        if self.name == name:
+        if self.name in name:
             return True

         return False
@@ -99,14 +101,14 @@ def peek(self) -> Token:
             self.next_token = next(self.generator)
         return self.next_token

-    def match(self, name: str) -> bool:
+    def match(self, name: TokenNameMatchT) -> bool:
         """
         Return True if the next token matches the given arguments.
         """
         token = self.peek()
         return token.matches(name)

-    def expect(self, name: str, error_message: str) -> Token:
+    def expect(self, name: TokenNameMatchT, error_message: str) -> Token:
         """
         Raise SyntaxError if the next token doesn't match given arguments.
         """
@@ -115,7 +117,7 @@ def expect(self, name: str, error_message: str) -> Token:
             raise self.raise_syntax_error(message=error_message)
         return token

-    def read(self, name: str, error_message: str = "") -> Token:
+    def read(self, name: TokenNameMatchT, error_message: str = "") -> Token:
         """Return the next token and advance to the next token.

         Raise SyntaxError if the token doesn't match.
@@ -124,13 +126,13 @@ def read(self, name: str, error_message: str = "") -> Token:
         self.next_token = None
         return result

-    def try_read(self, name: str) -> Optional[Token]:
+    def try_read(self, name: TokenNameMatchT) -> Optional[Token]:
         """read() if the next token matches the given arguments.

         Do nothing if it does not match.
         """
         if self.match(name):
-            return self.read(None)
+            return self.read(self.rules)
         return None

     def raise_syntax_error(self, *, message: str) -> NoReturn:
@@ -144,7 +146,7 @@ def raise_syntax_error(self, *, message: str) -> NoReturn:
             self.position,
         )

-    def _make_token(self, name: str, text: str) -> Token:
+    def _make_token(self, name: TokenNameMatchT, text: str) -> Token:
         """
         Make a token with the current position.
         """
