Remove unused Tokenizer.full_match (#48650)

This commit is contained in:
Harmen Stoppels
2025-01-24 15:53:42 +01:00
committed by GitHub
parent 7fbb3df6b0
commit 25a3e8ba59

View File

@@ -50,7 +50,6 @@ class Tokenizer:
def __init__(self, tokens: Type[TokenBase]):
    """Build a tokenizer over the given token enumeration.

    Compiles one alternation regex with a named group per token, so a
    match's ``lastgroup`` later identifies which token kind matched.

    Args:
        tokens: the token enumeration to tokenize with; each member must
            expose a ``.regex`` attribute and str() to a valid regex
            group name (NOTE(review): assumed from usage — TODO confirm
            against TokenBase).
    """
    self.tokens = tokens
    self.regex = re.compile("|".join(f"(?P<{token}>{token.regex})" for token in tokens))
    # Removed: self.full_match = True — attribute was write-only and
    # never read (unused state), per its removal upstream.
def tokenize(self, text: str) -> Generator[Token, None, None]:
if not text: