Remove unused Tokenizer.full_match (#48650)
parent 7fbb3df6b0
commit 25a3e8ba59
@@ -50,7 +50,6 @@ class Tokenizer:
     def __init__(self, tokens: Type[TokenBase]):
         self.tokens = tokens
         self.regex = re.compile("|".join(f"(?P<{token}>{token.regex})" for token in tokens))
-        self.full_match = True

     def tokenize(self, text: str) -> Generator[Token, None, None]:
         if not text:
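For context, here is a minimal, self-contained sketch of how a tokenizer built this way is typically used. The TokenBase hierarchy and the Token type are not part of this diff, so the NUMBER and WORD classes and the Token named tuple below are hypothetical stand-ins; the real class interpolates the token class itself into the group name, whereas this sketch uses __name__.

import re
from typing import Generator, Iterable, NamedTuple


class Token(NamedTuple):
    """A matched token: the token class name and the matched text (stand-in for the real Token)."""
    kind: str
    value: str


class NUMBER:
    # Hypothetical token class: a run of digits.
    regex = r"\d+"


class WORD:
    # Hypothetical token class: a run of letters.
    regex = r"[A-Za-z]+"


class Tokenizer:
    def __init__(self, tokens: Iterable[type]):
        self.tokens = tokens
        # Build one alternation with a named group per token class,
        # e.g. (?P<NUMBER>\d+)|(?P<WORD>[A-Za-z]+)
        self.regex = re.compile(
            "|".join(f"(?P<{token.__name__}>{token.regex})" for token in tokens)
        )

    def tokenize(self, text: str) -> Generator[Token, None, None]:
        if not text:
            return
        for match in self.regex.finditer(text):
            # lastgroup is the name of the token class whose pattern matched.
            yield Token(match.lastgroup, match.group())


if __name__ == "__main__":
    tokenizer = Tokenizer([NUMBER, WORD])
    print(list(tokenizer.tokenize("abc 123")))
    # [Token(kind='WORD', value='abc'), Token(kind='NUMBER', value='123')]

Nothing in the class reads the full_match attribute removed above, which is why the commit drops it.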