diff --git a/lib/spack/spack/tokenize.py b/lib/spack/spack/tokenize.py
index f42ecb2e95c..f5e9b37803e 100644
--- a/lib/spack/spack/tokenize.py
+++ b/lib/spack/spack/tokenize.py
@@ -50,7 +50,6 @@ class Tokenizer:
     def __init__(self, tokens: Type[TokenBase]):
         self.tokens = tokens
         self.regex = re.compile("|".join(f"(?P<{token}>{token.regex})" for token in tokens))
-        self.full_match = True
 
     def tokenize(self, text: str) -> Generator[Token, None, None]:
         if not text: