spec.parser / spec.token: improvements (#48063)
Follow-up to #47956

* Rename `token.py` -> `tokenize.py`
* Rename `parser.py` -> `spec_parser.py`
* Move common code related to iterating over tokens into `tokenize.py`
* Add "unexpected character token" (i.e. `.`) to `SpecTokens` by default instead of having a separate tokenizer / regex.
This commit is contained in:
parent
396a701860
commit
687766b8ab
@ -178,8 +178,8 @@ Spec-related modules
|
||||
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
|
||||
of specs.
|
||||
|
||||
:mod:`spack.parser`
|
||||
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
|
||||
:mod:`spack.spec_parser`
|
||||
Contains :class:`~spack.spec_parser.SpecParser` and functions related to parsing specs.
|
||||
|
||||
:mod:`spack.version`
|
||||
Implements a simple :class:`~spack.version.Version` class with simple
|
||||
|
@ -24,12 +24,11 @@
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.extensions
|
||||
import spack.parser
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.spec_parser
|
||||
import spack.store
|
||||
import spack.token
|
||||
import spack.traverse as traverse
|
||||
import spack.user_environment as uenv
|
||||
import spack.util.spack_json as sjson
|
||||
@ -164,12 +163,12 @@ def quote_kvp(string: str) -> str:
|
||||
or ``name==``, and we assume the rest of the argument is the value. This covers the
|
||||
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
|
||||
"""
|
||||
match = spack.parser.SPLIT_KVP.match(string)
|
||||
match = spack.spec_parser.SPLIT_KVP.match(string)
|
||||
if not match:
|
||||
return string
|
||||
|
||||
key, delim, value = match.groups()
|
||||
return f"{key}{delim}{spack.token.quote_if_needed(value)}"
|
||||
return f"{key}{delim}{spack.spec_parser.quote_if_needed(value)}"
|
||||
|
||||
|
||||
def parse_specs(
|
||||
@ -181,7 +180,7 @@ def parse_specs(
|
||||
args = [args] if isinstance(args, str) else args
|
||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||
|
||||
specs = spack.parser.parse(arg_string)
|
||||
specs = spack.spec_parser.parse(arg_string)
|
||||
if not concretize:
|
||||
return specs
|
||||
|
||||
|
@ -21,7 +21,7 @@
|
||||
|
||||
import spack.config
|
||||
import spack.mirrors.mirror
|
||||
import spack.token
|
||||
import spack.tokenize
|
||||
import spack.util.web
|
||||
|
||||
from .image import ImageReference
|
||||
@ -57,7 +57,7 @@ def dispatch_open(fullurl, data=None, timeout=None):
|
||||
quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"'
|
||||
|
||||
|
||||
class TokenType(spack.token.TokenBase):
|
||||
class WwwAuthenticateTokens(spack.tokenize.TokenBase):
|
||||
AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})"
|
||||
# TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)" # todo... support this?
|
||||
TOKEN = rf"{tchar}+"
|
||||
@ -68,9 +68,7 @@ class TokenType(spack.token.TokenBase):
|
||||
ANY = r"."
|
||||
|
||||
|
||||
TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
|
||||
|
||||
ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
|
||||
WWW_AUTHENTICATE_TOKENIZER = spack.tokenize.Tokenizer(WwwAuthenticateTokens)
|
||||
|
||||
|
||||
class State(Enum):
|
||||
@ -81,18 +79,6 @@ class State(Enum):
|
||||
AUTH_PARAM_OR_SCHEME = auto()
|
||||
|
||||
|
||||
def tokenize(input: str):
|
||||
scanner = ALL_TOKENS.scanner(input) # type: ignore[attr-defined]
|
||||
|
||||
for match in iter(scanner.match, None): # type: ignore[var-annotated]
|
||||
yield spack.token.Token(
|
||||
TokenType.__members__[match.lastgroup], # type: ignore[attr-defined]
|
||||
match.group(), # type: ignore[attr-defined]
|
||||
match.start(), # type: ignore[attr-defined]
|
||||
match.end(), # type: ignore[attr-defined]
|
||||
)
|
||||
|
||||
|
||||
class Challenge:
|
||||
__slots__ = ["scheme", "params"]
|
||||
|
||||
@ -128,7 +114,7 @@ def parse_www_authenticate(input: str):
|
||||
unquote = lambda s: _unquote(r"\1", s[1:-1])
|
||||
|
||||
mode: State = State.CHALLENGE
|
||||
tokens = tokenize(input)
|
||||
tokens = WWW_AUTHENTICATE_TOKENIZER.tokenize(input)
|
||||
|
||||
current_challenge = Challenge()
|
||||
|
||||
@ -141,36 +127,36 @@ def extract_auth_param(input: str) -> Tuple[str, str]:
|
||||
return key, value
|
||||
|
||||
while True:
|
||||
token: spack.token.Token = next(tokens)
|
||||
token: spack.tokenize.Token = next(tokens)
|
||||
|
||||
if mode == State.CHALLENGE:
|
||||
if token.kind == TokenType.EOF:
|
||||
if token.kind == WwwAuthenticateTokens.EOF:
|
||||
raise ValueError(token)
|
||||
elif token.kind == TokenType.TOKEN:
|
||||
elif token.kind == WwwAuthenticateTokens.TOKEN:
|
||||
current_challenge.scheme = token.value
|
||||
mode = State.AUTH_PARAM_LIST_START
|
||||
else:
|
||||
raise ValueError(token)
|
||||
|
||||
elif mode == State.AUTH_PARAM_LIST_START:
|
||||
if token.kind == TokenType.EOF:
|
||||
if token.kind == WwwAuthenticateTokens.EOF:
|
||||
challenges.append(current_challenge)
|
||||
break
|
||||
elif token.kind == TokenType.COMMA:
|
||||
elif token.kind == WwwAuthenticateTokens.COMMA:
|
||||
# Challenge without param list, followed by another challenge.
|
||||
challenges.append(current_challenge)
|
||||
current_challenge = Challenge()
|
||||
mode = State.CHALLENGE
|
||||
elif token.kind == TokenType.SPACE:
|
||||
elif token.kind == WwwAuthenticateTokens.SPACE:
|
||||
# A space means it must be followed by param list
|
||||
mode = State.AUTH_PARAM
|
||||
else:
|
||||
raise ValueError(token)
|
||||
|
||||
elif mode == State.AUTH_PARAM:
|
||||
if token.kind == TokenType.EOF:
|
||||
if token.kind == WwwAuthenticateTokens.EOF:
|
||||
raise ValueError(token)
|
||||
elif token.kind == TokenType.AUTH_PARAM:
|
||||
elif token.kind == WwwAuthenticateTokens.AUTH_PARAM:
|
||||
key, value = extract_auth_param(token.value)
|
||||
current_challenge.params.append((key, value))
|
||||
mode = State.NEXT_IN_LIST
|
||||
@ -178,22 +164,22 @@ def extract_auth_param(input: str) -> Tuple[str, str]:
|
||||
raise ValueError(token)
|
||||
|
||||
elif mode == State.NEXT_IN_LIST:
|
||||
if token.kind == TokenType.EOF:
|
||||
if token.kind == WwwAuthenticateTokens.EOF:
|
||||
challenges.append(current_challenge)
|
||||
break
|
||||
elif token.kind == TokenType.COMMA:
|
||||
elif token.kind == WwwAuthenticateTokens.COMMA:
|
||||
mode = State.AUTH_PARAM_OR_SCHEME
|
||||
else:
|
||||
raise ValueError(token)
|
||||
|
||||
elif mode == State.AUTH_PARAM_OR_SCHEME:
|
||||
if token.kind == TokenType.EOF:
|
||||
if token.kind == WwwAuthenticateTokens.EOF:
|
||||
raise ValueError(token)
|
||||
elif token.kind == TokenType.TOKEN:
|
||||
elif token.kind == WwwAuthenticateTokens.TOKEN:
|
||||
challenges.append(current_challenge)
|
||||
current_challenge = Challenge(token.value)
|
||||
mode = State.AUTH_PARAM_LIST_START
|
||||
elif token.kind == TokenType.AUTH_PARAM:
|
||||
elif token.kind == WwwAuthenticateTokens.AUTH_PARAM:
|
||||
key, value = extract_auth_param(token.value)
|
||||
current_challenge.params.append((key, value))
|
||||
mode = State.NEXT_IN_LIST
|
||||
|
@ -26,14 +26,14 @@ def _validate_spec(validator, is_spec, instance, schema):
|
||||
"""Check if the attributes on instance are valid specs."""
|
||||
import jsonschema
|
||||
|
||||
import spack.parser
|
||||
import spack.spec_parser
|
||||
|
||||
if not validator.is_type(instance, "object"):
|
||||
return
|
||||
|
||||
for spec_str in instance:
|
||||
try:
|
||||
spack.parser.parse(spec_str)
|
||||
spack.spec_parser.parse(spec_str)
|
||||
except SpecSyntaxError as e:
|
||||
yield jsonschema.ValidationError(str(e))
|
||||
|
||||
|
@ -77,14 +77,13 @@
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.parser
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.provider_index
|
||||
import spack.repo
|
||||
import spack.solver
|
||||
import spack.spec_parser
|
||||
import spack.store
|
||||
import spack.token
|
||||
import spack.traverse as traverse
|
||||
import spack.util.executable
|
||||
import spack.util.hash
|
||||
@ -613,7 +612,7 @@ def __init__(self, *args):
|
||||
# If there is one argument, it's either another CompilerSpec
|
||||
# to copy or a string to parse
|
||||
if isinstance(arg, str):
|
||||
spec = spack.parser.parse_one_or_raise(f"%{arg}")
|
||||
spec = spack.spec_parser.parse_one_or_raise(f"%{arg}")
|
||||
self.name = spec.compiler.name
|
||||
self.versions = spec.compiler.versions
|
||||
|
||||
@ -951,11 +950,13 @@ def __str__(self):
|
||||
for flag_type, flags in sorted_items:
|
||||
normal = [f for f in flags if not f.propagate]
|
||||
if normal:
|
||||
result += f" {flag_type}={spack.token.quote_if_needed(' '.join(normal))}"
|
||||
value = spack.spec_parser.quote_if_needed(" ".join(normal))
|
||||
result += f" {flag_type}={value}"
|
||||
|
||||
propagated = [f for f in flags if f.propagate]
|
||||
if propagated:
|
||||
result += f" {flag_type}=={spack.token.quote_if_needed(' '.join(propagated))}"
|
||||
value = spack.spec_parser.quote_if_needed(" ".join(propagated))
|
||||
result += f" {flag_type}=={value}"
|
||||
|
||||
# TODO: somehow add this space only if something follows in Spec.format()
|
||||
if sorted_items:
|
||||
@ -1514,7 +1515,7 @@ def __init__(
|
||||
self._build_spec = None
|
||||
|
||||
if isinstance(spec_like, str):
|
||||
spack.parser.parse_one_or_raise(spec_like, self)
|
||||
spack.spec_parser.parse_one_or_raise(spec_like, self)
|
||||
|
||||
elif spec_like is not None:
|
||||
raise TypeError("Can't make spec out of %s" % type(spec_like))
|
||||
|
@ -57,9 +57,11 @@
|
||||
specs to avoid ambiguity. Both are provided because ~ can cause shell
|
||||
expansion when it is the first character in an id typed on the command line.
|
||||
"""
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
from typing import Iterator, List, Match, Optional
|
||||
import sys
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from llnl.util.tty import color
|
||||
|
||||
@ -67,7 +69,7 @@
|
||||
import spack.error
|
||||
import spack.spec
|
||||
import spack.version
|
||||
from spack.token import FILENAME, Token, TokenBase, strip_quotes_and_unescape
|
||||
from spack.tokenize import Token, TokenBase, Tokenizer
|
||||
|
||||
#: Valid name for specs and variants. Here we are not using
|
||||
#: the previous "w[\w.-]*" since that would match most
|
||||
@ -96,8 +98,20 @@
|
||||
#: Regex with groups to use for splitting (optionally propagated) key-value pairs
|
||||
SPLIT_KVP = re.compile(rf"^({NAME})(==?)(.*)$")
|
||||
|
||||
#: A filename starts either with a "." or a "/" or a "{name}/", or on Windows, a drive letter
|
||||
#: followed by a colon and "\" or "." or {name}\
|
||||
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
|
||||
UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
|
||||
FILENAME = WINDOWS_FILENAME if sys.platform == "win32" else UNIX_FILENAME
|
||||
|
||||
class TokenType(TokenBase):
|
||||
#: Regex to strip quotes. Group 2 will be the unquoted string.
|
||||
STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")
|
||||
|
||||
#: Values that match this (e.g., variants, flags) can be left unquoted in Spack output
|
||||
NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")
|
||||
|
||||
|
||||
class SpecTokens(TokenBase):
|
||||
"""Enumeration of the different token kinds in the spec grammar.
|
||||
Order of declaration is extremely important, since text containing specs is parsed with a
|
||||
single regex obtained by ``"|".join(...)`` of all the regex in the order of declaration.
|
||||
@ -128,56 +142,24 @@ class TokenType(TokenBase):
|
||||
DAG_HASH = rf"(?:/(?:{HASH}))"
|
||||
# White spaces
|
||||
WS = r"(?:\s+)"
|
||||
|
||||
|
||||
class ErrorTokenType(TokenBase):
|
||||
"""Enum with regexes for error analysis"""
|
||||
|
||||
# Unexpected character
|
||||
# Unexpected character(s)
|
||||
UNEXPECTED = r"(?:.[\s]*)"
|
||||
|
||||
|
||||
#: List of all the regexes used to match spec parts, in order of precedence
|
||||
TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
|
||||
#: List of all valid regexes followed by error analysis regexes
|
||||
ERROR_HANDLING_REGEXES = TOKEN_REGEXES + [
|
||||
rf"(?P<{token}>{token.regex})" for token in ErrorTokenType
|
||||
]
|
||||
#: Regex to scan a valid text
|
||||
ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
|
||||
#: Regex to analyze an invalid text
|
||||
ANALYSIS_REGEX = re.compile("|".join(ERROR_HANDLING_REGEXES))
|
||||
#: Tokenizer that includes all the regexes in the SpecTokens enum
|
||||
SPEC_TOKENIZER = Tokenizer(SpecTokens)
|
||||
|
||||
|
||||
def tokenize(text: str) -> Iterator[Token]:
|
||||
"""Return a token generator from the text passed as input.
|
||||
|
||||
Raises:
|
||||
SpecTokenizationError: if we can't tokenize anymore, but didn't reach the
|
||||
end of the input text.
|
||||
SpecTokenizationError: when unexpected characters are found in the text
|
||||
"""
|
||||
scanner = ALL_TOKENS.scanner(text) # type: ignore[attr-defined]
|
||||
match: Optional[Match] = None
|
||||
for match in iter(scanner.match, None):
|
||||
# The following two assertions are to help mypy
|
||||
msg = (
|
||||
"unexpected value encountered during parsing. Please submit a bug report "
|
||||
"at https://github.com/spack/spack/issues/new/choose"
|
||||
)
|
||||
assert match is not None, msg
|
||||
assert match.lastgroup is not None, msg
|
||||
yield Token(
|
||||
TokenType.__members__[match.lastgroup], match.group(), match.start(), match.end()
|
||||
)
|
||||
|
||||
if match is None and not text:
|
||||
# We just got an empty string
|
||||
return
|
||||
|
||||
if match is None or match.end() != len(text):
|
||||
scanner = ANALYSIS_REGEX.scanner(text) # type: ignore[attr-defined]
|
||||
matches = [m for m in iter(scanner.match, None)] # type: ignore[var-annotated]
|
||||
raise SpecTokenizationError(matches, text)
|
||||
for token in SPEC_TOKENIZER.tokenize(text):
|
||||
if token.kind == SpecTokens.UNEXPECTED:
|
||||
raise SpecTokenizationError(list(SPEC_TOKENIZER.tokenize(text)), text)
|
||||
yield token
|
||||
|
||||
|
||||
class TokenContext:
|
||||
@ -195,7 +177,7 @@ def advance(self):
|
||||
"""Advance one token"""
|
||||
self.current_token, self.next_token = self.next_token, next(self.token_stream, None)
|
||||
|
||||
def accept(self, kind: TokenType):
|
||||
def accept(self, kind: SpecTokens):
|
||||
"""If the next token is of the specified kind, advance the stream and return True.
|
||||
Otherwise return False.
|
||||
"""
|
||||
@ -204,23 +186,20 @@ def accept(self, kind: TokenType):
|
||||
return True
|
||||
return False
|
||||
|
||||
def expect(self, *kinds: TokenType):
|
||||
def expect(self, *kinds: SpecTokens):
|
||||
return self.next_token and self.next_token.kind in kinds
|
||||
|
||||
|
||||
class SpecTokenizationError(spack.error.SpecSyntaxError):
|
||||
"""Syntax error in a spec string"""
|
||||
|
||||
def __init__(self, matches, text):
|
||||
message = "unexpected tokens in the spec string\n"
|
||||
message += f"{text}"
|
||||
def __init__(self, tokens: List[Token], text: str):
|
||||
message = f"unexpected characters in the spec string\n{text}\n"
|
||||
|
||||
underline = "\n"
|
||||
for match in matches:
|
||||
if match.lastgroup == str(ErrorTokenType.UNEXPECTED):
|
||||
underline += f"{'^' * (match.end() - match.start())}"
|
||||
continue
|
||||
underline += f"{' ' * (match.end() - match.start())}"
|
||||
underline = ""
|
||||
for token in tokens:
|
||||
is_error = token.kind == SpecTokens.UNEXPECTED
|
||||
underline += ("^" if is_error else " ") * (token.end - token.start)
|
||||
|
||||
message += color.colorize(f"@*r{{{underline}}}")
|
||||
super().__init__(message)
|
||||
@ -233,13 +212,13 @@ class SpecParser:
|
||||
|
||||
def __init__(self, literal_str: str):
|
||||
self.literal_str = literal_str
|
||||
self.ctx = TokenContext(filter(lambda x: x.kind != TokenType.WS, tokenize(literal_str)))
|
||||
self.ctx = TokenContext(filter(lambda x: x.kind != SpecTokens.WS, tokenize(literal_str)))
|
||||
|
||||
def tokens(self) -> List[Token]:
|
||||
"""Return the entire list of tokens from the initial text. White spaces are
|
||||
filtered out.
|
||||
"""
|
||||
return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))
|
||||
return list(filter(lambda x: x.kind != SpecTokens.WS, tokenize(self.literal_str)))
|
||||
|
||||
def next_spec(
|
||||
self, initial_spec: Optional["spack.spec.Spec"] = None
|
||||
@ -266,14 +245,14 @@ def add_dependency(dep, **edge_properties):
|
||||
initial_spec = initial_spec or spack.spec.Spec()
|
||||
root_spec = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
|
||||
while True:
|
||||
if self.ctx.accept(TokenType.START_EDGE_PROPERTIES):
|
||||
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
|
||||
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
|
||||
edge_properties.setdefault("depflag", 0)
|
||||
edge_properties.setdefault("virtuals", ())
|
||||
dependency = self._parse_node(root_spec)
|
||||
add_dependency(dependency, **edge_properties)
|
||||
|
||||
elif self.ctx.accept(TokenType.DEPENDENCY):
|
||||
elif self.ctx.accept(SpecTokens.DEPENDENCY):
|
||||
dependency = self._parse_node(root_spec)
|
||||
add_dependency(dependency, depflag=0, virtuals=())
|
||||
|
||||
@ -321,7 +300,7 @@ def parse(
|
||||
Return
|
||||
The object passed as argument
|
||||
"""
|
||||
if not self.ctx.next_token or self.ctx.expect(TokenType.DEPENDENCY):
|
||||
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
|
||||
return initial_spec
|
||||
|
||||
if initial_spec is None:
|
||||
@ -329,17 +308,17 @@ def parse(
|
||||
|
||||
# If we start with a package name we have a named spec, we cannot
|
||||
# accept another package name afterwards in a node
|
||||
if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME):
|
||||
if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
|
||||
initial_spec.name = self.ctx.current_token.value
|
||||
|
||||
elif self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME):
|
||||
elif self.ctx.accept(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME):
|
||||
parts = self.ctx.current_token.value.split(".")
|
||||
name = parts[-1]
|
||||
namespace = ".".join(parts[:-1])
|
||||
initial_spec.name = name
|
||||
initial_spec.namespace = namespace
|
||||
|
||||
elif self.ctx.accept(TokenType.FILENAME):
|
||||
elif self.ctx.accept(SpecTokens.FILENAME):
|
||||
return FileParser(self.ctx).parse(initial_spec)
|
||||
|
||||
def raise_parsing_error(string: str, cause: Optional[Exception] = None):
|
||||
@ -354,7 +333,7 @@ def add_flag(name: str, value: str, propagate: bool):
|
||||
raise_parsing_error(str(e), e)
|
||||
|
||||
while True:
|
||||
if self.ctx.accept(TokenType.COMPILER):
|
||||
if self.ctx.accept(SpecTokens.COMPILER):
|
||||
if self.has_compiler:
|
||||
raise_parsing_error("Spec cannot have multiple compilers")
|
||||
|
||||
@ -362,7 +341,7 @@ def add_flag(name: str, value: str, propagate: bool):
|
||||
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
|
||||
self.has_compiler = True
|
||||
|
||||
elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
|
||||
elif self.ctx.accept(SpecTokens.COMPILER_AND_VERSION):
|
||||
if self.has_compiler:
|
||||
raise_parsing_error("Spec cannot have multiple compilers")
|
||||
|
||||
@ -373,9 +352,9 @@ def add_flag(name: str, value: str, propagate: bool):
|
||||
self.has_compiler = True
|
||||
|
||||
elif (
|
||||
self.ctx.accept(TokenType.VERSION_HASH_PAIR)
|
||||
or self.ctx.accept(TokenType.GIT_VERSION)
|
||||
or self.ctx.accept(TokenType.VERSION)
|
||||
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
|
||||
or self.ctx.accept(SpecTokens.GIT_VERSION)
|
||||
or self.ctx.accept(SpecTokens.VERSION)
|
||||
):
|
||||
if self.has_version:
|
||||
raise_parsing_error("Spec cannot have multiple versions")
|
||||
@ -386,32 +365,32 @@ def add_flag(name: str, value: str, propagate: bool):
|
||||
initial_spec.attach_git_version_lookup()
|
||||
self.has_version = True
|
||||
|
||||
elif self.ctx.accept(TokenType.BOOL_VARIANT):
|
||||
elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
|
||||
variant_value = self.ctx.current_token.value[0] == "+"
|
||||
add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
|
||||
|
||||
elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
|
||||
elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
|
||||
variant_value = self.ctx.current_token.value[0:2] == "++"
|
||||
add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
|
||||
|
||||
elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
|
||||
elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
|
||||
match = SPLIT_KVP.match(self.ctx.current_token.value)
|
||||
assert match, "SPLIT_KVP and KEY_VALUE_PAIR do not agree."
|
||||
|
||||
name, _, value = match.groups()
|
||||
add_flag(name, strip_quotes_and_unescape(value), propagate=False)
|
||||
|
||||
elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
|
||||
elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
|
||||
match = SPLIT_KVP.match(self.ctx.current_token.value)
|
||||
assert match, "SPLIT_KVP and PROPAGATED_KEY_VALUE_PAIR do not agree."
|
||||
|
||||
name, _, value = match.groups()
|
||||
add_flag(name, strip_quotes_and_unescape(value), propagate=True)
|
||||
|
||||
elif self.ctx.expect(TokenType.DAG_HASH):
|
||||
elif self.ctx.expect(SpecTokens.DAG_HASH):
|
||||
if initial_spec.abstract_hash:
|
||||
break
|
||||
self.ctx.accept(TokenType.DAG_HASH)
|
||||
self.ctx.accept(SpecTokens.DAG_HASH)
|
||||
initial_spec.abstract_hash = self.ctx.current_token.value[1:]
|
||||
|
||||
else:
|
||||
@ -461,7 +440,7 @@ def __init__(self, ctx, literal_str):
|
||||
def parse(self):
|
||||
attributes = {}
|
||||
while True:
|
||||
if self.ctx.accept(TokenType.KEY_VALUE_PAIR):
|
||||
if self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
|
||||
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
|
||||
name = name.strip("'\" ")
|
||||
value = value.strip("'\" ").split(",")
|
||||
@ -473,7 +452,7 @@ def parse(self):
|
||||
)
|
||||
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
|
||||
# TODO: Add code to accept bool variants here as soon as use variants are implemented
|
||||
elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES):
|
||||
elif self.ctx.accept(SpecTokens.END_EDGE_PROPERTIES):
|
||||
break
|
||||
else:
|
||||
msg = "unexpected token in edge attributes"
|
||||
@ -536,3 +515,33 @@ def __init__(self, message, token, text):
|
||||
underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
|
||||
message += color.colorize(f"@*r{{{underline}}}")
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
def strip_quotes_and_unescape(string: str) -> str:
|
||||
"""Remove surrounding single or double quotes from string, if present."""
|
||||
match = STRIP_QUOTES.match(string)
|
||||
if not match:
|
||||
return string
|
||||
|
||||
# replace any escaped quotes with bare quotes
|
||||
quote, result = match.groups()
|
||||
return result.replace(rf"\{quote}", quote)
|
||||
|
||||
|
||||
def quote_if_needed(value: str) -> str:
|
||||
"""Add quotes around the value if it requires quotes.
|
||||
|
||||
This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.
|
||||
|
||||
This adds:
|
||||
* single quotes by default
|
||||
* double quotes around any value that contains single quotes
|
||||
|
||||
If double quotes are used, we json-escape the string. That is, we escape ``\\``,
|
||||
``"``, and control codes.
|
||||
|
||||
"""
|
||||
if NO_QUOTES_NEEDED.match(value):
|
||||
return value
|
||||
|
||||
return json.dumps(value) if "'" in value else f"'{value}'"
|
@ -338,10 +338,10 @@ def test_install_conflicts(conflict_spec):
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_packages", "mock_archive", "mock_fetch", "install_mockery")
|
||||
def test_install_invalid_spec(invalid_spec):
|
||||
def test_install_invalid_spec():
|
||||
# Make sure that invalid specs raise a SpackError
|
||||
with pytest.raises(SpecSyntaxError, match="unexpected tokens"):
|
||||
install(invalid_spec)
|
||||
with pytest.raises(SpecSyntaxError, match="unexpected characters"):
|
||||
install("conflict%~")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("noop_install", "mock_packages", "config")
|
||||
|
@ -146,7 +146,7 @@ def test_spec_parse_error():
|
||||
spec("1.15:")
|
||||
|
||||
# make sure the error is formatted properly
|
||||
error_msg = "unexpected tokens in the spec string\n1.15:\n ^"
|
||||
error_msg = "unexpected characters in the spec string\n1.15:\n ^"
|
||||
assert error_msg in str(e.value)
|
||||
|
||||
|
||||
|
@ -1676,12 +1676,6 @@ def conflict_spec(request):
|
||||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture(params=["conflict%~"])
|
||||
def invalid_spec(request):
|
||||
"""Specs that do not parse cleanly due to invalid formatting."""
|
||||
return request.param
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def mock_test_repo(tmpdir_factory):
|
||||
"""Create an empty repository."""
|
||||
|
@ -65,7 +65,7 @@ def test_validate_spec(validate_spec_schema):
|
||||
|
||||
# Check that invalid data throws
|
||||
data["^python@3.7@"] = "baz"
|
||||
with pytest.raises(jsonschema.ValidationError, match="unexpected tokens"):
|
||||
with pytest.raises(jsonschema.ValidationError, match="unexpected characters"):
|
||||
v.validate(data)
|
||||
|
||||
|
||||
@ -74,7 +74,7 @@ def test_module_suffixes(module_suffixes_schema):
|
||||
v = spack.schema.Validator(module_suffixes_schema)
|
||||
data = {"tcl": {"all": {"suffixes": {"^python@2.7@": "py2.7"}}}}
|
||||
|
||||
with pytest.raises(jsonschema.ValidationError, match="unexpected tokens"):
|
||||
with pytest.raises(jsonschema.ValidationError, match="unexpected characters"):
|
||||
v.validate(data)
|
||||
|
||||
|
||||
|
@ -10,10 +10,10 @@
|
||||
import spack.deptypes as dt
|
||||
import spack.directives
|
||||
import spack.error
|
||||
import spack.parser
|
||||
import spack.paths
|
||||
import spack.solver.asp
|
||||
import spack.spec
|
||||
import spack.spec_parser
|
||||
import spack.store
|
||||
import spack.variant
|
||||
import spack.version as vn
|
||||
@ -639,7 +639,7 @@ def test_satisfied_namespace(self):
|
||||
],
|
||||
)
|
||||
def test_propagate_reserved_variant_names(self, spec_string):
|
||||
with pytest.raises(spack.parser.SpecParsingError, match="Propagation"):
|
||||
with pytest.raises(spack.spec_parser.SpecParsingError, match="Propagation"):
|
||||
Spec(spec_string)
|
||||
|
||||
def test_unsatisfiable_multi_value_variant(self, default_mock_concretization):
|
||||
@ -1004,11 +1004,11 @@ def test_spec_formatting_bad_formats(self, default_mock_concretization, fmt_str)
|
||||
|
||||
def test_combination_of_wildcard_or_none(self):
|
||||
# Test that using 'none' and another value raises
|
||||
with pytest.raises(spack.parser.SpecParsingError, match="cannot be combined"):
|
||||
with pytest.raises(spack.spec_parser.SpecParsingError, match="cannot be combined"):
|
||||
Spec("multivalue-variant foo=none,bar")
|
||||
|
||||
# Test that using wildcard and another value raises
|
||||
with pytest.raises(spack.parser.SpecParsingError, match="cannot be combined"):
|
||||
with pytest.raises(spack.spec_parser.SpecParsingError, match="cannot be combined"):
|
||||
Spec("multivalue-variant foo=*,bar")
|
||||
|
||||
def test_errors_in_variant_directive(self):
|
||||
|
@ -14,8 +14,15 @@
|
||||
import spack.platforms.test
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
from spack.parser import SpecParser, SpecParsingError, SpecTokenizationError, TokenType
|
||||
from spack.token import UNIX_FILENAME, WINDOWS_FILENAME, Token
|
||||
from spack.spec_parser import (
|
||||
UNIX_FILENAME,
|
||||
WINDOWS_FILENAME,
|
||||
SpecParser,
|
||||
SpecParsingError,
|
||||
SpecTokenizationError,
|
||||
SpecTokens,
|
||||
)
|
||||
from spack.tokenize import Token
|
||||
|
||||
FAIL_ON_WINDOWS = pytest.mark.xfail(
|
||||
sys.platform == "win32",
|
||||
@ -30,7 +37,7 @@
|
||||
|
||||
def simple_package_name(name):
|
||||
"""A simple package name in canonical form"""
|
||||
return name, [Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value=name)], name
|
||||
return name, [Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value=name)], name
|
||||
|
||||
|
||||
def dependency_with_version(text):
|
||||
@ -39,17 +46,17 @@ def dependency_with_version(text):
|
||||
return (
|
||||
text,
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value=root.strip()),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value=dependency.strip()),
|
||||
Token(TokenType.VERSION, value=f"@{version}"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value=root.strip()),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value=dependency.strip()),
|
||||
Token(SpecTokens.VERSION, value=f"@{version}"),
|
||||
],
|
||||
text,
|
||||
)
|
||||
|
||||
|
||||
def compiler_with_version_range(text):
|
||||
return text, [Token(TokenType.COMPILER_AND_VERSION, value=text)], text
|
||||
return text, [Token(SpecTokens.COMPILER_AND_VERSION, value=text)], text
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@ -81,40 +88,40 @@ def _specfile_for(spec_str, filename):
|
||||
simple_package_name("3dtk"),
|
||||
simple_package_name("ns-3-dev"),
|
||||
# Single token anonymous specs
|
||||
("%intel", [Token(TokenType.COMPILER, value="%intel")], "%intel"),
|
||||
("@2.7", [Token(TokenType.VERSION, value="@2.7")], "@2.7"),
|
||||
("@2.7:", [Token(TokenType.VERSION, value="@2.7:")], "@2.7:"),
|
||||
("@:2.7", [Token(TokenType.VERSION, value="@:2.7")], "@:2.7"),
|
||||
("+foo", [Token(TokenType.BOOL_VARIANT, value="+foo")], "+foo"),
|
||||
("~foo", [Token(TokenType.BOOL_VARIANT, value="~foo")], "~foo"),
|
||||
("-foo", [Token(TokenType.BOOL_VARIANT, value="-foo")], "~foo"),
|
||||
("%intel", [Token(SpecTokens.COMPILER, value="%intel")], "%intel"),
|
||||
("@2.7", [Token(SpecTokens.VERSION, value="@2.7")], "@2.7"),
|
||||
("@2.7:", [Token(SpecTokens.VERSION, value="@2.7:")], "@2.7:"),
|
||||
("@:2.7", [Token(SpecTokens.VERSION, value="@:2.7")], "@:2.7"),
|
||||
("+foo", [Token(SpecTokens.BOOL_VARIANT, value="+foo")], "+foo"),
|
||||
("~foo", [Token(SpecTokens.BOOL_VARIANT, value="~foo")], "~foo"),
|
||||
("-foo", [Token(SpecTokens.BOOL_VARIANT, value="-foo")], "~foo"),
|
||||
(
|
||||
"platform=test",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="platform=test")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="platform=test")],
|
||||
"arch=test-None-None",
|
||||
),
|
||||
# Multiple tokens anonymous specs
|
||||
(
|
||||
"languages=go @4.2:",
|
||||
[
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="languages=go"),
|
||||
Token(TokenType.VERSION, value="@4.2:"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="languages=go"),
|
||||
Token(SpecTokens.VERSION, value="@4.2:"),
|
||||
],
|
||||
"@4.2: languages=go",
|
||||
),
|
||||
(
|
||||
"@4.2: languages=go",
|
||||
[
|
||||
Token(TokenType.VERSION, value="@4.2:"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="languages=go"),
|
||||
Token(SpecTokens.VERSION, value="@4.2:"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="languages=go"),
|
||||
],
|
||||
"@4.2: languages=go",
|
||||
),
|
||||
(
|
||||
"^zlib",
|
||||
[
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
],
|
||||
"^zlib",
|
||||
),
|
||||
@ -122,31 +129,31 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
"openmpi ^hwloc",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="hwloc"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="hwloc"),
|
||||
],
|
||||
"openmpi ^hwloc",
|
||||
),
|
||||
(
|
||||
"openmpi ^hwloc ^libunwind",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="hwloc"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="libunwind"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="hwloc"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="libunwind"),
|
||||
],
|
||||
"openmpi ^hwloc ^libunwind",
|
||||
),
|
||||
(
|
||||
"openmpi ^hwloc^libunwind",
|
||||
[ # White spaces are tested
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="hwloc"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="libunwind"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="hwloc"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="libunwind"),
|
||||
],
|
||||
"openmpi ^hwloc ^libunwind",
|
||||
),
|
||||
@ -154,9 +161,9 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
"foo %bar@1.0 @2.0",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="foo"),
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="%bar@1.0"),
|
||||
Token(TokenType.VERSION, value="@2.0"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="foo"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="%bar@1.0"),
|
||||
Token(SpecTokens.VERSION, value="@2.0"),
|
||||
],
|
||||
"foo@2.0%bar@1.0",
|
||||
),
|
||||
@ -169,32 +176,32 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
"mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4 ^stackwalker@8.1_1e",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(TokenType.VERSION, value="@1.2:1.4,1.6"),
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+debug"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(TokenType.VERSION, value="@8.1_1e"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.VERSION, value="@1.2:1.4,1.6"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+debug"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(SpecTokens.VERSION, value="@8.1_1e"),
|
||||
],
|
||||
"mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4 ^stackwalker@8.1_1e",
|
||||
),
|
||||
(
|
||||
"mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(TokenType.VERSION, value="@1.2:1.4,1.6"),
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="debug=2"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(TokenType.VERSION, value="@8.1_1e"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.VERSION, value="@1.2:1.4,1.6"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="debug=2"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(SpecTokens.VERSION, value="@8.1_1e"),
|
||||
],
|
||||
"mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e",
|
||||
),
|
||||
@ -202,17 +209,17 @@ def _specfile_for(spec_str, filename):
|
||||
"mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 "
|
||||
"^stackwalker@8.1_1e",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(TokenType.VERSION, value="@1.2:1.4,1.6"),
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="cppflags=-O3"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+debug"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(TokenType.VERSION, value="@8.1_1e"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich_foo"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.VERSION, value="@1.2:1.4,1.6"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="cppflags=-O3"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+debug"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(SpecTokens.VERSION, value="@8.1_1e"),
|
||||
],
|
||||
"mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 "
|
||||
"^stackwalker@8.1_1e",
|
||||
@ -221,51 +228,51 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
"yaml-cpp@0.1.8%intel@12.1 ^boost@3.1.4",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="yaml-cpp"),
|
||||
Token(TokenType.VERSION, value="@0.1.8"),
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="boost"),
|
||||
Token(TokenType.VERSION, value="@3.1.4"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="yaml-cpp"),
|
||||
Token(SpecTokens.VERSION, value="@0.1.8"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="boost"),
|
||||
Token(SpecTokens.VERSION, value="@3.1.4"),
|
||||
],
|
||||
"yaml-cpp@0.1.8%intel@12.1 ^boost@3.1.4",
|
||||
),
|
||||
(
|
||||
r"builtin.yaml-cpp%gcc",
|
||||
[
|
||||
Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
|
||||
Token(TokenType.COMPILER, value="%gcc"),
|
||||
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
|
||||
Token(SpecTokens.COMPILER, value="%gcc"),
|
||||
],
|
||||
"yaml-cpp%gcc",
|
||||
),
|
||||
(
|
||||
r"testrepo.yaml-cpp%gcc",
|
||||
[
|
||||
Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.yaml-cpp"),
|
||||
Token(TokenType.COMPILER, value="%gcc"),
|
||||
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.yaml-cpp"),
|
||||
Token(SpecTokens.COMPILER, value="%gcc"),
|
||||
],
|
||||
"yaml-cpp%gcc",
|
||||
),
|
||||
(
|
||||
r"builtin.yaml-cpp@0.1.8%gcc@7.2.0 ^boost@3.1.4",
|
||||
[
|
||||
Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
|
||||
Token(TokenType.VERSION, value="@0.1.8"),
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="%gcc@7.2.0"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="boost"),
|
||||
Token(TokenType.VERSION, value="@3.1.4"),
|
||||
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
|
||||
Token(SpecTokens.VERSION, value="@0.1.8"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="%gcc@7.2.0"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="boost"),
|
||||
Token(SpecTokens.VERSION, value="@3.1.4"),
|
||||
],
|
||||
"yaml-cpp@0.1.8%gcc@7.2.0 ^boost@3.1.4",
|
||||
),
|
||||
(
|
||||
r"builtin.yaml-cpp ^testrepo.boost ^zlib",
|
||||
[
|
||||
Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.boost"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.boost"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
],
|
||||
"yaml-cpp ^boost ^zlib",
|
||||
),
|
||||
@ -273,60 +280,60 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
r"mvapich ^stackwalker ^_openmpi", # Dependencies are reordered
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
],
|
||||
"mvapich ^_openmpi ^stackwalker",
|
||||
),
|
||||
(
|
||||
r"y~f+e~d+c~b+a", # Variants are reordered
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~f"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+e"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~d"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+c"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~b"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+a"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~f"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+e"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~d"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+c"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~b"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+a"),
|
||||
],
|
||||
"y+a~b+c~d+e~f",
|
||||
),
|
||||
("@:", [Token(TokenType.VERSION, value="@:")], r""),
|
||||
("@1.6,1.2:1.4", [Token(TokenType.VERSION, value="@1.6,1.2:1.4")], r"@1.2:1.4,1.6"),
|
||||
("@:", [Token(SpecTokens.VERSION, value="@:")], r""),
|
||||
("@1.6,1.2:1.4", [Token(SpecTokens.VERSION, value="@1.6,1.2:1.4")], r"@1.2:1.4,1.6"),
|
||||
(
|
||||
r"os=fe", # Various translations associated with the architecture
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="os=fe")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="os=fe")],
|
||||
"arch=test-redhat6-None",
|
||||
),
|
||||
(
|
||||
r"os=default_os",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="os=default_os")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="os=default_os")],
|
||||
"arch=test-debian6-None",
|
||||
),
|
||||
(
|
||||
r"target=be",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="target=be")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="target=be")],
|
||||
f"arch=test-None-{spack.platforms.test.Test.default}",
|
||||
),
|
||||
(
|
||||
r"target=default_target",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="target=default_target")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="target=default_target")],
|
||||
f"arch=test-None-{spack.platforms.test.Test.default}",
|
||||
),
|
||||
(
|
||||
r"platform=linux",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="platform=linux")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="platform=linux")],
|
||||
r"arch=linux-None-None",
|
||||
),
|
||||
# Version hash pair
|
||||
(
|
||||
rf"develop-branch-version@{'abc12'*8}=develop",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="develop-branch-version"),
|
||||
Token(TokenType.VERSION_HASH_PAIR, value=f"@{'abc12'*8}=develop"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="develop-branch-version"),
|
||||
Token(SpecTokens.VERSION_HASH_PAIR, value=f"@{'abc12'*8}=develop"),
|
||||
],
|
||||
rf"develop-branch-version@{'abc12'*8}=develop",
|
||||
),
|
||||
@ -334,40 +341,40 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
r"x ^y@foo ^y@foo",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="x"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.VERSION, value="@foo"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.VERSION, value="@foo"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="x"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.VERSION, value="@foo"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.VERSION, value="@foo"),
|
||||
],
|
||||
r"x ^y@foo",
|
||||
),
|
||||
(
|
||||
r"x ^y@foo ^y+bar",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="x"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.VERSION, value="@foo"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+bar"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="x"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.VERSION, value="@foo"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+bar"),
|
||||
],
|
||||
r"x ^y@foo+bar",
|
||||
),
|
||||
(
|
||||
r"x ^y@foo +bar ^y@foo",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="x"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.VERSION, value="@foo"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+bar"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(TokenType.VERSION, value="@foo"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="x"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.VERSION, value="@foo"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+bar"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="y"),
|
||||
Token(SpecTokens.VERSION, value="@foo"),
|
||||
],
|
||||
r"x ^y@foo+bar",
|
||||
),
|
||||
@ -375,43 +382,43 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
r"_openmpi +debug-qt_4", # Parse as a single bool variant
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+debug-qt_4"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+debug-qt_4"),
|
||||
],
|
||||
r"_openmpi+debug-qt_4",
|
||||
),
|
||||
(
|
||||
r"_openmpi +debug -qt_4", # Parse as two variants
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+debug"),
|
||||
Token(TokenType.BOOL_VARIANT, value="-qt_4"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+debug"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="-qt_4"),
|
||||
],
|
||||
r"_openmpi+debug~qt_4",
|
||||
),
|
||||
(
|
||||
r"_openmpi +debug~qt_4", # Parse as two variants
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+debug"),
|
||||
Token(TokenType.BOOL_VARIANT, value="~qt_4"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="_openmpi"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+debug"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
|
||||
],
|
||||
r"_openmpi+debug~qt_4",
|
||||
),
|
||||
# Key value pairs with ":" and "," in the value
|
||||
(
|
||||
r"target=:broadwell,icelake",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="target=:broadwell,icelake")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="target=:broadwell,icelake")],
|
||||
r"arch=None-None-:broadwell,icelake",
|
||||
),
|
||||
# Hash pair version followed by a variant
|
||||
(
|
||||
f"develop-branch-version@git.{'a' * 40}=develop+var1+var2",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="develop-branch-version"),
|
||||
Token(TokenType.VERSION_HASH_PAIR, value=f"@git.{'a' * 40}=develop"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+var1"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+var2"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="develop-branch-version"),
|
||||
Token(SpecTokens.VERSION_HASH_PAIR, value=f"@git.{'a' * 40}=develop"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+var1"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+var2"),
|
||||
],
|
||||
f"develop-branch-version@git.{'a' * 40}=develop+var1+var2",
|
||||
),
|
||||
@ -422,98 +429,101 @@ def _specfile_for(spec_str, filename):
|
||||
compiler_with_version_range("%gcc@10.1.0,12.2.1:"),
|
||||
compiler_with_version_range("%gcc@:8.4.3,10.2.1:12.1.0"),
|
||||
# Special key value arguments
|
||||
("dev_path=*", [Token(TokenType.KEY_VALUE_PAIR, value="dev_path=*")], "dev_path='*'"),
|
||||
("dev_path=*", [Token(SpecTokens.KEY_VALUE_PAIR, value="dev_path=*")], "dev_path='*'"),
|
||||
(
|
||||
"dev_path=none",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="dev_path=none")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="dev_path=none")],
|
||||
"dev_path=none",
|
||||
),
|
||||
(
|
||||
"dev_path=../relpath/work",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="dev_path=../relpath/work")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="dev_path=../relpath/work")],
|
||||
"dev_path=../relpath/work",
|
||||
),
|
||||
(
|
||||
"dev_path=/abspath/work",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="dev_path=/abspath/work")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="dev_path=/abspath/work")],
|
||||
"dev_path=/abspath/work",
|
||||
),
|
||||
# One liner for flags like 'a=b=c' that are injected
|
||||
(
|
||||
"cflags=a=b=c",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="cflags=a=b=c")],
|
||||
"cflags='a=b=c'",
|
||||
),
|
||||
(
|
||||
"cflags=a=b=c",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="cflags=a=b=c")],
|
||||
"cflags='a=b=c'",
|
||||
),
|
||||
(
|
||||
"cflags=a=b=c+~",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="cflags=a=b=c+~")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="cflags=a=b=c+~")],
|
||||
"cflags='a=b=c+~'",
|
||||
),
|
||||
(
|
||||
"cflags=-Wl,a,b,c",
|
||||
[Token(TokenType.KEY_VALUE_PAIR, value="cflags=-Wl,a,b,c")],
|
||||
[Token(SpecTokens.KEY_VALUE_PAIR, value="cflags=-Wl,a,b,c")],
|
||||
"cflags=-Wl,a,b,c",
|
||||
),
|
||||
# Multi quoted
|
||||
(
|
||||
'cflags=="-O3 -g"',
|
||||
[Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, value='cflags=="-O3 -g"')],
|
||||
[Token(SpecTokens.PROPAGATED_KEY_VALUE_PAIR, value='cflags=="-O3 -g"')],
|
||||
"cflags=='-O3 -g'",
|
||||
),
|
||||
# Whitespace is allowed in version lists
|
||||
("@1.2:1.4 , 1.6 ", [Token(TokenType.VERSION, value="@1.2:1.4 , 1.6")], "@1.2:1.4,1.6"),
|
||||
("@1.2:1.4 , 1.6 ", [Token(SpecTokens.VERSION, value="@1.2:1.4 , 1.6")], "@1.2:1.4,1.6"),
|
||||
# But not in ranges. `a@1:` and `b` are separate specs, not a single `a@1:b`.
|
||||
(
|
||||
"a@1: b",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="a"),
|
||||
Token(TokenType.VERSION, value="@1:"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="b"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="a"),
|
||||
Token(SpecTokens.VERSION, value="@1:"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="b"),
|
||||
],
|
||||
"a@1:",
|
||||
),
|
||||
(
|
||||
"% intel @ 12.1:12.6 + debug",
|
||||
[
|
||||
Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1:12.6"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+ debug"),
|
||||
Token(SpecTokens.COMPILER_AND_VERSION, value="% intel @ 12.1:12.6"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+ debug"),
|
||||
],
|
||||
"%intel@12.1:12.6+debug",
|
||||
),
|
||||
(
|
||||
"@ 12.1:12.6 + debug - qt_4",
|
||||
[
|
||||
Token(TokenType.VERSION, value="@ 12.1:12.6"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+ debug"),
|
||||
Token(TokenType.BOOL_VARIANT, value="- qt_4"),
|
||||
Token(SpecTokens.VERSION, value="@ 12.1:12.6"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+ debug"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="- qt_4"),
|
||||
],
|
||||
"@12.1:12.6+debug~qt_4",
|
||||
),
|
||||
(
|
||||
"@10.4.0:10,11.3.0:target=aarch64:",
|
||||
[
|
||||
Token(TokenType.VERSION, value="@10.4.0:10,11.3.0:"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="target=aarch64:"),
|
||||
Token(SpecTokens.VERSION, value="@10.4.0:10,11.3.0:"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="target=aarch64:"),
|
||||
],
|
||||
"@10.4.0:10,11.3.0: arch=None-None-aarch64:",
|
||||
),
|
||||
(
|
||||
"@:0.4 % nvhpc",
|
||||
[Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")],
|
||||
[
|
||||
Token(SpecTokens.VERSION, value="@:0.4"),
|
||||
Token(SpecTokens.COMPILER, value="% nvhpc"),
|
||||
],
|
||||
"@:0.4%nvhpc",
|
||||
),
|
||||
(
|
||||
"^[virtuals=mpi] openmpi",
|
||||
[
|
||||
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="virtuals=mpi"),
|
||||
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(SpecTokens.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=mpi"),
|
||||
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
],
|
||||
"^[virtuals=mpi] openmpi",
|
||||
),
|
||||
@ -521,48 +531,48 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
"^[virtuals=mpi] openmpi+foo ^[virtuals=lapack] openmpi+bar",
|
||||
[
|
||||
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="virtuals=mpi"),
|
||||
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+foo"),
|
||||
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="virtuals=lapack"),
|
||||
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(TokenType.BOOL_VARIANT, value="+bar"),
|
||||
Token(SpecTokens.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=mpi"),
|
||||
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+foo"),
|
||||
Token(SpecTokens.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=lapack"),
|
||||
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
|
||||
Token(SpecTokens.BOOL_VARIANT, value="+bar"),
|
||||
],
|
||||
"^[virtuals=lapack,mpi] openmpi+bar+foo",
|
||||
),
|
||||
(
|
||||
"^[deptypes=link,build] zlib",
|
||||
[
|
||||
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="deptypes=link,build"),
|
||||
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
Token(SpecTokens.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="deptypes=link,build"),
|
||||
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
],
|
||||
"^[deptypes=build,link] zlib",
|
||||
),
|
||||
(
|
||||
"^[deptypes=link] zlib ^[deptypes=build] zlib",
|
||||
[
|
||||
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="deptypes=link"),
|
||||
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="deptypes=build"),
|
||||
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
Token(SpecTokens.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="deptypes=link"),
|
||||
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
Token(SpecTokens.START_EDGE_PROPERTIES, value="^["),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="deptypes=build"),
|
||||
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
|
||||
],
|
||||
"^[deptypes=link] zlib ^[deptypes=build] zlib",
|
||||
),
|
||||
(
|
||||
"git-test@git.foo/bar",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "git-test"),
|
||||
Token(TokenType.GIT_VERSION, "@git.foo/bar"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "git-test"),
|
||||
Token(SpecTokens.GIT_VERSION, "@git.foo/bar"),
|
||||
],
|
||||
"git-test@git.foo/bar",
|
||||
),
|
||||
@ -570,24 +580,24 @@ def _specfile_for(spec_str, filename):
|
||||
(
|
||||
"zlib ++foo",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
|
||||
Token(TokenType.PROPAGATED_BOOL_VARIANT, "++foo"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
|
||||
Token(SpecTokens.PROPAGATED_BOOL_VARIANT, "++foo"),
|
||||
],
|
||||
"zlib++foo",
|
||||
),
|
||||
(
|
||||
"zlib ~~foo",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
|
||||
Token(TokenType.PROPAGATED_BOOL_VARIANT, "~~foo"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
|
||||
Token(SpecTokens.PROPAGATED_BOOL_VARIANT, "~~foo"),
|
||||
],
|
||||
"zlib~~foo",
|
||||
),
|
||||
(
|
||||
"zlib foo==bar",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
|
||||
Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, "foo==bar"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
|
||||
Token(SpecTokens.PROPAGATED_KEY_VALUE_PAIR, "foo==bar"),
|
||||
],
|
||||
"zlib foo==bar",
|
||||
),
|
||||
@ -605,49 +615,49 @@ def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_p
|
||||
(
|
||||
"mvapich emacs",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
],
|
||||
["mvapich", "emacs"],
|
||||
),
|
||||
(
|
||||
"mvapich cppflags='-O3 -fPIC' emacs",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="cppflags='-O3 -fPIC'"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="cppflags='-O3 -fPIC'"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
],
|
||||
["mvapich cppflags='-O3 -fPIC'", "emacs"],
|
||||
),
|
||||
(
|
||||
"mvapich cppflags=-O3 emacs",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="cppflags=-O3"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="cppflags=-O3"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
],
|
||||
["mvapich cppflags=-O3", "emacs"],
|
||||
),
|
||||
(
|
||||
"mvapich emacs @1.1.1 %intel cflags=-O3",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(TokenType.VERSION, value="@1.1.1"),
|
||||
Token(TokenType.COMPILER, value="%intel"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value="cflags=-O3"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(SpecTokens.VERSION, value="@1.1.1"),
|
||||
Token(SpecTokens.COMPILER, value="%intel"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value="cflags=-O3"),
|
||||
],
|
||||
["mvapich", "emacs @1.1.1 %intel cflags=-O3"],
|
||||
),
|
||||
(
|
||||
'mvapich cflags="-O3 -fPIC" emacs^ncurses%intel',
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(TokenType.KEY_VALUE_PAIR, value='cflags="-O3 -fPIC"'),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(TokenType.DEPENDENCY, value="^"),
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="ncurses"),
|
||||
Token(TokenType.COMPILER, value="%intel"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
|
||||
Token(SpecTokens.KEY_VALUE_PAIR, value='cflags="-O3 -fPIC"'),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
|
||||
Token(SpecTokens.DEPENDENCY, value="^"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="ncurses"),
|
||||
Token(SpecTokens.COMPILER, value="%intel"),
|
||||
],
|
||||
['mvapich cflags="-O3 -fPIC"', "emacs ^ncurses%intel"],
|
||||
),
|
||||
@ -741,20 +751,20 @@ def test_error_reporting(text, expected_in_error):
|
||||
@pytest.mark.parametrize(
|
||||
"text,tokens",
|
||||
[
|
||||
("/abcde", [Token(TokenType.DAG_HASH, value="/abcde")]),
|
||||
("/abcde", [Token(SpecTokens.DAG_HASH, value="/abcde")]),
|
||||
(
|
||||
"foo/abcde",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="foo"),
|
||||
Token(TokenType.DAG_HASH, value="/abcde"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="foo"),
|
||||
Token(SpecTokens.DAG_HASH, value="/abcde"),
|
||||
],
|
||||
),
|
||||
(
|
||||
"foo@1.2.3 /abcde",
|
||||
[
|
||||
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="foo"),
|
||||
Token(TokenType.VERSION, value="@1.2.3"),
|
||||
Token(TokenType.DAG_HASH, value="/abcde"),
|
||||
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="foo"),
|
||||
Token(SpecTokens.VERSION, value="@1.2.3"),
|
||||
Token(SpecTokens.DAG_HASH, value="/abcde"),
|
||||
],
|
||||
),
|
||||
],
|
||||
|
@ -1,97 +0,0 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Generic token support."""
|
||||
import enum
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
#: True when running on a Windows platform; selects the Windows filename regex below
IS_WINDOWS = sys.platform == "win32"

#: A filename starts either with a "." or a "/" or a "{name}/,
# or on Windows, a drive letter followed by a colon and "\"
# or "." or {name}\
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
# Pick the platform-appropriate filename regex once at import time
if not IS_WINDOWS:
    FILENAME = UNIX_FILENAME
else:
    FILENAME = WINDOWS_FILENAME

#: Values that match this (e.g., variants, flags) can be left unquoted in Spack output
NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")

#: Regex to strip quotes. Group 2 will be the unquoted string.
STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")
|
||||
|
||||
|
||||
def strip_quotes_and_unescape(string: str) -> str:
    """Remove surrounding single or double quotes from string, if present."""
    match = STRIP_QUOTES.match(string)
    if match is None:
        # not a quoted string; return it untouched
        return string

    quote_char, inner = match.group(1), match.group(2)
    # turn any escaped quotes inside the string back into bare quotes
    return inner.replace("\\" + quote_char, quote_char)
|
||||
|
||||
|
||||
def quote_if_needed(value: str) -> str:
    """Add quotes around the value if it requires quotes.

    This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.

    This adds:
    * single quotes by default
    * double quotes around any value that contains single quotes

    If double quotes are used, we json-escape the string. That is, we escape ``\\``,
    ``"``, and control codes.

    """
    if NO_QUOTES_NEEDED.match(value) is not None:
        # safe to emit unquoted
        return value

    if "'" in value:
        # json.dumps double-quotes the value and escapes backslashes,
        # double quotes, and control characters
        return json.dumps(value)
    return f"'{value}'"
|
||||
|
||||
|
||||
class TokenBase(enum.Enum):
    """Base class for an enum type with a regex value"""

    def __new__(cls, *args, **kwargs):
        # Number the members 1, 2, 3, ... in declaration order; the declared
        # "value" (the regex string) is consumed by __init__ instead.
        member = object.__new__(cls)
        member._value_ = len(cls.__members__) + 1
        return member

    def __init__(self, regex):
        # Store the regex string used to recognize this token kind
        self.regex = regex

    def __str__(self):
        return self._name_
|
||||
|
||||
|
||||
class Token:
    """Represents tokens; generated from input by lexer and fed to parse().

    A token pairs a kind (a :class:`TokenBase` member) with the matched text,
    plus optional start/end offsets of the match in the input string.
    """

    __slots__ = "kind", "value", "start", "end"

    def __init__(
        self,
        kind: "TokenBase",
        value: str,
        start: Optional[int] = None,
        end: Optional[int] = None,
    ):
        self.kind = kind
        self.value = value
        self.start = start
        self.end = end

    def __repr__(self):
        return str(self)

    def __str__(self):
        return f"({self.kind}, {self.value})"

    def __eq__(self, other):
        # Fix: comparing against a non-Token used to raise AttributeError.
        # Returning NotImplemented lets Python fall back to the reflected
        # comparison (ultimately False) instead of crashing.
        if not isinstance(other, Token):
            return NotImplemented
        # start/end are deliberately excluded: equality is kind + text only
        return (self.kind == other.kind) and (self.value == other.value)
|
69
lib/spack/spack/tokenize.py
Normal file
69
lib/spack/spack/tokenize.py
Normal file
@ -0,0 +1,69 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""This module provides building blocks for tokenizing strings. Users can define tokens by
|
||||
inheriting from TokenBase and defining tokens as ordered enum members. The Tokenizer class can then
|
||||
be used to iterate over tokens in a string."""
|
||||
import enum
|
||||
import re
|
||||
from typing import Generator, Match, Optional, Type
|
||||
|
||||
|
||||
class TokenBase(enum.Enum):
    """Base class for an enum type with a regex value"""

    def __new__(cls, *args, **kwargs):
        # Members get consecutive 1-based integer values in definition order.
        # The regex given as the declared value is routed to __init__.
        next_value = len(cls.__members__) + 1
        instance = object.__new__(cls)
        instance._value_ = next_value
        return instance

    def __init__(self, regex):
        #: regex used by the tokenizer to match this token kind
        self.regex = regex

    def __str__(self):
        return self._name_
|
||||
|
||||
|
||||
class Token:
    """Represents tokens; generated from input by lexer and fed to parse().

    Couples a token kind (a :class:`TokenBase` member) with the matched text
    and the [start, end) offsets of the match in the tokenized string.
    """

    __slots__ = "kind", "value", "start", "end"

    def __init__(self, kind: "TokenBase", value: str, start: int = 0, end: int = 0):
        self.kind = kind
        self.value = value
        self.start = start
        self.end = end

    def __repr__(self):
        return str(self)

    def __str__(self):
        return f"({self.kind}, {self.value})"

    def __eq__(self, other):
        # Fix: comparing against a non-Token used to raise AttributeError.
        # Returning NotImplemented defers to the other operand and yields
        # False instead of crashing.
        if not isinstance(other, Token):
            return NotImplemented
        # start/end are deliberately excluded: equality is kind + text only
        return (self.kind == other.kind) and (self.value == other.value)
|
||||
|
||||
|
||||
class Tokenizer:
    """Splits an input string into :class:`Token` objects.

    The regexes of all members of a :class:`TokenBase` enum are combined into a
    single alternation with one named group per token kind, tried in enum
    definition order.
    """

    def __init__(self, tokens: Type[TokenBase]):
        #: the token enum this tokenizer recognizes
        self.tokens = tokens
        #: one named group per token kind; group name == member name
        self.regex = re.compile("|".join(f"(?P<{token}>{token.regex})" for token in tokens))
        self.full_match = True

    def tokenize(self, text: str) -> Generator[Token, None, None]:
        if not text:
            return

        # scanner() matches tokens back-to-back from the current position and
        # stops at the first character no token regex can match
        scanner = self.regex.scanner(text)  # type: ignore[attr-defined]
        msg = (
            "unexpected value encountered during parsing. Please submit a bug report "
            "at https://github.com/spack/spack/issues/new/choose"
        )
        while True:
            match: Optional[Match] = scanner.match()
            if match is None:
                break
            # lastgroup names the token kind that matched; the assertion only
            # narrows the Optional type for mypy
            assert match.lastgroup is not None, msg
            yield Token(
                self.tokens.__members__[match.lastgroup], match.group(), match.start(), match.end()
            )
|
@ -19,7 +19,7 @@
|
||||
|
||||
import spack.error as error
|
||||
import spack.spec
|
||||
import spack.token
|
||||
import spack.spec_parser
|
||||
|
||||
#: These are variant names used by Spack internally; packages can't use them
|
||||
reserved_names = [
|
||||
@ -465,7 +465,7 @@ def __repr__(self) -> str:
|
||||
|
||||
def __str__(self) -> str:
|
||||
delim = "==" if self.propagate else "="
|
||||
values = spack.token.quote_if_needed(",".join(str(v) for v in self.value_as_tuple))
|
||||
values = spack.spec_parser.quote_if_needed(",".join(str(v) for v in self.value_as_tuple))
|
||||
return f"{self.name}{delim}{values}"
|
||||
|
||||
|
||||
@ -514,7 +514,7 @@ def __str__(self) -> str:
|
||||
values_str = ",".join(str(x) for x in self.value_as_tuple)
|
||||
|
||||
delim = "==" if self.propagate else "="
|
||||
return f"{self.name}{delim}{spack.token.quote_if_needed(values_str)}"
|
||||
return f"{self.name}{delim}{spack.spec_parser.quote_if_needed(values_str)}"
|
||||
|
||||
|
||||
class SingleValuedVariant(AbstractVariant):
|
||||
@ -571,7 +571,7 @@ def yaml_entry(self) -> Tuple[str, SerializedValueType]:
|
||||
|
||||
def __str__(self) -> str:
|
||||
delim = "==" if self.propagate else "="
|
||||
return f"{self.name}{delim}{spack.token.quote_if_needed(str(self.value))}"
|
||||
return f"{self.name}{delim}{spack.spec_parser.quote_if_needed(str(self.value))}"
|
||||
|
||||
|
||||
class BoolValuedVariant(SingleValuedVariant):
|
||||
|
Loading…
Reference in New Issue
Block a user