Spack toolchains

Add parser and config support for toolchains. A toolchain is a configured
string alias that is applied at parse time. When parsing the '%' sigil, the
parser compares the token following the sigil against the configured
toolchain names; if there is a match, it replaces the sigil and the
following token with the tokens of the associated string.

For example, the following config lets the user enforce the use of clang
for c/cxx and gcc for fortran with the single option `%my_toolchain`:

toolchains:
  my_toolchain: ^[when=%c virtuals=c ]clang ^[when=%cxx virtuals=cxx ]clang ^[when=%fortran virtuals=fortran ]gcc

Note that the space before the ']' symbol is required for proper parsing. This
is considered a bug and will be fixed when possible.
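
With the config above, a spec such as `hdf5 %my_toolchain` (hdf5 is just an
arbitrary example package) should be parsed as if the user had typed

  hdf5 ^[when=%c virtuals=c ]clang ^[when=%cxx virtuals=cxx ]clang ^[when=%fortran virtuals=fortran ]gcc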

Signed-off-by: Gregory Becker <becker33@llnl.gov>
Gregory Becker 2025-04-25 14:11:49 -07:00
parent 6eaaaa4ae7
commit 6623209ba7
4 changed files with 57 additions and 3 deletions

lib/spack/spack/config.py

@@ -60,6 +60,7 @@
 import spack.schema.modules
 import spack.schema.packages
 import spack.schema.repos
+import spack.schema.toolchains
 import spack.schema.upstreams
 import spack.schema.view
 import spack.util.remote_file_cache as rfc_util
@@ -87,6 +88,7 @@
     "bootstrap": spack.schema.bootstrap.schema,
     "ci": spack.schema.ci.schema,
     "cdash": spack.schema.cdash.schema,
+    "toolchains": spack.schema.toolchains.schema,
 }

 # Same as above, but including keys for environments

lib/spack/spack/schema/toolchains.py (new file)

@@ -0,0 +1,23 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for toolchains.yaml configuration file.
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/toolchains.py
+   :lines: 14-
+"""
+from typing import Any, Dict
+
+#: Properties for inclusion in other schemas
+properties: Dict[str, Any] = {"toolchains": {"type": "object", "default": {}}}
+
+#: Full schema with metadata
+schema = {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Spack toolchain configuration file schema",
+    "type": "object",
+    "additionalProperties": False,
+    "properties": properties,
+}
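
Note that this schema only requires the toolchains section to be a mapping;
the alias strings themselves are not checked until they are parsed. A minimal
sketch of that behavior, assuming the third-party jsonschema package is
importable (illustration only, not Spack code):

import jsonschema

properties = {"toolchains": {"type": "object", "default": {}}}
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack toolchain configuration file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
}

# Any mapping under "toolchains" validates; the alias string is opaque to the schema.
jsonschema.validate({"toolchains": {"my_toolchain": "^[when=%c virtuals=c ]clang"}}, schema)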

lib/spack/spack/spec.py

@@ -782,7 +782,7 @@ def __str__(self) -> str:
         child = self.spec.name if self.spec else None
         virtuals_string = f"virtuals={','.join(self.virtuals)}" if self.virtuals else ""
         when_string = f"when={self.when}" if self.when != Spec() else ""
-        edge_attrs = filter((virtuals_string, when_string), lambda x: bool(x))
+        edge_attrs = filter(lambda x: bool(x), (virtuals_string, when_string))
         return f"{parent} {self.depflag}[{' '.join(edge_attrs)}] --> {child}"

     def flip(self) -> "DependencySpec":
@@ -3531,7 +3531,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                 return False

         # Edges have been checked above already, hence deps=False
-        lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
+        lhs_nodes = list(self.traverse(root=False)) + sorted(mock_nodes_from_old_specfiles)
         for rhs in other.traverse(root=False):
             # Possible lhs nodes to match this rhs node
             lhss = [lhs for lhs in lhs_nodes if lhs.satisfies(rhs, deps=False)]

lib/spack/spack/spec_parser.py

@@ -56,6 +56,7 @@
 specs to avoid ambiguity. Both are provided because ~ can cause shell
 expansion when it is the first character in an id typed on the command line.
 """
+import itertools
 import json
 import pathlib
 import re
@@ -66,6 +67,7 @@
 from llnl.util.tty import color

+import spack.config
 import spack.deptypes
 import spack.error
 import spack.paths
@@ -162,6 +164,15 @@ def tokenize(text: str) -> Iterator[Token]:
         yield token


+def parseable_tokens(text: str) -> Iterator[Token]:
+    """Return non-whitespace tokens from the text passed as input
+
+    Raises:
+        SpecTokenizationError: when unexpected characters are found in the text
+    """
+    return filter(lambda x: x.kind != SpecTokens.WS, tokenize(text))
+
+
 class TokenContext:
     """Token context passed around by parsers"""
@@ -189,6 +200,10 @@ def accept(self, kind: SpecTokens):
     def expect(self, *kinds: SpecTokens):
         return self.next_token and self.next_token.kind in kinds

+    def push(self, token_stream: Iterator[Token]):
+        self.token_stream = itertools.chain(token_stream, self.token_stream)
+        self.advance()
+

 class SpecTokenizationError(spack.error.SpecSyntaxError):
     """Syntax error in a spec string"""
@@ -238,11 +253,13 @@ class SpecParser:
     def __init__(self, literal_str: str):
         self.literal_str = literal_str
-        self.ctx = TokenContext(filter(lambda x: x.kind != SpecTokens.WS, tokenize(literal_str)))
+        self.ctx = TokenContext(parseable_tokens(literal_str))

     def tokens(self) -> List[Token]:
         """Return the entire list of token from the initial text. White spaces are
         filtered out.
+
+        Note: This list will not show tokens pushed when parsing an alias
         """
         return list(filter(lambda x: x.kind != SpecTokens.WS, tokenize(self.literal_str)))
@@ -268,6 +285,9 @@ def add_dependency(dep, **edge_properties):
             except spack.error.SpecError as e:
                 raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e

+        # Get toolchain information outside of loop
+        toolchains = spack.config.get("toolchains", {})
+
         initial_spec = initial_spec or spack.spec.Spec()
         root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
         current_spec = root_spec
@@ -297,6 +317,15 @@ def add_dependency(dep, **edge_properties):
                 add_dependency(dependency, **edge_properties)

             elif self.ctx.accept(SpecTokens.DEPENDENCY):
+                # String replacement for toolchains
+                # Look ahead to match upcoming value to list of toolchains
+                if self.ctx.next_token.value in toolchains:
+                    assert self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME)
+                    # accepting the token advances it to the current token
+                    # Push associated tokens back to the TokenContext
+                    self.ctx.push(parseable_tokens(toolchains[self.ctx.current_token.value]))
+                    continue
+
                 is_direct = self.ctx.current_token.value[0] == "%"
                 dependency, warnings = self._parse_node(root_spec)
                 edge_properties = {}
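
A note on the push() call above: itertools.chain places the pushed tokens in
front of whatever remains of the original input, so the parser consumes the
toolchain expansion before continuing with the rest of the spec. A standalone
sketch of that idea, using plain strings in place of Token objects (names here
are illustrative, not Spack API):

import itertools

# Remaining input after the parser has accepted "%" and "my_toolchain"
# out of "hdf5 %my_toolchain +shared":
remaining = iter(["+shared"])

# Tokens obtained by tokenizing the configured toolchain string:
expansion = iter(["^", "clang", "^", "gcc"])

# The push() idea: the expansion is consumed first, then the original remainder.
remaining = itertools.chain(expansion, remaining)
print(list(remaining))  # ['^', 'clang', '^', 'gcc', '+shared']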