Reuse specs built with compilers not in config (#43539)
Allow reuse of specs that were built with compilers that are not in the current configuration. This means that specs from build caches, or specs installed before a distro upgrade, no longer need a matching compiler entry in the local configuration to be reused. If a node does need to be built, only compilers available in the configuration are considered as candidates.
Committed via GitHub · parent dfca2c285e · commit 1fe8e63481
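The end-to-end effect can be sketched with a short example. This is a minimal illustration based on the unit test added further down in this commit; it assumes a Spack session whose database contains specs built with %gcc@=10.2.1 (the mock packages and fixtures used by the test suite), and is not part of the commit itself.

```python
import spack.compilers
import spack.config
from spack.spec import Spec

# Drop gcc@=10.2.1 from the local compiler configuration...
spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")

# ...but keep reusing installed specs that were built with it.
spack.config.set("concretizer:reuse", True)
root = Spec("mpileaks").concretized()
assert all(s.satisfies("%gcc@10.2.1") for s in root.traverse())
```

A spec that is not already installed gets one of the locally available compilers for its own node, while its reused dependencies keep the compiler they were built with.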
@@ -57,8 +57,10 @@
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.error
import spack.main
import spack.package_base
import spack.paths
@@ -583,10 +585,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    # Put spack compiler paths in module scope. (Some packages use it
    # in setup_run_environment etc, so don't put it context == build)
    link_dir = spack.paths.build_env_path
    module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
    module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
    module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
    module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
    pkg_compiler = None
    try:
        pkg_compiler = pkg.compiler
    except spack.compilers.NoCompilerForSpecError as e:
        tty.debug(f"cannot set 'spack_cc': {str(e)}")

    if pkg_compiler is not None:
        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
    else:
        module.spack_cc = None
        module.spack_cxx = None
        module.spack_f77 = None
        module.spack_fc = None

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
@@ -109,27 +109,33 @@ def _to_dict(compiler):
    return {"compiler": d}


def get_compiler_config(scope=None, init_config=False):
def get_compiler_config(
    configuration: "spack.config.Configuration",
    *,
    scope: Optional[str] = None,
    init_config: bool = False,
) -> List[Dict]:
    """Return the compiler configuration for the specified architecture."""

    config = spack.config.CONFIG.get("compilers", scope=scope) or []
    config = configuration.get("compilers", scope=scope) or []
    if config or not init_config:
        return config

    merged_config = spack.config.CONFIG.get("compilers")
    merged_config = configuration.get("compilers")
    if merged_config:
        # Config is empty for this scope
        # Do not init config because there is a non-empty scope
        return config

    _init_compiler_config(scope=scope)
    config = spack.config.CONFIG.get("compilers", scope=scope)
    _init_compiler_config(configuration, scope=scope)
    config = configuration.get("compilers", scope=scope)
    return config


def get_compiler_config_from_packages(scope=None):
def get_compiler_config_from_packages(
    configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
    """Return the compiler configuration from packages.yaml"""
    config = spack.config.get("packages", scope=scope)
    config = configuration.get("packages", scope=scope)
    if not config:
        return []
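For reference, a hedged sketch of the new calling convention: the Configuration object is passed explicitly as the first argument, and scope/init_config become keyword-only. The call below mirrors how add_compilers_to_config and the updated tests in this commit use it; it is an illustration, not part of the diff.

```python
import spack.compilers
import spack.config

compiler_config = spack.compilers.get_compiler_config(
    spack.config.CONFIG, scope="site", init_config=False
)
```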
@@ -216,13 +222,15 @@ def _compiler_config_from_external(config):
    return compiler_entry


def _init_compiler_config(*, scope):
def _init_compiler_config(
    configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
    """Compiler search used when Spack has no compilers."""
    compilers = find_compilers()
    compilers_dict = []
    for compiler in compilers:
        compilers_dict.append(_to_dict(compiler))
    spack.config.set("compilers", compilers_dict, scope=scope)
    configuration.set("compilers", compilers_dict, scope=scope)


def compiler_config_files():
@@ -233,7 +241,7 @@ def compiler_config_files():
        compiler_config = config.get("compilers", scope=name)
        if compiler_config:
            config_files.append(config.get_config_filename(name, "compilers"))
        compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
        compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
        if compiler_config_from_packages:
            config_files.append(config.get_config_filename(name, "packages"))
    return config_files
@@ -246,7 +254,9 @@ def add_compilers_to_config(compilers, scope=None):
        compilers: a list of Compiler objects.
        scope: configuration scope to modify.
    """
    compiler_config = get_compiler_config(scope, init_config=False)
    compiler_config = get_compiler_config(
        configuration=spack.config.CONFIG, scope=scope, init_config=False
    )
    for compiler in compilers:
        if not compiler.cc:
            tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -295,7 +305,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
        True if one or more compiler entries were actually removed, False otherwise
    """
    assert scope is not None, "a specific scope is needed when calling this function"
    compiler_config = get_compiler_config(scope, init_config=False)
    compiler_config = get_compiler_config(
        configuration=spack.config.CONFIG, scope=scope, init_config=False
    )
    filtered_compiler_config = [
        compiler_entry
        for compiler_entry in compiler_config
@@ -310,18 +322,23 @@ def _remove_compiler_from_scope(compiler_spec, scope):
    # We need to preserve the YAML type for comments, hence we are copying the
    # items in the list that has just been retrieved
    compiler_config[:] = filtered_compiler_config
    spack.config.set("compilers", compiler_config, scope=scope)
    spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
    return True


def all_compilers_config(scope=None, init_config=True):
def all_compilers_config(
    configuration: "spack.config.Configuration",
    *,
    scope: Optional[str] = None,
    init_config: bool = True,
) -> List["spack.compiler.Compiler"]:
    """Return a set of specs for all the compiler versions currently
    available to build with. These are instances of CompilerSpec.
    """
    from_packages_yaml = get_compiler_config_from_packages(scope)
    from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
    if from_packages_yaml:
        init_config = False
    from_compilers_yaml = get_compiler_config(scope, init_config)
    from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)

    result = from_compilers_yaml + from_packages_yaml
    key = lambda c: _compiler_from_config_entry(c["compiler"])
@@ -332,7 +349,7 @@ def all_compiler_specs(scope=None, init_config=True):
    # Return compiler specs from the merged config.
    return [
        spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
        for s in all_compilers_config(scope, init_config)
        for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
    ]
@@ -492,9 +509,16 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):


def all_compilers(scope=None, init_config=True):
    config = all_compilers_config(scope, init_config=init_config)
    compilers = list()
    for items in config:
    return all_compilers_from(
        configuration=spack.config.CONFIG, scope=scope, init_config=init_config
    )


def all_compilers_from(configuration, scope=None, init_config=True):
    compilers = []
    for items in all_compilers_config(
        configuration=configuration, scope=scope, init_config=init_config
    ):
        items = items["compiler"]
        compilers.append(_compiler_from_config_entry(items))
    return compilers
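A small usage sketch (not part of the diff) of the new entry point: all_compilers() keeps its signature and forwards the global spack.config.CONFIG, while all_compilers_from() lets callers pass any Configuration explicitly.

```python
import spack.compilers
import spack.config

# Equivalent to spack.compilers.all_compilers(), but with the configuration
# made explicit.
for c in spack.compilers.all_compilers_from(spack.config.CONFIG):
    print(c.spec, c.operating_system, c.target)
```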
@@ -507,7 +531,7 @@ def compilers_for_spec(
    """This gets all compilers that satisfy the supplied CompilerSpec.
    Returns an empty list if none are found.
    """
    config = all_compilers_config(scope, init_config)
    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

    matches = set(find(compiler_spec, scope, init_config))
    compilers = []
@@ -517,7 +541,7 @@

def compilers_for_arch(arch_spec, scope=None):
    config = all_compilers_config(scope)
    config = all_compilers_config(spack.config.CONFIG, scope=scope)
    return list(get_compilers(config, arch_spec=arch_spec))
@@ -1427,7 +1427,7 @@ def _concretize_separately(self, tests=False):

        # Ensure we have compilers in compilers.yaml to avoid that
        # processes try to write the config file in parallel
        _ = spack.compilers.get_compiler_config(init_config=True)
        _ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)

        # Early return if there is nothing to do
        if len(args) == 0:
@@ -976,7 +976,11 @@ def __init__(
        # a dependency of the build task. Here we add it to self.dependencies
        compiler_spec = self.pkg.spec.compiler
        arch_spec = self.pkg.spec.architecture
        if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
        strict = spack.concretize.Concretizer().check_for_compiler_existence
        if (
            not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
            and not strict
        ):
            # The compiler is in the queue, identify it as dependency
            dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
            dep.constrain(f"platform={str(arch_spec.platform)}")
@@ -267,8 +267,8 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
    return NoDuplicatesCounter(specs, tests=tests)


def all_compilers_in_config():
    return spack.compilers.all_compilers()
def all_compilers_in_config(configuration):
    return spack.compilers.all_compilers_from(configuration)


def extend_flag_list(flag_list, new_flags):
@@ -688,8 +688,9 @@ def on_model(model):
        raise UnsatisfiableSpecError(msg)


#: Data class to collect information on a requirement
class RequirementRule(NamedTuple):
    """Data class to collect information on a requirement"""

    pkg_name: str
    policy: str
    requirements: List["spack.spec.Spec"]
@@ -698,6 +699,27 @@ class RequirementRule(NamedTuple):
    message: Optional[str]


class KnownCompiler(NamedTuple):
    """Data class to collect information on compilers"""

    spec: "spack.spec.Spec"
    os: str
    target: str
    available: bool
    compiler_obj: Optional["spack.compiler.Compiler"]

    def _key(self):
        return self.spec, self.os, self.target

    def __eq__(self, other: object):
        if not isinstance(other, KnownCompiler):
            return NotImplemented
        return self._key() == other._key()

    def __hash__(self):
        return hash(self._key())


class PyclingoDriver:
    def __init__(self, cores=True):
        """Driver for the Python clingo interface.
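A minimal sketch (not part of the diff) of the de-duplication behavior this data class enables: equality and hashing are based only on (spec, os, target), so two entries that differ only in availability or in the attached compiler object collapse to a single element in a set. Plain strings stand in for real Spec objects here, purely for illustration, and the import path assumes the class lives in spack.solver.asp as the surrounding hunk suggests.

```python
from spack.solver.asp import KnownCompiler

a = KnownCompiler(spec="gcc@=10.2.1", os="ubuntu20.04", target="x86_64",
                  available=True, compiler_obj=None)
b = KnownCompiler(spec="gcc@=10.2.1", os="ubuntu20.04", target="x86_64",
                  available=False, compiler_obj=None)

assert a == b            # equal: same (spec, os, target)
assert len({a, b}) == 1  # a set keeps only one of them
```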
@@ -1065,37 +1087,29 @@ def compiler_facts(self):
        """Facts about available compilers."""

        self.gen.h2("Available compilers")
        indexed_possible_compilers = list(enumerate(self.possible_compilers))
        for compiler_id, compiler in indexed_possible_compilers:
        for compiler_id, compiler in enumerate(self.possible_compilers):
            self.gen.fact(fn.compiler_id(compiler_id))
            self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name))
            self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version))

            if compiler.operating_system:
                self.gen.fact(fn.compiler_os(compiler_id, compiler.operating_system))

            if compiler.target == "any":
                compiler.target = None
            if compiler.os:
                self.gen.fact(fn.compiler_os(compiler_id, compiler.os))

            if compiler.target is not None:
                self.gen.fact(fn.compiler_target(compiler_id, compiler.target))

            for flag_type, flags in compiler.flags.items():
                for flag in flags:
                    self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
            if compiler.compiler_obj is not None:
                c = compiler.compiler_obj
                for flag_type, flags in c.flags.items():
                    for flag in flags:
                        self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))

            if compiler.available:
                self.gen.fact(fn.compiler_available(compiler_id))

            self.gen.fact(fn.compiler_weight(compiler_id, compiler_id))
            self.gen.newline()

        # Set compiler defaults, given a list of possible compilers
        self.gen.h2("Default compiler preferences (CompilerID, Weight)")

        ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
        matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))

        for weight, (compiler_id, cspec) in enumerate(matches):
            f = fn.compiler_weight(compiler_id, weight)
            self.gen.fact(f)

    def package_requirement_rules(self, pkg):
        parser = RequirementParser(spack.config.CONFIG)
        self.emit_facts_from_requirement_rules(parser.rules(pkg))
@@ -1624,23 +1638,6 @@ def target_preferences(self):
        for i, preferred in enumerate(package_targets):
            self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))

    def flag_defaults(self):
        self.gen.h2("Compiler flag defaults")

        # types of flags that can be on specs
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            self.gen.fact(fn.flag_type(flag))
        self.gen.newline()

        # flags from compilers.yaml
        compilers = all_compilers_in_config()
        for compiler in compilers:
            for name, flags in compiler.flags.items():
                for flag in flags:
                    self.gen.fact(
                        fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
                    )

    def spec_clauses(
        self,
        spec: spack.spec.Spec,
@@ -2046,9 +2043,16 @@ def target_defaults(self, specs):
                candidate_targets.append(ancestor)

        best_targets = {uarch.family.name}
        for compiler_id, compiler in enumerate(self.possible_compilers):
        for compiler_id, known_compiler in enumerate(self.possible_compilers):
            if not known_compiler.available:
                continue

            compiler = known_compiler.compiler_obj
            # Stub support for cross-compilation, to be expanded later
            if compiler.target is not None and compiler.target != str(uarch.family):
            if known_compiler.target is not None and compiler.target not in (
                str(uarch.family),
                "any",
            ):
                self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target))
                self.gen.newline()
                continue
|
||||
self.gen.fact(fn.virtual(vspec))
|
||||
self.gen.newline()
|
||||
|
||||
def generate_possible_compilers(self, specs):
|
||||
compilers = all_compilers_in_config()
|
||||
|
||||
# Search for compilers which differs only by aspects that are
|
||||
# not selectable by users using the spec syntax
|
||||
seen, sanitized_list = set(), []
|
||||
for compiler in compilers:
|
||||
key = compiler.spec, compiler.operating_system, compiler.target
|
||||
if key in seen:
|
||||
warnings.warn(
|
||||
f"duplicate found for {compiler.spec} on "
|
||||
f"{compiler.operating_system}/{compiler.target}. "
|
||||
f"Edit your compilers.yaml configuration to remove it."
|
||||
)
|
||||
continue
|
||||
sanitized_list.append(compiler)
|
||||
seen.add(key)
|
||||
|
||||
cspecs = set([c.spec for c in compilers])
|
||||
|
||||
# add compiler specs from the input line to possibilities if we
|
||||
# don't require compilers to exist.
|
||||
strict = spack.concretize.Concretizer().check_for_compiler_existence
|
||||
for s in traverse.traverse_nodes(specs):
|
||||
# we don't need to validate compilers for already-built specs
|
||||
if s.concrete or not s.compiler:
|
||||
continue
|
||||
|
||||
version = s.compiler.versions.concrete
|
||||
|
||||
if not version or any(c.satisfies(s.compiler) for c in cspecs):
|
||||
continue
|
||||
|
||||
# Error when a compiler is not found and strict mode is enabled
|
||||
if strict:
|
||||
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
|
||||
|
||||
# Make up a compiler matching the input spec. This is for bootstrapping.
|
||||
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
|
||||
compilers.append(
|
||||
compiler_cls(s.compiler, operating_system=None, target=None, paths=[None] * 4)
|
||||
)
|
||||
self.gen.fact(fn.allow_compiler(s.compiler.name, version))
|
||||
|
||||
return list(
|
||||
sorted(
|
||||
compilers,
|
||||
key=lambda compiler: (compiler.spec.name, compiler.spec.version),
|
||||
reverse=True,
|
||||
)
|
||||
)
|
||||
|
||||
def define_version_constraints(self):
|
||||
"""Define what version_satisfies(...) means in ASP logic."""
|
||||
for pkg_name, versions in sorted(self.version_constraints):
|
||||
@@ -2331,6 +2283,8 @@ def setup(
            self.explicitly_required_namespaces[node.name] = node.namespace

        self.gen = ProblemInstanceBuilder()
        compiler_parser = CompilerParser(configuration=spack.config.CONFIG).with_input_specs(specs)

        if not allow_deprecated:
            self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2349,17 +2303,17 @@ def setup(
        )
        specs = tuple(specs)  # ensure compatible types to add

        # get possible compilers
        self.possible_compilers = self.generate_possible_compilers(specs)

        self.gen.h1("Reusable concrete specs")
        self.define_concrete_input_specs(specs, self.pkgs)
        if reuse:
            self.gen.fact(fn.optimize_for_reuse())
            for reusable_spec in reuse:
                compiler_parser.add_compiler_from_concrete_spec(reusable_spec)
                self.register_concrete_spec(reusable_spec, self.pkgs)
        self.concrete_specs()

        self.possible_compilers = compiler_parser.possible_compilers()

        self.gen.h1("Generic statements on possible packages")
        node_counter.possible_packages_facts(self.gen, fn)
|
||||
def define_runtime_constraints(self):
|
||||
"""Define the constraints to be imposed on the runtimes"""
|
||||
recorder = RuntimePropertyRecorder(self)
|
||||
# TODO: Use only available compilers ?
|
||||
for compiler in self.possible_compilers:
|
||||
compiler_with_different_cls_names = {"oneapi": "intel-oneapi-compilers"}
|
||||
compiler_cls_name = compiler_with_different_cls_names.get(compiler.name, compiler.name)
|
||||
compiler_cls_name = compiler_with_different_cls_names.get(
|
||||
compiler.spec.name, compiler.spec.name
|
||||
)
|
||||
try:
|
||||
compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name)
|
||||
except spack.repo.UnknownPackageError:
|
||||
@@ -2840,6 +2797,90 @@ def reject_requirement_constraint(
|
||||
return False
|
||||
|
||||
|
||||
class CompilerParser:
|
||||
"""Parses configuration files, and builds a list of possible compilers for the solve."""
|
||||
|
||||
def __init__(self, configuration) -> None:
|
||||
self.compilers: Set[KnownCompiler] = set()
|
||||
for c in all_compilers_in_config(configuration):
|
||||
target = c.target if c.target != "any" else None
|
||||
candidate = KnownCompiler(
|
||||
spec=c.spec, os=c.operating_system, target=target, available=True, compiler_obj=c
|
||||
)
|
||||
if candidate in self.compilers:
|
||||
warnings.warn(
|
||||
f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
|
||||
f"Edit your compilers.yaml configuration to remove it."
|
||||
)
|
||||
continue
|
||||
|
||||
self.compilers.add(candidate)
|
||||
|
||||
def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
|
||||
"""Accounts for input specs when building the list of possible compilers.
|
||||
|
||||
Args:
|
||||
input_specs: specs to be concretized
|
||||
"""
|
||||
strict = spack.concretize.Concretizer().check_for_compiler_existence
|
||||
default_os = str(spack.platforms.host().default_os)
|
||||
default_target = str(archspec.cpu.host().family)
|
||||
for s in traverse.traverse_nodes(input_specs):
|
||||
# we don't need to validate compilers for already-built specs
|
||||
if s.concrete or not s.compiler:
|
||||
continue
|
||||
|
||||
version = s.compiler.versions.concrete
|
||||
|
||||
if not version or any(item.spec.satisfies(s.compiler) for item in self.compilers):
|
||||
continue
|
||||
|
||||
# Error when a compiler is not found and strict mode is enabled
|
||||
if strict:
|
||||
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
|
||||
|
||||
# Make up a compiler matching the input spec. This is for bootstrapping.
|
||||
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
|
||||
compiler_obj = compiler_cls(
|
||||
s.compiler, operating_system=default_os, target=default_target, paths=[None] * 4
|
||||
)
|
||||
self.compilers.add(
|
||||
KnownCompiler(
|
||||
spec=s.compiler,
|
||||
os=default_os,
|
||||
target=default_target,
|
||||
available=True,
|
||||
compiler_obj=compiler_obj,
|
||||
)
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
|
||||
"""Account for compilers that are coming from concrete specs, through reuse.
|
||||
|
||||
Args:
|
||||
spec: concrete spec to be reused
|
||||
"""
|
||||
assert spec.concrete, "the spec argument must be concrete"
|
||||
candidate = KnownCompiler(
|
||||
spec=spec.compiler,
|
||||
os=str(spec.architecture.os),
|
||||
target=str(spec.architecture.target.microarchitecture.family),
|
||||
available=False,
|
||||
compiler_obj=None,
|
||||
)
|
||||
self.compilers.add(candidate)
|
||||
|
||||
def possible_compilers(self) -> List[KnownCompiler]:
|
||||
# Here we have to sort two times, first sort by name and ascending version
|
||||
result = sorted(self.compilers, key=lambda x: (x.spec.name, x.spec.version), reverse=True)
|
||||
# Then stable sort to prefer available compilers and account for preferences
|
||||
ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
|
||||
result.sort(key=lambda x: (not x.available, ppk(x.spec)))
|
||||
return result
|
||||
|
||||
|
||||
class RuntimePropertyRecorder:
|
||||
"""An object of this class is injected in callbacks to compilers, to let them declare
|
||||
properties of the runtimes they support and of the runtimes they provide, and to add
|
||||
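The two-pass ordering in possible_compilers() relies on Python's stable sort: a first pass orders entries by name and version (descending), then a second, stable pass moves available compilers to the front and applies the packages.yaml preference weight. A generic sketch of the idea, using plain tuples instead of KnownCompiler objects and omitting the preference weight; it is only an illustration of the technique:

```python
entries = [
    ("clang", 15, False),  # (name, version, available)
    ("gcc", 9, True),
    ("gcc", 10, False),
    ("clang", 16, True),
]

# Pass 1: name and version, descending.
ordered = sorted(entries, key=lambda e: (e[0], e[1]), reverse=True)
# Pass 2: stable sort that prefers available entries; ties keep pass-1 order.
ordered.sort(key=lambda e: not e[2])

# Available entries come first, each group still ordered by name/version.
print(ordered)
```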
@@ -3179,7 +3220,9 @@ def reorder_flags(self):
        imposes order afterwards.
        """
        # reverse compilers so we get highest priority compilers that share a spec
        compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
        compilers = dict(
            (c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
        )
        cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

        for spec in self._specs.values():
@@ -1110,7 +1110,7 @@ error(100, "'{0} target={1}' is not compatible with this machine", Package, Targ
% Compiler semantics
%-----------------------------------------------------------------------------
% There must be only one compiler set per built node.
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID) } :-
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID), compiler_available(CompilerID) } :-
    attr("node", PackageNode),
    build(PackageNode).
@@ -1127,6 +1127,7 @@ attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
  :- node_compiler(PackageNode, CompilerID),
     compiler_name(CompilerID, CompilerName),
     compiler_version(CompilerID, CompilerVersion),
     compiler_available(CompilerID),
     build(PackageNode).

attr("node_compiler", PackageNode, CompilerName)
|
||||
compiler_version(CompilerID, Version),
|
||||
compiler_os(CompilerID, CompilerOS),
|
||||
not os_compatible(CompilerOS, OS),
|
||||
not allow_compiler(Compiler, Version),
|
||||
build(node(X, Package)).
|
||||
|
||||
% If a package and one of its dependencies don't have the
|
||||
@@ -1210,7 +1210,6 @@ compiler_mismatch_required(PackageNode, DependencyNode)
    not compiler_match(PackageNode, DependencyNode).

#defined compiler_os/3.
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
node_compiler_weight(node(ID, Package), Weight)
@@ -175,7 +175,9 @@ def test_compiler_find_mixed_suffixes(
    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output

    config = spack.compilers.get_compiler_config("site", False)
    config = spack.compilers.get_compiler_config(
        no_compilers_yaml, scope="site", init_config=False
    )
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
@@ -210,7 +212,9 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output

    config = spack.compilers.get_compiler_config("site", False)
    config = spack.compilers.get_compiler_config(
        no_compilers_yaml, scope="site", init_config=False
    )
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")

    assert clang["paths"]["cc"] == str(compilers_dir / "clang")
@@ -229,7 +233,9 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)

    compiler("find", "--scope=site")

    config = spack.compilers.get_compiler_config("site", False)
    config = spack.compilers.get_compiler_config(
        no_compilers_yaml, scope="site", init_config=False
    )
    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
    assert gcc["paths"] == {
        "cc": str(new_dir / "gcc-8"),
@@ -700,7 +700,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
    ]
    arch_spec = spack.spec.ArchSpec(("linux", "ubuntu18.04", "haswell"))
    with spack.config.override("compilers", compilers):
        cfg = spack.compilers.get_compiler_config()
        cfg = spack.compilers.get_compiler_config(config)
        with pytest.raises(ValueError):
            spack.compilers.get_compilers(cfg, spack.spec.CompilerSpec("gcc@9.0.1"), arch_spec)
@@ -120,14 +120,16 @@ def current_host(request, monkeypatch):
    # is_preference is not empty if we want to supply the
    # preferred target via packages.yaml
    cpu, _, is_preference = request.param.partition("-")
    target = archspec.cpu.TARGETS[cpu]

    monkeypatch.setattr(spack.platforms.Test, "default", cpu)
    monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
    if not is_preference:
        target = archspec.cpu.TARGETS[cpu]
        monkeypatch.setattr(archspec.cpu, "host", lambda: target)
        yield target
    else:
        target = archspec.cpu.TARGETS["sapphirerapids"]
        monkeypatch.setattr(archspec.cpu, "host", lambda: target)
        with spack.config.override("packages:all", {"target": [cpu]}):
            yield target
@@ -874,7 +876,7 @@ def test_concretize_anonymous_dep(self, spec_str):
    @pytest.mark.parametrize(
        "spec_str,expected_str",
        [
            # Unconstrained versions select default compiler (gcc@4.5.0)
            # Unconstrained versions select default compiler (gcc@10.2.1)
            ("bowtie@1.4.0", "%gcc@10.2.1"),
            # Version with conflicts and no valid gcc select another compiler
            ("bowtie@1.3.0", "%clang@15.0.0"),
@@ -1012,7 +1014,7 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
        [("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_external_package_and_compiler_preferences(self, spec_str, expected):
    def test_external_package_and_compiler_preferences(self, spec_str, expected, mutable_config):
        packages_yaml = {
            "all": {"compiler": ["clang", "gcc"]},
            "cmake": {
@@ -1020,7 +1022,7 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected):
                "buildable": False,
            },
        }
        spack.config.set("packages", packages_yaml)
        mutable_config.set("packages", packages_yaml)
        s = Spec(spec_str).concretized()

        assert s.external
@@ -1877,19 +1879,16 @@ def test_version_weight_and_provenance(self):

    @pytest.mark.regression("31169")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_not_reusing_incompatible_os_or_compiler(self):
    def test_not_reusing_incompatible_os(self):
        root_spec = Spec("b")
        s = root_spec.concretized()
        wrong_compiler, wrong_os = s.copy(), s.copy()
        wrong_compiler.compiler = spack.spec.CompilerSpec("gcc@12.1.0")
        wrong_os = s.copy()
        wrong_os.architecture = spack.spec.ArchSpec("test-ubuntu2204-x86_64")
        reusable_specs = [wrong_compiler, wrong_os]
        with spack.config.override("concretizer:reuse", True):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
            result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
            result, _, _ = solver.driver.solve(setup, [root_spec], reuse=[wrong_os])
        concrete_spec = result.specs[0]
        assert concrete_spec.satisfies("%{}".format(s.compiler))
        assert concrete_spec.satisfies("os={}".format(s.architecture.os))

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
@@ -2360,6 +2359,29 @@ def test_select_lower_priority_package_from_repository_stack(
        assert s[name].concrete
        assert s[name].namespace == namespace

    @pytest.mark.only_clingo("Old concretizer cannot reuse")
    def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_database):
        """Tests that we can reuse specs with compilers that are not configured locally."""
        # All the specs in the mutable DB have been compiled with %gcc@=10.2.1
        specs = mutable_database.query_local()
        assert all(s.satisfies("%gcc@=10.2.1") for s in specs)

        spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
        assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")
        mutable_config.set("concretizer:reuse", True)

        # mpileaks is in the database, it will be reused with gcc@=10.2.1
        root = Spec("mpileaks").concretized()
        for s in root.traverse():
            assert s.satisfies("%gcc@10.2.1")

        # fftw is not in the database, therefore the root will be compiled with gcc@=9.4.0,
        # while the mpi is reused from the database and is compiled with gcc@=10.2.1
        root = Spec("fftw").concretized()
        assert root.satisfies("%gcc@=9.4.0")
        for s in root.traverse(root=False):
            assert s.satisfies("%gcc@10.2.1")


@pytest.fixture()
def duplicates_test_repository():
@@ -977,7 +977,7 @@ def test_single_file_scope(config, env_yaml):
    # from the single-file config
    assert spack.config.get("config:verify_ssl") is False
    assert spack.config.get("config:dirty") is False
    assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"]
    assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3", "gcc", "clang"]

    # from the lower config scopes
    assert spack.config.get("config:checksum") is True
@@ -790,6 +790,7 @@ def no_compilers_yaml(mutable_config):
    compilers_yaml = os.path.join(local_config.path, "compilers.yaml")
    if os.path.exists(compilers_yaml):
        os.remove(compilers_yaml)
    return mutable_config


@pytest.fixture()
@@ -1,5 +1,6 @@
packages:
  all:
    compiler: [gcc, clang]
    providers:
      mpi: [openmpi, mpich, zmpi]
      lapack: [openblas-with-lapack]
lib/spack/spack/test/solver/intermediate.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Unit tests for objects turning configuration into an intermediate format used by the solver."""
import pytest

import spack.compilers
import spack.config
import spack.spec
from spack.concretize import UnavailableCompilerVersionError
from spack.solver import asp


class TestCompilerParser:
    def test_expected_order_mock_config(self, config):
        """Tests the expected preference order in the mock compiler configuration"""
        parser = asp.CompilerParser(config)
        expected_order = ["gcc@=10.2.1", "gcc@=9.4.0", "gcc@=9.4.0", "clang@=15.0.0"]
        for c, expected in zip(parser.possible_compilers(), expected_order):
            assert c.spec.satisfies(expected)

    @pytest.mark.parametrize("spec_str", ["a %gcc@=13.2.0", "a ^b %gcc@=13.2.0"])
    def test_compiler_from_input_raise(self, spec_str, config):
        """Tests that having an unknown compiler in the input spec raises an exception, if we
        don't allow bootstrapping missing compilers.
        """
        spec = spack.spec.Spec(spec_str)
        with pytest.raises(UnavailableCompilerVersionError):
            asp.CompilerParser(config).with_input_specs([spec])

    def test_compilers_inferred_from_concrete_specs(self, mutable_config, mutable_database):
        """Test that compilers inferred from concrete specs, that are not in the local
        configuration too, are last in the preference order.
        """
        spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
        assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")

        parser = asp.CompilerParser(mutable_config)
        for reuse_spec in mutable_database.query():
            parser.add_compiler_from_concrete_spec(reuse_spec)

        expected_order = [
            ("gcc@=9.4.0", True),
            ("gcc@=9.4.0", True),
            ("clang@=15.0.0", True),
            ("gcc@=10.2.1", False),
        ]
        for c, (expected, available) in zip(parser.possible_compilers(), expected_order):
            assert c.spec.satisfies(expected)
            assert c.available is available