Allow using compilers from the local store

To do this, we introduce a new fact that is true
when a compiler is used as a link dependency.

If this fact is not present, we enforce only run
dependencies in the ASP problem.
Massimiliano Culpo 2025-01-23 17:28:42 +01:00
parent 56c685e374
commit a8dd481bbf

3 changed files with 78 additions and 32 deletions
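
The Python side of the change can be summarized by the sketch below. It is only an illustration, not part of the diff, and it reuses the calls that appear in it (supported_compilers(), the store DB query, and the run/link traversals); it assumes it runs inside a Spack process with an initialized store.

import spack.compilers.config
import spack.store

# Collect compilers that Spack built itself and add them, plus the
# link/run closure of their run dependencies, to the reuse candidates.
reuse = []
for pkg_name in spack.compilers.config.supported_compilers():
    for compiler in spack.store.STORE.db.query(pkg_name):
        if compiler.external:
            continue
        reuse.append(compiler)
        for dep in compiler.traverse(root=False, deptype="run"):
            reuse.extend(dep.traverse(deptype=("link", "run")))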

File 1 of 3

@@ -142,8 +142,7 @@ def _parse_link_paths(string):
class CompilerPropertyDetector:
def __init__(self, compiler_spec: "spack.spec.Spec"):
assert compiler_spec.external, "only external compiler specs are allowed, so far"
assert compiler_spec.concrete, "only concrete compiler specs are allowed, so far"
assert compiler_spec.concrete, "only concrete compiler specs are allowed"
self.spec = compiler_spec
self.cache = COMPILER_CACHE
@@ -153,6 +152,11 @@ def compiler_environment(self):
import spack.schema.environment
import spack.util.module_cmd
# No modifications for Spack managed compilers
if not self.spec.external:
yield
return
# Avoid modifying os.environ if possible.
environment = self.spec.extra_attributes.get("environment", {})
modules = self.spec.external_modules or []
@@ -178,7 +182,6 @@ def compiler_environment(self):
os.environ.update(backup_env)
def _compile_dummy_c_source(self) -> Optional[str]:
assert self.spec.external, "only external compiler specs are allowed, so far"
compiler_pkg = self.spec.package
if getattr(compiler_pkg, "cc"):
cc = compiler_pkg.cc
@@ -201,16 +204,16 @@ def _compile_dummy_c_source(self) -> Optional[str]:
)
cc_exe = spack.util.executable.Executable(cc)
# FIXME (compiler as nodes): this operation should be encapsulated somewhere else
compiler_flags = self.spec.extra_attributes.get("flags", {})
for flag_type in [
"cflags" if cc == compiler_pkg.cc else "cxxflags",
"cppflags",
"ldflags",
]:
current_flags = compiler_flags.get(flag_type, "").strip()
if current_flags:
cc_exe.add_default_arg(*current_flags.split(" "))
if self.spec.external:
compiler_flags = self.spec.extra_attributes.get("flags", {})
for flag_type in [
"cflags" if cc == compiler_pkg.cc else "cxxflags",
"cppflags",
"ldflags",
]:
current_flags = compiler_flags.get(flag_type, "").strip()
if current_flags:
cc_exe.add_default_arg(*current_flags.split(" "))
with self.compiler_environment():
return cc_exe("-v", fin, "-o", fout, output=str, error=str)
@@ -253,9 +256,6 @@ def implicit_rpaths(self) -> List[str]:
link_dirs = parse_non_system_link_dirs(output)
all_required_libs = list(self.spec.package.required_libs) + ["libc", "libc++", "libstdc++"]
dynamic_linker = self.default_dynamic_linker()
# FIXME (compiler as nodes): is this needed ?
# if dynamic_linker is None:
# return []
result = DefaultDynamicLinkerFilter(dynamic_linker)(
paths_containing_libs(link_dirs, all_required_libs)
)
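
With the external-only asserts removed, CompilerPropertyDetector can also be pointed at a compiler that Spack built itself: compiler_environment() yields without modifications for non-external specs, and the extra_attributes flags are only consulted for externals. A minimal usage sketch follows; the import path and the "gcc" query are assumptions, not taken from this diff.

import spack.store
from spack.compilers.libraries import CompilerPropertyDetector  # import path is an assumption

installed = spack.store.STORE.db.query("gcc")  # compilers built by Spack, if any
if installed:
    detector = CompilerPropertyDetector(installed[0])  # no longer requires spec.external
    print(detector.implicit_rpaths())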

File 2 of 3

@@ -2987,6 +2987,7 @@ def setup(
reuse: list of concrete specs that can be reused
allow_deprecated: if True adds deprecated versions into the solve
"""
reuse = reuse or []
check_packages_exist(specs)
node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
@@ -3009,6 +3010,7 @@ def setup(
self.explicitly_required_namespaces[node.name] = node.namespace
self.gen = ProblemInstanceBuilder()
self.gen.h1("Generic information")
if using_libc_compatibility():
for libc in self.libcs:
self.gen.fact(fn.host_libc(libc.name, libc.version))
@@ -3016,6 +3018,10 @@ def setup(
if not allow_deprecated:
self.gen.fact(fn.deprecated_versions_not_allowed())
self.gen.newline()
for pkg_name in spack.compilers.config.supported_compilers():
self.gen.fact(fn.compiler_package(pkg_name))
# Calculate develop specs
# they will be used in addition to command line specs
# in determining known versions/targets/os
@@ -3032,6 +3038,17 @@ def setup(
specs = tuple(specs) # ensure compatible types to add
_ = spack.compilers.config.all_compilers(init_config=True)
self.possible_compilers = possible_compilers(configuration=spack.config.CONFIG)
for x in self.possible_compilers:
if x.external:
continue
reuse.append(x)
for dep in x.traverse(root=False, deptype="run"):
reuse.extend(dep.traverse(deptype=("link", "run")))
# reuse.extend([x for x in self.possible_compilers if not x.external])
self.gen.h1("Reusable concrete specs")
self.define_concrete_input_specs(specs, self.pkgs)
if reuse:
@@ -3040,9 +3057,6 @@ def setup(
self.register_concrete_spec(reusable_spec, self.pkgs)
self.concrete_specs()
_ = spack.compilers.config.all_compilers(init_config=True)
self.possible_compilers = possible_compilers(configuration=spack.config.CONFIG)
self.gen.h1("Generic statements on possible packages")
node_counter.possible_packages_facts(self.gen, fn)
@@ -3391,9 +3405,9 @@ def value(self) -> str:
def possible_compilers(*, configuration) -> List["spack.spec.Spec"]:
result = set()
for c in spack.compilers.config.all_compilers_from(configuration):
# FIXME (compiler as nodes): Discard early specs that are not marked for this target?
# Compilers defined in configuration
for c in spack.compilers.config.all_compilers_from(configuration):
if using_libc_compatibility() and not c_compiler_runs(c):
try:
compiler = c.extra_attributes["compilers"]["c"]
@@ -3419,6 +3433,10 @@ def possible_compilers(*, configuration) -> List["spack.spec.Spec"]:
result.add(c)
# Compilers from the local store
for pkg_name in spack.compilers.config.supported_compilers():
result.update(spack.store.STORE.db.query(pkg_name))
result = list(result)
result.sort()
return result
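
After this change, possible_compilers() merges two sources: externals defined in configuration and supported compiler packages already installed in the local store. A usage sketch, assuming it is called from the module that defines it (presumably spack.solver.asp):

import spack.config

compilers = possible_compilers(configuration=spack.config.CONFIG)
# 'compilers' now holds both packages.yaml externals and store-installed
# compiler packages (e.g. gcc, llvm), de-duplicated and sorted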

File 3 of 3

@@ -1280,6 +1280,13 @@ attr("node_version_satisfies", node(X, Runtime), VersionRange) :-
attr("compatible_runtime", PackageNode, Runtime, VersionRange),
concrete(PackageNode).
% If a compiler package is depended on with type link, it's used as a library
compiler_used_as_a_library(node(X, Child), Hash) :-
concrete(node(X, Child)),
attr("hash", node(X, Child), Hash),
compiler_package(Child), % Used to restrict grounding for this rule
attr("depends_on", _, node(X, Child), "link").
%-----------------------------------------------------------------------------
% Runtimes
%-----------------------------------------------------------------------------
@@ -1385,11 +1392,11 @@ language("c").
language("cxx").
language("fortran").
% FIXME (compiler as nodes): remove when we lift this constraint
error(10, "Only external compilers are allowed for the {0} language", Language)
error(10, "Only external, or concrete, compilers are allowed for the {0} language", Language)
:- provider(ProviderNode, node(_, Language)),
language(Language),
not external(ProviderNode).
not external(ProviderNode),
not concrete(ProviderNode).
error(10, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
:- attr("node_target", node(X, Package), Target),
@@ -1467,32 +1474,53 @@ hash_attr(Hash, "node_version_satisfies", PackageName, Constraint) :-
% This recovers the exact semantics for hash reuse: hash and depends_on are where
% splices are decided, and virtual_on_edge can result in name-changes, which is
% why they are all treated separately.
imposed_constraint(Hash, Attr, PackageName) :-
hash_attr(Hash, Attr, PackageName).
imposed_constraint(Hash, Attr, PackageName, A1) :-
hash_attr(Hash, Attr, PackageName, A1), Attr != "hash".
imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :-
hash_attr(Hash, Attr, PackageName, Arg1, Arg2),
imposed_constraint(Hash, Attr, PackageName) :- hash_attr(Hash, Attr, PackageName), Attr != "virtual_node".
imposed_constraint(Hash, Attr, PackageName, A1) :- hash_attr(Hash, Attr, PackageName, A1), Attr != "hash".
imposed_constraint(Hash, Attr, PackageName, A1, A2) :-
hash_attr(Hash, Attr, PackageName, A1, A2),
Attr != "depends_on",
Attr != "virtual_on_edge".
imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :-
hash_attr(Hash, Attr, PackageName, A1, A2, A3).
imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :- hash_attr(Hash, Attr, PackageName, A1, A2, A3).
imposed_constraint(Hash, "hash", PackageName, Hash) :- installed_hash(PackageName, Hash).
% If a compiler is not used as a library, we just enforce "run" dependency, so we
% can get by with a much smaller search space.
avoid_compiler_link_dependency(Hash, DepName) :-
hash_attr(Hash, "depends_on", PackageName, DepName, "link"),
not hash_attr(Hash, "depends_on", PackageName, DepName, "run"),
hash_attr(Hash, "hash", DepName, DepHash),
compiler_package(PackageName),
not compiler_used_as_a_library(node(_, PackageName), Hash).
% Without splicing, we simply recover the exact semantics
imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :-
hash_attr(ParentHash, "hash", ChildName, ChildHash),
ChildHash != ParentHash,
not avoid_compiler_link_dependency(ParentHash, ChildName),
not abi_splice_conditions_hold(_, _, ChildName, ChildHash).
imposed_constraint(Hash, "depends_on", PackageName, DepName, Type) :-
hash_attr(Hash, "depends_on", PackageName, DepName, Type),
hash_attr(Hash, "hash", DepName, DepHash),
not avoid_compiler_link_dependency(Hash, DepName),
not attr("splice_at_hash", _, _, DepName, DepHash).
imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :-
hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName),
not avoid_compiler_link_dependency(Hash, DepName),
not attr("splice_at_hash", _, _, DepName,_).
imposed_constraint(Hash, "virtual_node", VirtName) :-
hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName),
hash_attr(Hash, "virtual_node", VirtName),
not avoid_compiler_link_dependency(Hash, DepName),
not attr("splice_at_hash", _, _, DepName,_).
% Rules pertaining to attr("splice_at_hash") and abi_splice_conditions_hold will
% be conditionally loaded from splices.lp