From 8fc1ccc686cdcdda71aab2d94abcfab3d99a2a89 Mon Sep 17 00:00:00 2001
From: Massimiliano Culpo
Date: Fri, 11 Apr 2025 20:09:21 +0200
Subject: [PATCH] solver: encode `%` as "build requirement", not as "dependency" (#50011)

This PR fixes the issues with `%` and reused specs described in
https://github.com/spack/spack/issues/49847#issuecomment-2774640234

It does so by adding another layer of indirection: whenever a spec
`foo %bar` is encountered, the `%bar` part is encoded as an
`attr("build_requirement", ...)`. Then:

1. If `foo` is a node to be built, the build requirement implies a dependency.
2. Otherwise, it implies looking at e.g. the reused spec's metadata, and
   ensuring that it matches.

(Illustrative sketches of this encoding are appended after the patch.)

---------

Signed-off-by: Massimiliano Culpo
---
 lib/spack/spack/solver/asp.py                 | 59 +++++++++++++----
 lib/spack/spack/solver/concretize.lp          | 51 +++++++++++++--
 lib/spack/spack/test/concretization/core.py   | 33 ++++++++++
 .../spack/test/concretization/requirements.py | 65 +++++++++++++++++++
 4 files changed, 191 insertions(+), 17 deletions(-)

diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index 971b8a977c4..0c03e0b47e3 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -287,9 +287,33 @@ def specify(spec):
     return spack.spec.Spec(spec)
 
 
-def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
-    """Transformation that removes all "node" and "virtual_node" from the input list of facts."""
-    return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
+def remove_facts(
+    *to_be_removed: str,
+) -> Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]:
+    """Returns a transformation function that removes facts from the input list of facts."""
+
+    def _remove(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
+        return list(filter(lambda x: x.args[0] not in to_be_removed, facts))
+
+    return _remove
+
+
+def remove_build_deps(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
+    build_deps = {x.args[2]: x.args[1] for x in facts if x.args[0] == "depends_on"}
+    result = []
+    for x in facts:
+        current_name = x.args[1]
+        if current_name in build_deps:
+            x.name = "build_requirement"
+            result.append(fn.attr("build_requirement", build_deps[current_name], x))
+            continue
+
+        if x.args[0] == "depends_on":
+            continue
+
+        result.append(x)
+
+    return result
 
 
 def all_libcs() -> Set[spack.spec.Spec]:
@@ -1880,7 +1904,7 @@ def condition(
 
         if not context:
            context = ConditionContext()
-            context.transform_imposed = remove_node
+            context.transform_imposed = remove_facts("node", "virtual_node")
 
         if imposed_spec:
             imposed_name = imposed_spec.name or imposed_name
@@ -1980,7 +2004,7 @@ def track_dependencies(input_spec, requirements):
             return requirements + [fn.attr("track_dependencies", input_spec.name)]
 
         def dependency_holds(input_spec, requirements):
-            result = remove_node(input_spec, requirements) + [
+            result = remove_facts("node", "virtual_node")(input_spec, requirements) + [
                 fn.attr(
                     "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
                 )
@@ -2170,7 +2194,10 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
                     pkg_name, ConstraintOrigin.REQUIRE
                 )
                 if not virtual:
-                    context.transform_imposed = remove_node
+                    context.transform_required = remove_build_deps
+                    context.transform_imposed = remove_facts(
+                        "node", "virtual_node", "depends_on"
+                    )
                 # else: for virtuals we want to emit "node" and
                 # "virtual_node" in imposed specs
 
@@ -2232,16 +2259,18 @@ def external_packages(self):
             if pkg_name not in self.pkgs:
                 continue
 
-            self.gen.h2(f"External package: {pkg_name}")
             # Check if the external package is buildable. If it is
             # not then "external()" is a fact, unless we can
             # reuse an already installed spec.
             external_buildable = data.get("buildable", True)
+            externals = data.get("externals", [])
+            if not external_buildable or externals:
+                self.gen.h2(f"External package: {pkg_name}")
+
             if not external_buildable:
                 self.gen.fact(fn.buildable_false(pkg_name))
 
             # Read a list of all the specs for this package
-            externals = data.get("externals", [])
             candidate_specs = [
                 spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
             ]
@@ -2572,6 +2601,16 @@ def _spec_clauses(
                 # already-installed concrete specs.
                 if concrete_build_deps or dspec.depflag != dt.BUILD:
                     clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
+                elif not concrete_build_deps and dspec.depflag:
+                    clauses.append(
+                        fn.attr(
+                            "concrete_build_dependency", spec.name, dep.name, dep.dag_hash()
+                        )
+                    )
+                    for virtual_name in dspec.virtuals:
+                        clauses.append(
+                            fn.attr("virtual_on_build_edge", spec.name, dep.name, virtual_name)
+                        )
 
                 # if the spec is abstract, descend into dependencies.
                 # if it's concrete, then the hashes above take care of dependency
@@ -3265,15 +3304,13 @@ def literal_specs(self, specs):
                     # These facts are needed to compute the "condition_set" of the root
                     pkg_name = clause.args[1]
                     self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
-                elif clause_name == "depends_on":
-                    pkg_name = clause.args[2]
-                    self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
 
             requirements.append(
                 fn.attr(
                     "virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
                 )
             )
 
+            requirements = [x for x in requirements if x.args[0] != "depends_on"]
             cache[imposed_spec_key] = (effect_id, requirements)
             self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp
index de25bbf5dac..36064ee7b35 100644
--- a/lib/spack/spack/solver/concretize.lp
+++ b/lib/spack/spack/solver/concretize.lp
@@ -175,12 +175,21 @@ trigger_node(TriggerID, Node, Node) :-
 % Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
 mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
 
-condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
+literal_node(Root, node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
 
-1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
+1 { literal_node(Root, node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
   mentioned_in_literal(Root, Mentioned), Mentioned != Root,
   internal_error("must have exactly one condition_set for literals").
 
+1 { build_dependency_of_literal_node(LiteralNode, node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1 :-
+  literal_node(Root, LiteralNode),
+  build(LiteralNode),
+  attr("build_requirement", LiteralNode, build_requirement("node", BuildDependency)).
+
+condition_set(node(min_dupe_id, Root), LiteralNode) :- literal_node(Root, LiteralNode).
+condition_set(LiteralNode, BuildNode) :- build_dependency_of_literal_node(LiteralNode, BuildNode).
+
+
 % Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
 explicitly_requested_root(node(min_dupe_id, Package)) :-
   solve_literal(TriggerID),
@@ -472,10 +481,35 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
   imposed_constraint(ID, "depends_on", A1, A2, A3),
   internal_error("Build deps must land in exactly one duplicate").
 
-1 { build_requirement(node(X, Parent), node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1
+% If the parent is built, then we have a build_requirement on another node. For concrete nodes,
+% or external nodes, we don't since we are trimming their build dependencies.
+1 { attr("depends_on", node(X, Parent), node(0..Y-1, BuildDependency), "build") : max_dupes(BuildDependency, Y) } 1
   :- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
-     impose(ID, node(X, Parent)),
-     imposed_constraint(ID,"build_requirement",Parent,_).
+     build(node(X, Parent)),
+     not external(node(X, Parent)).
+
+:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
+   concrete(ParentNode),
+   not attr("concrete_build_dependency", ParentNode, BuildDependency, _).
+
+:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
+   attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
+   not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.
+
+:- attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
+   attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
+   attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
+   not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.
+
+% Asking for gcc@10 %gcc@9 shouldn't give us back an external gcc@10, just because of the hack
+% we have on externals
+:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
+   Parent == BuildDependency,
+   external(node(X, Parent)).
+
+build_requirement(node(X, Parent), node(Y, BuildDependency)) :-
+  attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build"),
+  attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)).
 
 1 { virtual_build_requirement(ParentNode, node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
   attr("dependency_holds", ParentNode, Virtual, "build"),
@@ -496,7 +530,6 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-
   attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
   build_requirement(ParentNode, node(X, BuildDependency)).
 
-attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).
 
 1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
   attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
@@ -882,6 +915,12 @@ requirement_weight(node(ID, Package), Group, W) :-
   requirement_policy(Package, Group, "one_of"),
   requirement_group_satisfied(node(ID, Package), Group).
 
+{ attr("build_requirement", node(ID, Package), BuildRequirement) : condition_requirement(TriggerID, "build_requirement", Package, BuildRequirement) } :-
+  pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
+  requirement_group_member(ConditionID, Package, Group),
+  activate_requirement(node(ID, Package), Group),
+  requirement_group(Package, Group).
+
 requirement_group_satisfied(node(ID, Package), X) :-
   1 { condition_holds(Y, node(ID, Package)) : requirement_group_member(Y, Package, X) } ,
   requirement_policy(Package, X, "any_of"),
diff --git a/lib/spack/spack/test/concretization/core.py b/lib/spack/spack/test/concretization/core.py
index b6c8453da4a..b28c3337f11 100644
--- a/lib/spack/spack/test/concretization/core.py
+++ b/lib/spack/spack/test/concretization/core.py
@@ -3333,3 +3333,36 @@ def test_specifying_compilers_with_virtuals_syntax(default_mock_concretization):
     assert mpich["fortran"].satisfies("gcc")
     assert mpich["c"].satisfies("llvm")
     assert mpich["cxx"].satisfies("llvm")
+
+
+@pytest.mark.regression("49847")
+@pytest.mark.xfail(sys.platform == "win32", reason="issues with install mockery")
+def test_reuse_when_input_specifies_build_dep(install_mockery, do_not_check_runtimes_on_reuse):
+    """Test that we can reuse a spec when specifying build dependencies in the input"""
+    pkgb_old = spack.concretize.concretize_one(spack.spec.Spec("pkg-b@0.9 %gcc@9"))
+    PackageInstaller([pkgb_old.package], fake=True, explicit=True).install()
+
+    with spack.config.override("concretizer:reuse", True):
+        result = spack.concretize.concretize_one("pkg-b %gcc")
+        assert pkgb_old.dag_hash() == result.dag_hash()
+
+        result = spack.concretize.concretize_one("pkg-a ^pkg-b %gcc@9")
+        assert pkgb_old.dag_hash() == result["pkg-b"].dag_hash()
+        assert result.satisfies("%gcc@9")
+
+        result = spack.concretize.concretize_one("pkg-a %gcc@10 ^pkg-b %gcc@9")
+        assert pkgb_old.dag_hash() == result["pkg-b"].dag_hash()
+
+
+@pytest.mark.regression("49847")
+def test_reuse_when_requiring_build_dep(
+    install_mockery, do_not_check_runtimes_on_reuse, mutable_config
+):
+    """Test that we can reuse a spec when specifying build dependencies in requirements"""
+    mutable_config.set("packages:all:require", "%gcc")
+    pkgb_old = spack.concretize.concretize_one(spack.spec.Spec("pkg-b@0.9"))
+    PackageInstaller([pkgb_old.package], fake=True, explicit=True).install()
+
+    with spack.config.override("concretizer:reuse", True):
+        result = spack.concretize.concretize_one("pkg-b")
+        assert pkgb_old.dag_hash() == result.dag_hash(), result.tree()
diff --git a/lib/spack/spack/test/concretization/requirements.py b/lib/spack/spack/test/concretization/requirements.py
index fe768611de7..c381a2d6d7d 100644
--- a/lib/spack/spack/test/concretization/requirements.py
+++ b/lib/spack/spack/test/concretization/requirements.py
@@ -1239,3 +1239,68 @@ def test_virtual_requirement_respects_any_of(concretize_scope, mock_packages):
 
     with pytest.raises(spack.error.SpackError):
         spack.concretize.concretize_one("mpileaks ^[virtuals=mpi] zmpi")
+
+
+@pytest.mark.parametrize(
+    "packages_yaml,expected_reuse,expected_constraints",
+    [
+        (
+            """
+packages:
+  all:
+    require:
+    - "%gcc"
+""",
+            True,
+            # To minimize installed specs we reuse pkg-b compiler, since the requirement allows it
+            ["%gcc@9"],
+        ),
+        (
+            """
+packages:
+  all:
+    require:
+    - "%gcc@10"
+""",
+            False,
+            ["%gcc@10"],
+        ),
+        (
+            """
+packages:
+  all:
+    require:
+    - "%gcc"
+  pkg-a:
+    require:
+    - "%gcc@10"
+""",
+            True,
+            ["%gcc@10"],
+        ),
+    ],
+)
+@pytest.mark.regression("49847")
+def test_requirements_on_compilers_and_reuse(
+    concretize_scope, mock_packages, packages_yaml, expected_reuse, expected_constraints
+):
+    """Tests that we can require compilers with `%` in configuration files, and still get reuse
+    of specs (even though reused specs have no build dependency in the ASP encoding).
+    """
+    input_spec = "pkg-a"
+
+    reused_spec = spack.concretize.concretize_one("pkg-b@0.9 %gcc@9")
+    reused_nodes = list(reused_spec.traverse())
+    update_packages_config(packages_yaml)
+    root_specs = [Spec(input_spec)]
+
+    with spack.config.override("concretizer:reuse", True):
+        solver = spack.solver.asp.Solver()
+        setup = spack.solver.asp.SpackSolverSetup()
+        result, _, _ = solver.driver.solve(setup, root_specs, reuse=reused_nodes)
+    pkga = result.specs[0]
+    is_pkgb_reused = pkga["pkg-b"].dag_hash() == reused_spec.dag_hash()
+
+    assert is_pkgb_reused == expected_reuse
+    for c in expected_constraints:
+        assert pkga.satisfies(c), pkga.tree()
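
-- 

The Python-side rewrite is easiest to see on a toy example. The sketch below is a simplified, self-contained stand-in rather than Spack's real machinery: the Fact dataclass plays the role of spack.solver.asp.AspFunction, attr() stands in for the fn.attr(...) builder, and remove_build_deps repeats the logic added to asp.py, rewriting the depends_on facts produced for a requirement like `pkg-a %gcc@9` into attr("build_requirement", ...) facts.

    # Minimal sketch -- NOT Spack's real classes; Fact/attr() are stand-ins.
    from dataclasses import dataclass
    from typing import Tuple


    @dataclass
    class Fact:
        name: str
        args: Tuple = ()

        def __str__(self):
            return f"{self.name}({', '.join(str(a) for a in self.args)})"


    def attr(*args) -> Fact:
        return Fact("attr", args)


    def remove_build_deps(facts):
        # Map each build dependency back to its parent, then wrap every fact
        # about that dependency into attr("build_requirement", parent, ...)
        # and drop the raw depends_on edge.
        build_deps = {x.args[2]: x.args[1] for x in facts if x.args[0] == "depends_on"}
        result = []
        for x in facts:
            if x.args[1] in build_deps:
                x.name = "build_requirement"
                result.append(attr("build_requirement", build_deps[x.args[1]], x))
                continue
            if x.args[0] == "depends_on":
                continue
            result.append(x)
        return result


    # Simplified facts for a requirement like `pkg-a %gcc@9` before the rewrite
    facts = [
        attr("node", "pkg-a"),
        attr("depends_on", "pkg-a", "gcc", "build"),
        attr("node", "gcc"),
        attr("node_version_satisfies", "gcc", "9"),
    ]
    for fact in remove_build_deps(facts):
        print(fact)
    # attr(node, pkg-a)
    # attr(build_requirement, pkg-a, build_requirement(node, gcc))
    # attr(build_requirement, pkg-a, build_requirement(node_version_satisfies, gcc, 9))

The wrapped facts have exactly the shape the new concretize.lp rules match on, e.g. attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)).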
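On the solver side those facts are then interpreted in two ways, mirroring points 1 and 2 of the description above. The following sketch emulates that decision in plain Python; the function and the dictionaries are made up for illustration and are not part of Spack's API, they only stand in for the solver state that the new ASP rules consult.

    # Illustration only: plain-Python emulation of the two cases the new
    # concretize.lp rules encode for attr("build_requirement", Parent, Dep).
    def check_build_requirement(parent, dep, parent_is_built, concrete_build_deps):
        if parent_is_built:
            # Case 1: the parent is a node to be built, so the build requirement
            # becomes a real edge: attr("depends_on", parent, dep, "build").
            return ("depends_on", parent, dep, "build")
        # Case 2: the parent is reused/concrete; its trimmed build edges were
        # recorded as attr("concrete_build_dependency", parent, dep, hash), and
        # the requirement only holds if a matching record exists.
        recorded = concrete_build_deps.get(parent, {})
        if dep not in recorded:
            raise ValueError(f"{parent} was not built with {dep}")
        return ("reused_with", parent, dep, recorded[dep])


    # pkg-b is going to be built: "%gcc" simply adds a build dependency
    print(check_build_requirement("pkg-b", "gcc", True, {}))
    # pkg-b is reused: accepted only because its metadata says it was built with gcc
    print(check_build_requirement("pkg-b", "gcc", False, {"pkg-b": {"gcc": "abc123"}}))

Case 2 is the reason _spec_clauses now emits concrete_build_dependency and virtual_on_build_edge facts for the trimmed build edges of reused specs.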