Add a "requires" directive, extend functionality of package requirements (#36286)

Add a "require" directive to packages, which functions exactly like
requirements specified in packages.yaml (uses the same fact-generation
logic); update both to allow making the requirement conditional.

* Packages may now use "requires" to add constraints. This can be useful
  for something like requires("%gcc") (where before we had to add a
  conflict for every compiler except gcc).
* Requirements (in packages.yaml or in a "requires" directive) can be
  conditional on a spec, e.g. requires("%gcc", when="@1.0.0") (version
  1.0.0 can only be built with gcc).
* Requirements may include a message that clarifies why they are needed.
  The concretizer assigns a high priority to errors that carry these
  messages (in particular over errors for unsatisfied requirements that
  do not provide a message, and over a number of more generic errors).
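
A minimal sketch of the recipe-side syntax (the package below is hypothetical
and not part of this commit; it assumes "requires" is re-exported with the
other directives via spack.package):

    from spack.package import *


    class Foo(Package):
        """Hypothetical package, shown only to illustrate the directive."""

        homepage = "https://example.com/foo"
        url = "https://example.com/foo-1.0.0.tar.gz"

        version("1.0.0", sha256="0" * 64)  # placeholder checksum

        # One requires() replaces a conflicts() for every compiler except GCC
        requires("%gcc", msg="foo is only tested with GCC")
        # The conditional form restricts only the matching versions
        requires("%gcc", when="@1.0.0", msg="foo@1.0.0 builds only with GCC")

The same constraints can be expressed (and made conditional with "when" and
annotated with "message") from packages.yaml; the schema and test changes
below show that side.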
Massimiliano Culpo
2023-05-08 19:12:26 +02:00
committed by GitHub
parent d0cba2bf35
commit 0139288ced
14 changed files with 627 additions and 135 deletions

View File

@@ -26,6 +26,7 @@ class OpenMpi(Package):
* ``resource``
* ``variant``
* ``version``
* ``requires``
"""
import collections.abc
@@ -66,6 +67,7 @@ class OpenMpi(Package):
"variant",
"resource",
"build_system",
"requires",
]
#: These are variant names used by Spack internally; packages can't use them
@@ -864,6 +866,45 @@ def _execute_maintainer(pkg):
return _execute_maintainer
@directive("requirements")
def requires(*requirement_specs, policy="one_of", when=None, msg=None):
"""Allows a package to request a configuration to be present in all valid solutions.
For instance, a package that is known to compile only with GCC can declare:
requires("%gcc")
A package that requires Apple-Clang on Darwin can declare instead:
requires("%apple-clang", when="platform=darwin", msg="Apple Clang is required on Darwin")
Args:
requirement_specs: one or more specs expressing the requirement
when: optional constraint that triggers the requirement. If None the requirement
is applied unconditionally.
msg: optional user defined message
"""
def _execute_requires(pkg):
if policy not in ("one_of", "any_of"):
err_msg = (
f"the 'policy' argument of the 'requires' directive in {pkg.name} is set "
f"to a wrong value (only 'one_of' or 'any_of' are allowed)"
)
raise DirectiveError(err_msg)
when_spec = make_when_spec(when)
if not when_spec:
return
# Save in a list the requirements and the associated custom messages
when_spec_list = pkg.requirements.setdefault(tuple(requirement_specs), [])
when_spec_list.append((when_spec, policy, msg))
return _execute_requires
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -37,8 +37,17 @@
"type": "object",
"additionalProperties": False,
"properties": {
"one_of": {"type": "array"},
"any_of": {"type": "array"},
"one_of": {
"type": "array",
"items": {"type": "string"},
},
"any_of": {
"type": "array",
"items": {"type": "string"},
},
"spec": {"type": "string"},
"message": {"type": "string"},
"when": {"type": "string"},
},
},
{"type": "string"},

View File

@@ -7,12 +7,14 @@
import collections
import collections.abc
import copy
import enum
import itertools
import os
import pprint
import re
import types
import warnings
from typing import List
import archspec.cpu
@@ -98,24 +100,38 @@ def getter(node):
ast_type = ast_getter("ast_type", "type")
ast_sym = ast_getter("symbol", "term")
- #: Order of precedence for version origins. Topmost types are preferred.
- version_origin_fields = [
- "spec",
- "dev_spec",
- "external",
- "packages_yaml",
- "package_requirements",
- "package_py",
- "installed",
- ]
- #: Look up version precedence strings by enum id
- version_origin_str = {i: name for i, name in enumerate(version_origin_fields)}
- #: Enumeration like object to mark version provenance
- version_provenance = collections.namedtuple(  # type: ignore
- "VersionProvenance", version_origin_fields
- )(**{name: i for i, name in enumerate(version_origin_fields)})
+ class Provenance(enum.IntEnum):
+ """Enumeration of the possible provenances of a version."""
+ # A spec literal
+ SPEC = enum.auto()
+ # A dev spec literal
+ DEV_SPEC = enum.auto()
+ # An external spec declaration
+ EXTERNAL = enum.auto()
+ # The 'packages' section of the configuration
+ PACKAGES_YAML = enum.auto()
+ # A package requirement
+ PACKAGE_REQUIREMENT = enum.auto()
+ # A 'package.py' file
+ PACKAGE_PY = enum.auto()
+ # An installed spec
+ INSTALLED = enum.auto()
+ def __str__(self):
+ return f"{self._name_.lower()}"
class RequirementKind(enum.Enum):
"""Purpose / provenance of a requirement"""
#: Default requirement expressed under the 'all' attribute of packages.yaml
DEFAULT = enum.auto()
#: Requirement expressed on a virtual package
VIRTUAL = enum.auto()
#: Requirement expressed on a specific package
PACKAGE = enum.auto()
DeclaredVersion = collections.namedtuple("DeclaredVersion", ["version", "idx", "origin"])
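
The IntEnum also customizes __str__, which is what now goes into the
version_declared facts in place of the old version_origin_str lookup; a quick
check (assuming the module is importable outside of a solve):

    from spack.solver.asp import Provenance

    # Lower-cased member names replace the old version_origin_str strings
    assert str(Provenance.PACKAGE_REQUIREMENT) == "package_requirement"
    assert str(Provenance.PACKAGES_YAML) == "packages_yaml"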
@@ -635,6 +651,12 @@ def raise_if_errors(self):
raise UnsatisfiableSpecError(msg)
#: Data class to collect information on a requirement
RequirementRule = collections.namedtuple(
"RequirementRule", ["pkg_name", "policy", "requirements", "condition", "kind", "message"]
)
class PyclingoDriver(object):
def __init__(self, cores=True):
"""Driver for the Python clingo interface.
@@ -883,8 +905,7 @@ def pkg_version_rules(self, pkg):
"""
def key_fn(version):
- # Origins are sorted by precedence defined in `version_origin_str`,
- # then by order added.
+ # Origins are sorted by "provenance" first, see the Provenance enumeration above
return version.origin, version.idx
pkg = packagize(pkg)
@@ -900,10 +921,7 @@ def key_fn(version):
for weight, declared_version in enumerate(most_to_least_preferred):
self.gen.fact(
fn.version_declared(
- pkg.name,
- declared_version.version,
- weight,
- version_origin_str[declared_version.origin],
+ pkg.name, declared_version.version, weight, str(declared_version.origin)
)
)
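
A small sketch of the ordering that key_fn produces (versions and indices are
made up):

    from spack.solver.asp import DeclaredVersion, Provenance

    declared = [
        DeclaredVersion(version="1.2", idx=0, origin=Provenance.PACKAGE_PY),
        DeclaredVersion(version="1.3", idx=0, origin=Provenance.PACKAGES_YAML),
        DeclaredVersion(version="1.1", idx=1, origin=Provenance.PACKAGE_PY),
    ]
    # Sort by (origin, idx): the packages.yaml entry wins, then package.py
    # versions keep their declaration order
    ordered = sorted(declared, key=lambda v: (v.origin, v.idx))
    assert [v.version for v in ordered] == ["1.3", "1.2", "1.1"]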
@@ -1012,35 +1030,83 @@ def package_compiler_defaults(self, pkg):
)
def package_requirement_rules(self, pkg):
+ rules = self.requirement_rules_from_package_py(pkg)
+ rules.extend(self.requirement_rules_from_packages_yaml(pkg))
+ self.emit_facts_from_requirement_rules(rules)
+ def requirement_rules_from_package_py(self, pkg):
+ rules = []
+ for requirements, conditions in pkg.requirements.items():
+ for when_spec, policy, message in conditions:
+ rules.append(
+ RequirementRule(
+ pkg_name=pkg.name,
+ policy=policy,
+ requirements=requirements,
+ kind=RequirementKind.PACKAGE,
+ condition=when_spec,
+ message=message,
+ )
+ )
+ return rules
+ def requirement_rules_from_packages_yaml(self, pkg):
pkg_name = pkg.name
config = spack.config.get("packages")
- requirements, raise_on_failure = config.get(pkg_name, {}).get("require", []), True
+ requirements = config.get(pkg_name, {}).get("require", [])
+ kind = RequirementKind.PACKAGE
if not requirements:
- requirements, raise_on_failure = config.get("all", {}).get("require", []), False
- rules = self._rules_from_requirements(pkg_name, requirements)
- self.emit_facts_from_requirement_rules(
- rules, virtual=False, raise_on_failure=raise_on_failure
- )
+ requirements = config.get("all", {}).get("require", [])
+ kind = RequirementKind.DEFAULT
+ return self._rules_from_requirements(pkg_name, requirements, kind=kind)
- def _rules_from_requirements(self, pkg_name, requirements):
+ def _rules_from_requirements(self, pkg_name: str, requirements, *, kind: RequirementKind):
"""Manipulate requirements from packages.yaml, and return a list of tuples
with a uniform structure (name, policy, requirements).
"""
if isinstance(requirements, str):
- rules = [(pkg_name, "one_of", [requirements])]
+ rules = [self._rule_from_str(pkg_name, requirements, kind)]
else:
rules = []
for requirement in requirements:
if isinstance(requirement, str):
# A string represents a spec that must be satisfied. It is
# equivalent to a one_of group with a single element
- rules.append((pkg_name, "one_of", [requirement]))
+ rules.append(self._rule_from_str(pkg_name, requirement, kind))
else:
- for policy in ("one_of", "any_of"):
+ for policy in ("spec", "one_of", "any_of"):
if policy in requirement:
- rules.append((pkg_name, policy, requirement[policy]))
+ constraints = requirement[policy]
+ # "spec" is for specifying a single spec
+ if policy == "spec":
+ constraints = [constraints]
+ policy = "one_of"
+ rules.append(
+ RequirementRule(
+ pkg_name=pkg_name,
+ policy=policy,
+ requirements=constraints,
+ kind=kind,
+ message=requirement.get("message"),
+ condition=requirement.get("when"),
+ )
+ )
return rules
def _rule_from_str(
self, pkg_name: str, requirements: str, kind: RequirementKind
) -> RequirementRule:
return RequirementRule(
pkg_name=pkg_name,
policy="one_of",
requirements=[requirements],
kind=kind,
condition=None,
message=None,
)
def pkg_rules(self, pkg, tests):
pkg = packagize(pkg)
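
To make the mapping concrete, a packages.yaml snippet like the ones used in
the tests further down comes back from _rules_from_requirements roughly as
follows (values illustrative):

    from spack.solver.asp import RequirementKind, RequirementRule

    # For a "require" list such as
    #   - one_of: ["%clang"]
    #     when: "@0.8.13"
    #     message: "newer libelf needs clang"
    #   - "+shared"
    # _rules_from_requirements("libelf", <that list>, kind=RequirementKind.PACKAGE)
    # returns approximately:
    rules = [
        RequirementRule(
            pkg_name="libelf",
            policy="one_of",
            requirements=["%clang"],
            kind=RequirementKind.PACKAGE,
            condition="@0.8.13",
            message="newer libelf needs clang",
        ),
        RequirementRule(
            pkg_name="libelf",
            policy="one_of",
            requirements=["+shared"],
            kind=RequirementKind.PACKAGE,
            condition=None,
            message=None,
        ),
    ]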
@@ -1267,27 +1333,55 @@ def provider_requirements(self):
assert self.possible_virtuals is not None, msg
for virtual_str in sorted(self.possible_virtuals):
requirements = packages_yaml.get(virtual_str, {}).get("require", [])
- rules = self._rules_from_requirements(virtual_str, requirements)
- self.emit_facts_from_requirement_rules(rules, virtual=True)
+ rules = self._rules_from_requirements(
+ virtual_str, requirements, kind=RequirementKind.VIRTUAL
+ )
+ self.emit_facts_from_requirement_rules(rules)
- def emit_facts_from_requirement_rules(self, rules, *, virtual=False, raise_on_failure=True):
- """Generate facts to enforce requirements from packages.yaml.
+ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
+ """Generate facts to enforce requirements.
Args:
rules: rules for which we want facts to be emitted
- virtual: if True the requirements are on a virtual spec
- raise_on_failure: if True raise an exception when a requirement condition is invalid
- for the current spec. If False, just skip that condition
"""
- for requirement_grp_id, (pkg_name, policy, requirement_grp) in enumerate(rules):
+ for requirement_grp_id, rule in enumerate(rules):
+ virtual = rule.kind == RequirementKind.VIRTUAL
+ pkg_name, policy, requirement_grp = rule.pkg_name, rule.policy, rule.requirements
+ requirement_weight = 0
+ main_requirement_condition = spack.directives.make_when_spec(rule.condition)
+ if main_requirement_condition is False:
+ continue
+ # Write explicitly if a requirement is conditional or not
+ if main_requirement_condition != spack.spec.Spec():
+ msg = f"condition to activate requirement {requirement_grp_id}"
+ try:
+ main_condition_id = self.condition(
+ main_requirement_condition, name=pkg_name, msg=msg
+ )
+ except Exception as e:
+ if rule.kind != RequirementKind.DEFAULT:
+ raise RuntimeError("cannot emit requirements for the solver") from e
+ continue
+ self.gen.fact(
+ fn.requirement_conditional(pkg_name, requirement_grp_id, main_condition_id)
+ )
self.gen.fact(fn.requirement_group(pkg_name, requirement_grp_id))
self.gen.fact(fn.requirement_policy(pkg_name, requirement_grp_id, policy))
- requirement_weight = 0
+ if rule.message:
+ self.gen.fact(fn.requirement_message(pkg_name, requirement_grp_id, rule.message))
+ self.gen.newline()
for spec_str in requirement_grp:
spec = spack.spec.Spec(spec_str)
if not spec.name:
spec.name = pkg_name
spec.attach_git_version_lookup()
when_spec = spec
if virtual:
when_spec = spack.spec.Spec(pkg_name)
@@ -1297,12 +1391,16 @@ def emit_facts_from_requirement_rules(self, rules, *, virtual=False, raise_on_fa
required_spec=when_spec, imposed_spec=spec, name=pkg_name, node=virtual
)
except Exception as e:
- if raise_on_failure:
+ # Do not raise if the rule comes from the 'all' subsection, since usability
+ # would be impaired. If a rule does not apply for a specific package, just
+ # discard it.
+ if rule.kind != RequirementKind.DEFAULT:
raise RuntimeError("cannot emit requirements for the solver") from e
continue
self.gen.fact(fn.requirement_group_member(member_id, pkg_name, requirement_grp_id))
self.gen.fact(fn.requirement_has_weight(member_id, requirement_weight))
self.gen.newline()
requirement_weight += 1
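
The conditional handling above relies on make_when_spec's conventions; a short
sanity check of the two cases the code distinguishes (a sketch, assuming
make_when_spec behaves as in spack.directives):

    import spack.spec
    from spack.directives import make_when_spec

    # when=None means "always": it becomes the empty (anonymous) spec, so the
    # "!= spack.spec.Spec()" check above does not emit a condition for it
    assert make_when_spec(None) == spack.spec.Spec()
    # An impossible condition comes back falsy and the whole rule is skipped
    # with "continue"
    assert not make_when_spec(False)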
def external_packages(self):
@@ -1345,7 +1443,7 @@ def external_packages(self):
]
for version, idx, external_id in external_versions:
self.declared_versions[pkg_name].append(
- DeclaredVersion(version=version, idx=idx, origin=version_provenance.external)
+ DeclaredVersion(version=version, idx=idx, origin=Provenance.EXTERNAL)
)
# Declare external conditions with a local index into packages.yaml
@@ -1627,7 +1725,7 @@ def key_fn(item):
v, version_info = item
self.possible_versions[pkg_name].add(v)
self.declared_versions[pkg_name].append(
- DeclaredVersion(version=v, idx=idx, origin=version_provenance.package_py)
+ DeclaredVersion(version=v, idx=idx, origin=Provenance.PACKAGE_PY)
)
deprecated = version_info.get("deprecated", False)
if deprecated:
@@ -1640,7 +1738,7 @@ def key_fn(item):
# v can be a string so force it into an actual version for comparisons
ver = vn.Version(v)
self.declared_versions[pkg_name].append(
- DeclaredVersion(version=ver, idx=idx, origin=version_provenance.packages_yaml)
+ DeclaredVersion(version=ver, idx=idx, origin=Provenance.PACKAGES_YAML)
)
self.possible_versions[pkg_name].add(ver)
@@ -1684,6 +1782,7 @@ def platform_defaults(self):
self.gen.h2("Default platform")
platform = spack.platforms.host()
self.gen.fact(fn.node_platform_default(platform))
self.gen.fact(fn.allowed_platform(platform))
def os_defaults(self, specs):
self.gen.h2("Possible operating systems")
@@ -2005,9 +2104,7 @@ def _facts_from_concrete_spec(self, spec, possible):
for dep in spec.traverse():
self.possible_versions[dep.name].add(dep.version)
self.declared_versions[dep.name].append(
- DeclaredVersion(
- version=dep.version, idx=0, origin=version_provenance.installed
- )
+ DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
)
self.possible_oses.add(dep.os)
@@ -2078,13 +2175,11 @@ def setup(self, driver, specs, reuse=None):
# traverse all specs and packages to build dict of possible versions
self.build_version_dict(possible)
- self.add_concrete_versions_from_specs(specs, version_provenance.spec)
- self.add_concrete_versions_from_specs(dev_specs, version_provenance.dev_spec)
+ self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
+ self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
req_version_specs = _get_versioned_specs_from_pkg_requirements()
- self.add_concrete_versions_from_specs(
- req_version_specs, version_provenance.package_requirements
- )
+ self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
self.gen.h1("Concrete input spec definitions")
self.define_concrete_input_specs(specs, possible)
@@ -2187,9 +2282,15 @@ def _specs_from_requires(pkg_name, section):
if isinstance(spec_group, str):
spec_strs.append(spec_group)
else:
- # Otherwise it is a one_of or any_of: get the values
- (x,) = spec_group.values()
- spec_strs.extend(x)
+ # Otherwise it is an object. The object can contain a single
+ # "spec" constraint, or a list of them with "any_of" or
+ # "one_of" policy.
+ if "spec" in spec_group:
+ new_constraints = [spec_group["spec"]]
+ else:
+ key = "one_of" if "one_of" in spec_group else "any_of"
+ new_constraints = spec_group[key]
+ spec_strs.extend(new_constraints)
extracted_specs = []
for spec_str in spec_strs:

View File

@@ -461,13 +461,24 @@ error(100, "Attempted to use external for '{0}' which does not satisfy any confi
% Config required semantics
%-----------------------------------------------------------------------------
activate_requirement_rules(Package) :- attr("node", Package).
activate_requirement_rules(Package) :- attr("virtual_node", Package).
package_in_dag(Package) :- attr("node", Package).
package_in_dag(Package) :- attr("virtual_node", Package).
activate_requirement(Package, X) :-
package_in_dag(Package),
requirement_group(Package, X),
not requirement_conditional(Package, X, _).
activate_requirement(Package, X) :-
package_in_dag(Package),
requirement_group(Package, X),
condition_holds(Y),
requirement_conditional(Package, X, Y).
requirement_group_satisfied(Package, X) :-
1 { condition_holds(Y) : requirement_group_member(Y, Package, X) } 1,
activate_requirement_rules(Package),
requirement_policy(Package, X, "one_of"),
activate_requirement(Package, X),
requirement_group(Package, X).
requirement_weight(Package, Group, W) :-
@@ -479,8 +490,8 @@ requirement_weight(Package, Group, W) :-
requirement_group_satisfied(Package, X) :-
1 { condition_holds(Y) : requirement_group_member(Y, Package, X) } ,
- activate_requirement_rules(Package),
requirement_policy(Package, X, "any_of"),
+ activate_requirement(Package, X),
requirement_group(Package, X).
requirement_weight(Package, Group, W) :-
@@ -494,12 +505,23 @@ requirement_weight(Package, Group, W) :-
requirement_policy(Package, Group, "any_of"),
requirement_group_satisfied(Package, Group).
error(100, "Cannot satisfy the requirements in packages.yaml for package '{0}'. You may want to delete them to proceed with concretization. To check where the requirements are defined run 'spack config blame packages'", Package) :-
activate_requirement_rules(Package),
error(100, "cannot satisfy a requirement for package '{0}'.", Package) :-
activate_requirement(Package, X),
requirement_group(Package, X),
not requirement_message(Package, X, _),
not requirement_group_satisfied(Package, X).
error(10, Message) :-
activate_requirement(Package, X),
requirement_group(Package, X),
requirement_message(Package, X, Message),
not requirement_group_satisfied(Package, X).
#defined requirement_group/2.
#defined requirement_conditional/3.
#defined requirement_message/3.
#defined requirement_group_member/3.
#defined requirement_has_weight/2.
#defined requirement_policy/3.
@@ -691,6 +713,8 @@ variant_single_value(Package, "dev_path")
%-----------------------------------------------------------------------------
% if no platform is set, fall back to the default
:- attr("node_platform", _, Platform), not allowed_platform(Platform).
attr("node_platform", Package, Platform)
:- attr("node", Package),
not attr("node_platform_set", Package),

View File

@@ -2,13 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pathlib
import sys
import pytest
import spack.build_systems.generic
import spack.config
import spack.error
import spack.package_base
import spack.repo
import spack.util.spack_yaml as syaml
@@ -595,3 +596,216 @@ def test_non_existing_variants_under_all(concretize_scope, mock_packages):
spec = Spec("callpath ^zmpi").concretized()
assert "~foo" not in spec
@pytest.mark.parametrize(
"packages_yaml,spec_str,expected_satisfies",
[
# In the tests below we set the compiler preference to "gcc" to make explicit that
# "clang" is not the preferred compiler. That makes the tests that verify enforcing
# "%clang" as a requirement more robust.
(
"""\
packages:
all:
compiler: ["gcc", "clang"]
libelf:
require:
- one_of: ["%clang"]
when: "@0.8.13"
""",
"libelf",
[("@0.8.13%clang", True), ("%gcc", False)],
),
(
"""\
packages:
all:
compiler: ["gcc", "clang"]
libelf:
require:
- one_of: ["%clang"]
when: "@0.8.13"
""",
"libelf@0.8.12",
[("%clang", False), ("%gcc", True)],
),
(
"""\
packages:
all:
compiler: ["gcc", "clang"]
libelf:
require:
- spec: "%clang"
when: "@0.8.13"
""",
"libelf@0.8.12",
[("%clang", False), ("%gcc", True)],
),
(
"""\
packages:
all:
compiler: ["gcc", "clang"]
libelf:
require:
- spec: "@0.8.13"
when: "%clang"
""",
"libelf@0.8.13%gcc",
[("%clang", False), ("%gcc", True), ("@0.8.13", True)],
),
],
)
def test_conditional_requirements_from_packages_yaml(
packages_yaml, spec_str, expected_satisfies, concretize_scope, mock_packages
):
"""Test that conditional requirements are required when the condition is met,
and optional when the condition is not met.
"""
if spack.config.get("config:concretizer") == "original":
pytest.skip("Original concretizer does not support configuration requirements")
update_packages_config(packages_yaml)
spec = Spec(spec_str).concretized()
for match_str, expected in expected_satisfies:
assert spec.satisfies(match_str) is expected
@pytest.mark.parametrize(
"packages_yaml,spec_str,expected_message",
[
(
"""\
packages:
mpileaks:
require:
- one_of: ["~debug"]
message: "debug is not allowed"
""",
"mpileaks+debug",
"debug is not allowed",
),
(
"""\
packages:
libelf:
require:
- one_of: ["%clang"]
message: "can only be compiled with clang"
""",
"libelf%gcc",
"can only be compiled with clang",
),
(
"""\
packages:
libelf:
require:
- one_of: ["%clang"]
when: platform=test
message: "can only be compiled with clang on the test platform"
""",
"libelf%gcc",
"can only be compiled with clang on ",
),
(
"""\
packages:
libelf:
require:
- spec: "%clang"
when: platform=test
message: "can only be compiled with clang on the test platform"
""",
"libelf%gcc",
"can only be compiled with clang on ",
),
(
"""\
packages:
libelf:
require:
- one_of: ["%clang", "%intel"]
when: platform=test
message: "can only be compiled with clang or intel on the test platform"
""",
"libelf%gcc",
"can only be compiled with clang or intel",
),
],
)
def test_requirements_fail_with_custom_message(
packages_yaml, spec_str, expected_message, concretize_scope, mock_packages
):
"""Test that specs failing due to requirements not being satisfiable fail with a
custom error message.
"""
if spack.config.get("config:concretizer") == "original":
pytest.skip("Original concretizer does not support configuration requirements")
update_packages_config(packages_yaml)
with pytest.raises(spack.error.SpackError, match=expected_message):
Spec(spec_str).concretized()
def test_skip_requirement_when_default_requirement_condition_cannot_be_met(
concretize_scope, mock_packages
):
"""Tests that we can express a requirement condition under 'all' also in cases where
the corresponding condition spec mentions variants or versions that don't exist in the
package. For those packages the requirement rule is not emitted, since it can be
determined to be always false.
"""
if spack.config.get("config:concretizer") == "original":
pytest.skip("Original concretizer does not support configuration requirements")
packages_yaml = """
packages:
all:
require:
- one_of: ["%clang"]
when: "+shared"
"""
update_packages_config(packages_yaml)
s = Spec("mpileaks").concretized()
assert s.satisfies("%clang +shared")
# Sanity checks that 'callpath' doesn't have the shared variant, but that didn't
# cause failures during concretization.
assert "shared" not in s["callpath"].variants
def test_requires_directive(concretize_scope, mock_packages):
if spack.config.get("config:concretizer") == "original":
pytest.skip("Original concretizer does not support configuration requirements")
compilers_yaml = pathlib.Path(concretize_scope) / "compilers.yaml"
compilers_yaml.write_text(
"""
compilers::
- compiler:
spec: gcc@12.0.0
paths:
cc: /usr/bin/clang-12
cxx: /usr/bin/clang++-12
f77: null
fc: null
operating_system: debian6
target: x86_64
modules: []
"""
)
spack.config.config.clear_caches()
# This package requires either clang or gcc
s = Spec("requires_clang_or_gcc").concretized()
assert s.satisfies("%gcc@12.0.0")
# This package can only be compiled with clang
with pytest.raises(spack.error.SpackError, match="can only be compiled with Clang"):
Spec("requires_clang").concretized()

View File

@@ -770,7 +770,7 @@ def concretize_scope(mutable_config, tmpdir):
spack.config.ConfigScope("concretize", str(tmpdir.join("concretize")))
)
- yield
+ yield str(tmpdir.join("concretize"))
mutable_config.pop_scope()
spack.repo.path._provider_index = None