Compare commits

29 Commits: develop ... features/t

SHA1:
- 6737591016
- 66bb19084c
- 9cdb25497d
- 26c5f5265d
- a16d10edc9
- 0d021717ec
- 231d537a2e
- 452a84d9eb
- b03d5b7885
- e0aa378f9e
- 2c2282dcb4
- f27eb55f29
- 019957a225
- 7472a1db8a
- fb6ffc45d7
- ebf4c8b445
- 79610ce80e
- 462379b92f
- 893d56ac2c
- b4f938adf4
- 6623209ba7
- 6eaaaa4ae7
- 89e0cf886d
- e24bd2ef3c
- 2c43131aca
- d792121cde
- ce3fcf011f
- e66ae3959d
- 02513eae7e
@@ -60,6 +60,7 @@
import spack.schema.modules
import spack.schema.packages
import spack.schema.repos
import spack.schema.toolchains
import spack.schema.upstreams
import spack.schema.view
import spack.util.remote_file_cache as rfc_util

@@ -87,6 +88,7 @@
"bootstrap": spack.schema.bootstrap.schema,
"ci": spack.schema.ci.schema,
"cdash": spack.schema.cdash.schema,
"toolchains": spack.schema.toolchains.schema,
}

# Same as above, but including keys for environments
@@ -1090,12 +1090,13 @@ def _handle_solver_bug(
stream=out,
)
if wrong_output:
msg = (
"internal solver error: the following specs were concretized, but do not satisfy the "
"input:\n - "
+ "\n - ".join(str(s) for s, _ in wrong_output)
+ "\n Please report a bug at https://github.com/spack/spack/issues"
)
msg = "internal solver error: the following specs were concretized, but do not satisfy "
msg += "the input:\n"
for in_spec, out_spec in wrong_output:
msg += f" - input: {in_spec}\n"
msg += f" output: {out_spec.long_spec}\n"
msg += "\n Please report a bug at https://github.com/spack/spack/issues"

# try to write the input/output specs to a temporary directory for bug reports
try:
tmpdir = tempfile.mkdtemp(prefix="spack-asp-", dir=root)
lib/spack/spack/schema/toolchains.py (new file, 23 lines)

@@ -0,0 +1,23 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for toolchains.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/toolchains.py
   :lines: 14-
"""
from typing import Any, Dict

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"toolchains": {"type": "object", "default": {}}}


#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack toolchain configuration file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
}
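The schema only requires `toolchains` to be a mapping; judging from the parser changes and the tests later in this diff, each entry maps a toolchain name to a spec fragment that the parser splices in when it sees `%<name>`. A minimal sketch (the toolchain name is illustrative, and the in-memory config call mirrors the unit tests below):

```python
# Sketch only: assumes a Spack session with mutable configuration, as in the
# parser tests further below.  "my_toolchain" is an illustrative name.
import spack.config
import spack.spec

spack.config.CONFIG.set("toolchains", {"my_toolchain": "%[when='%c' virtuals=c]gcc"})

# "foo%my_toolchain" now parses as if the user had written the full fragment:
print(spack.spec.Spec("foo%my_toolchain"))
# expected (per the roundtrip tests below): foo %[when='%c' virtuals=c] gcc
```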
@@ -572,7 +572,7 @@ def format_unsolved(unsolved_specs):
for input_spec, candidate in unsolved_specs:
msg += f"\n\tInput spec: {str(input_spec)}"
if candidate:
msg += f"\n\tCandidate spec: {str(candidate)}"
msg += f"\n\tCandidate spec: {candidate.long_spec}"
else:
msg += "\n\t(No candidate specs from solver)"
return msg

@@ -1416,12 +1416,17 @@ class ConstraintOrigin(enum.Enum):
result.
"""

CONDITIONAL_SPEC = 0
DEPENDS_ON = 1
REQUIRE = 2

@staticmethod
def _SUFFIXES() -> Dict["ConstraintOrigin", str]:
return {ConstraintOrigin.DEPENDS_ON: "_dep", ConstraintOrigin.REQUIRE: "_req"}
return {
ConstraintOrigin.CONDITIONAL_SPEC: "_cond",
ConstraintOrigin.DEPENDS_ON: "_dep",
ConstraintOrigin.REQUIRE: "_req",
}

@staticmethod
def append_type_suffix(pkg_id: str, kind: "ConstraintOrigin") -> str:
@@ -1877,6 +1882,73 @@ def _get_condition_id(

return cond_id

def condition_clauses(
self,
required_spec: spack.spec.Spec,
imposed_spec: Optional[spack.spec.Spec] = None,
*,
required_name: Optional[str] = None,
imposed_name: Optional[str] = None,
msg: Optional[str] = None,
context: Optional[ConditionContext] = None,
):
"""Generate facts for a dependency or virtual provider condition.

Arguments:
required_spec: the constraints that trigger this condition
imposed_spec: the constraints that are imposed when this condition is triggered
required_name: name for ``required_spec``
(required if required_spec is anonymous, ignored if not)
imposed_name: name for ``imposed_spec``
(required if imposed_spec is anonymous, ignored if not)
msg: description of the condition
context: if provided, indicates how to modify the clause-sets for the required/imposed
specs based on the type of constraint they are generated for (e.g. `depends_on`)
Returns:
the generated clauses and the id of the condition created by this function
"""
clauses = []
required_name = required_spec.name or required_name
if not required_name:
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")

if not context:
context = ConditionContext()
context.transform_imposed = remove_facts("node", "virtual_node")

if imposed_spec:
imposed_name = imposed_spec.name or imposed_name
if not imposed_name:
raise ValueError(f"Must provide a name for imposed constraint: '{imposed_spec}'")

with named_spec(required_spec, required_name), named_spec(imposed_spec, imposed_name):
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the
# caller, we won't emit partial facts.

condition_id = next(self._id_counter)
requirement_context = context.requirement_context()
trigger_id = self._get_condition_id(
required_spec, cache=self._trigger_cache, body=True, context=requirement_context
)
clauses.append(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
clauses.append(fn.condition_reason(condition_id, msg))
clauses.append(
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
)
if not imposed_spec:
return clauses, condition_id

impose_context = context.impose_context()
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, context=impose_context
)
clauses.append(
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
)

return clauses, condition_id

def condition(
self,
required_spec: spack.spec.Spec,
@@ -1902,46 +1974,18 @@ def condition(
Returns:
int: id of the condition created by this function
"""
required_name = required_spec.name or required_name
if not required_name:
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
clauses, condition_id = self.condition_clauses(
required_spec=required_spec,
imposed_spec=imposed_spec,
required_name=required_name,
imposed_name=imposed_name,
msg=msg,
context=context,
)
for clause in clauses:
self.gen.fact(clause)

if not context:
context = ConditionContext()
context.transform_imposed = remove_facts("node", "virtual_node")

if imposed_spec:
imposed_name = imposed_spec.name or imposed_name
if not imposed_name:
raise ValueError(f"Must provide a name for imposed constraint: '{imposed_spec}'")

with named_spec(required_spec, required_name), named_spec(imposed_spec, imposed_name):
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the
# caller, we won't emit partial facts.

condition_id = next(self._id_counter)
requirement_context = context.requirement_context()
trigger_id = self._get_condition_id(
required_spec, cache=self._trigger_cache, body=True, context=requirement_context
)
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
)
if not imposed_spec:
return condition_id

impose_context = context.impose_context()
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, context=impose_context
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
)

return condition_id
return condition_id

def impose(self, condition_id, imposed_spec, node=True, body=False):
imposed_constraints = self.spec_clauses(imposed_spec, body=body)

@@ -2212,6 +2256,10 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
msg=f"{input_spec} is a requirement for package {pkg_name}",
context=context,
)

# Conditions don't handle conditional dependencies directly
# Those are handled separately here
self.generate_conditional_dep_conditions(spec, member_id)
except Exception as e:
# Do not raise if the rule comes from the 'all' subsection, since usability
# would be impaired. If a rule does not apply for a specific package, just

@@ -2574,6 +2622,10 @@ def _spec_clauses(
if transitive:
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
for dspec in spec.edges_to_dependencies():
# Ignore conditional dependencies, they are handled by caller
if dspec.when != spack.spec.Spec():
continue

dep = dspec.spec

if spec.concrete:

@@ -3200,6 +3252,9 @@ def setup(
self.gen.h1("Spec Constraints")
self.literal_specs(specs)

self.trigger_rules()
self.effect_rules()

self.gen.h1("Variant Values defined in specs")
self.define_variant_values()
@@ -3333,11 +3388,49 @@ def literal_specs(self, specs):
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))

# Create subcondition with any conditional dependencies
# self.spec_clauses does not do anything with conditional
# dependencies
self.generate_conditional_dep_conditions(spec, condition_id)

if self.concretize_everything:
self.gen.fact(fn.solve_literal(trigger_id))

self.effect_rules()

def generate_conditional_dep_conditions(self, spec, condition_id):
for dspec in spec.traverse_edges():
# Ignore unconditional deps
if dspec.when == spack.spec.Spec():
continue

# Cannot use "virtual_node" attr as key for condition
# because reused specs do not track virtual nodes.
# Instead, track whether the parent uses the virtual
def virtual_handler(input_spec, requirements):
ret = remove_facts("virtual_node")(input_spec, requirements)
for edge in input_spec.traverse_edges(root=False, cover="edges"):
if spack.repo.PATH.is_virtual(edge.spec.name):
ret.append(fn.attr("uses_virtual", edge.parent.name, edge.spec.name))
return ret

context = ConditionContext()
context.source = ConstraintOrigin.append_type_suffix(
dspec.parent.name, ConstraintOrigin.CONDITIONAL_SPEC
)
# Default is to remove node-like attrs, override here
context.transform_required = virtual_handler
context.transform_imposed = lambda x, y: y

subcondition_id = self.condition(
dspec.when,
dspec.spec,
required_name=dspec.parent.name,
context=context,
msg=f"Conditional dependency in literal ^[when={dspec.when}]{dspec.spec}",
)
self.gen.fact(fn.subcondition(subcondition_id, condition_id))

def validate_and_define_versions_from_requirements(
self, *, allow_deprecated: bool, require_checksum: bool
):

@@ -3785,6 +3878,7 @@ class SpecBuilder:
r"^package_hash$",
r"^root$",
r"^track_dependencies$",
r"^uses_virtual$",
r"^variant_default_value_from_cli$",
r"^virtual_node$",
r"^virtual_on_incoming_edges$",
@@ -429,8 +429,23 @@ trigger_and_effect(Package, TriggerID, EffectID)

% condition_holds(ID, node(ID, Package)) implies all imposed_constraints, unless do_not_impose(ID, node(ID, Package))
% is derived. This allows imposed constraints to be canceled in special cases.

% Effects of direct conditions hold if the trigger holds
impose(EffectID, node(X, Package))
:- trigger_and_effect(Package, TriggerID, EffectID),
:- pkg_fact(Package, condition_effect(ConditionID, EffectID)),
not subcondition(ConditionID, _),
trigger_and_effect(Package, TriggerID, EffectID),
trigger_node(TriggerID, _, node(X, Package)),
trigger_condition_holds(TriggerID, node(X, Package)),
not do_not_impose(EffectID, node(X, Package)).

% Effects of subconditions hold if the trigger holds and the
% primary condition holds
impose(EffectID, node(X, Package))
:- pkg_fact(Package, condition_effect(SubconditionID, EffectID)),
subcondition(SubconditionID, ConditionID),
condition_holds(ConditionID, node(X, Package)),
trigger_and_effect(Package, TriggerID, EffectID),
trigger_node(TriggerID, _, node(X, Package)),
trigger_condition_holds(TriggerID, node(X, Package)),
not do_not_impose(EffectID, node(X, Package)).

@@ -645,6 +660,16 @@ virtual_condition_holds(node(Y, A2), Virtual)
attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual),
not build(node(X, A1)).

% Simplified virtual information for conditional requirements in
% conditional dependencies
% Most specs track virtuals on edges
attr("uses_virtual", PackageNode, Virtual) :-
attr("virtual_on_edge", PackageNode, _, Virtual).

% Reused specs don't track a real edge to build-only deps
attr("uses_virtual", PackageNode, Virtual) :-
attr("virtual_on_build_edge", PackageNode, _, Virtual).

% we cannot have additional variant values when we are working with concrete specs
:- attr("node", node(ID, Package)),
attr("hash", node(ID, Package), Hash),

@@ -660,6 +685,7 @@ virtual_condition_holds(node(Y, A2), Virtual)
internal_error("imposed hash without imposing all flag values").

#defined condition/2.
#defined subcondition/2.
#defined condition_requirement/3.
#defined condition_requirement/4.
#defined condition_requirement/5.
@@ -720,7 +720,7 @@ class DependencySpec:
virtuals: virtual packages provided from child to parent node.
"""

__slots__ = "parent", "spec", "depflag", "virtuals", "direct"
__slots__ = "parent", "spec", "depflag", "virtuals", "direct", "when"

def __init__(
self,

@@ -730,12 +730,14 @@ def __init__(
depflag: dt.DepFlag,
virtuals: Tuple[str, ...],
direct: bool = False,
when: Optional["Spec"] = None,
):
self.parent = parent
self.spec = spec
self.depflag = depflag
self.virtuals = tuple(sorted(set(virtuals)))
self.direct = direct
self.when = when or Spec()

def update_deptypes(self, depflag: dt.DepFlag) -> bool:
"""Update the current dependency types"""

@@ -766,6 +768,7 @@ def copy(self) -> "DependencySpec":
depflag=self.depflag,
virtuals=self.virtuals,
direct=self.direct,
when=self.when,
)

def _cmp_iter(self):

@@ -777,10 +780,13 @@ def _cmp_iter(self):
def __str__(self) -> str:
parent = self.parent.name if self.parent else None
child = self.spec.name if self.spec else None
return f"{parent} {self.depflag}[virtuals={','.join(self.virtuals)}] --> {child}"
virtuals_string = f"virtuals={','.join(self.virtuals)}" if self.virtuals else ""
when_string = f"when='{self.when}'" if self.when != Spec() else ""
edge_attrs = filter(lambda x: bool(x), (virtuals_string, when_string))
return f"{parent} {self.depflag}[{' '.join(edge_attrs)}] --> {child}"

def flip(self) -> "DependencySpec":
"""Flip the dependency, and drop virtual information"""
"""Flip the dependency, and drop virtual and conditional information"""
return DependencySpec(
parent=self.spec, spec=self.parent, depflag=self.depflag, virtuals=()
)
@@ -1021,6 +1027,7 @@ def select(
child: Optional[str] = None,
depflag: dt.DepFlag = dt.ALL,
virtuals: Optional[Union[str, Sequence[str]]] = None,
when: Optional["Spec"] = None,
) -> List[DependencySpec]:
"""Selects a list of edges and returns them.

@@ -1040,6 +1047,7 @@ def select(
child: name of the child package
depflag: allowed dependency types in flag form
virtuals: list of virtuals or specific virtual on the edge
when: condition on conditional dependency, or Spec() for unconditional dependency only
"""
if not depflag:
return []

@@ -1065,6 +1073,9 @@ def select(
else:
selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))

if when is not None:
selected = (dep for dep in selected if dep.when == when)

return list(selected)

def clear(self):

@@ -1612,6 +1623,7 @@ def edges_to_dependencies(
depflag: dt.DepFlag = dt.ALL,
*,
virtuals: Optional[Union[str, Sequence[str]]] = None,
when: Optional["Spec"] = None,
) -> List[DependencySpec]:
"""Returns a list of edges connecting this node in the DAG to children.

@@ -1619,9 +1631,13 @@ def edges_to_dependencies(
name: filter dependencies by package name
depflag: allowed dependency types
virtuals: allowed virtuals
when: condition on conditional dependencies (or Spec() for unconditional)
"""
return [
d for d in self._dependencies.select(child=name, depflag=depflag, virtuals=virtuals)
d
for d in self._dependencies.select(
child=name, depflag=depflag, virtuals=virtuals, when=when
)
]

@property
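Since edges now carry a `when` condition, the selection helpers above can filter on it. A short sketch of how a caller might separate conditional from unconditional edges; the spec string is illustrative and mirrors the syntax exercised by the tests later in this diff:

```python
# Sketch only: parsing does not need a package repository, only the new syntax.
import spack.spec

s = spack.spec.Spec("hdf5+mpi ^[when='^mpi' virtuals=mpi]mpich")

# Edges whose condition is the empty Spec() are unconditional
unconditional = s.edges_to_dependencies(when=spack.spec.Spec())

# Conditional edges keep their condition on the DependencySpec itself
conditional = [e for e in s.edges_to_dependencies() if e.when != spack.spec.Spec()]
assert conditional and conditional[0].when == spack.spec.Spec("^mpi")
```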
@@ -1633,20 +1649,26 @@ def edge_attributes(self) -> str:

union = DependencySpec(parent=Spec(), spec=self, depflag=0, virtuals=())
all_direct_edges = all(x.direct for x in edges)
dep_conditions = set()

for edge in edges:
union.update_deptypes(edge.depflag)
union.update_virtuals(edge.virtuals)
dep_conditions.add(edge.when)

deptypes_str = ""
if not all_direct_edges and union.depflag:
deptypes_str = f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}"

virtuals_str = f"virtuals={','.join(union.virtuals)}" if union.virtuals else ""
if not deptypes_str and not virtuals_str:
return ""
result = f"{deptypes_str} {virtuals_str}".strip()
return f"[{result}]"

conditions = [str(c) for c in dep_conditions if c != Spec()]
when_str = f"when='{','.join(conditions)}'" if conditions else ""

result = " ".join(filter(lambda x: bool(x), (when_str, deptypes_str, virtuals_str)))
if result:
result = f"[{result}]"
return result

def dependencies(
self,

@@ -1654,6 +1676,7 @@ def dependencies(
deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
*,
virtuals: Optional[Union[str, Sequence[str]]] = None,
when: Optional["Spec"] = None,
) -> List["Spec"]:
"""Returns a list of direct dependencies (nodes in the DAG)

@@ -1661,11 +1684,15 @@ def dependencies(
name: filter dependencies by package name
deptype: allowed dependency types
virtuals: allowed virtuals
when: condition on conditional dependency or Spec() for unconditional
"""
if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
return [
d.spec for d in self.edges_to_dependencies(name, depflag=deptype, virtuals=virtuals)
d.spec
for d in self.edges_to_dependencies(
name, depflag=deptype, virtuals=virtuals, when=when
)
]

def dependents(
@@ -1752,7 +1779,13 @@ def _set_architecture(self, **kwargs):
setattr(self.architecture, new_attr, new_value)

def _add_dependency(
self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...], direct: bool = False
self,
spec: "Spec",
*,
depflag: dt.DepFlag,
virtuals: Tuple[str, ...],
direct: bool = False,
when: Optional["Spec"] = None,
):
"""Called by the parser to add another spec as a dependency.

@@ -1760,23 +1793,33 @@ def _add_dependency(
depflag: dependency type for this edge
virtuals: virtuals on this edge
direct: if True denotes a direct dependency (associated with the % sigil)
when: if non-None, condition under which dependency holds
"""
if when is None:
when = Spec()

if spec.name not in self._dependencies or not spec.name:
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals, direct=direct)
self.add_dependency_edge(
spec, depflag=depflag, virtuals=virtuals, direct=direct, when=when
)
return

# Keep the intersection of constraints when a dependency is added multiple times with
# the same deptype. Add a new dependency if it is added with a compatible deptype
# (for example, a build-only dependency is compatible with a link-only dependenyc).
# (for example, a build-only dependency is compatible with a link-only dependency).
# The only restrictions, currently, are that we cannot add edges with overlapping
# dependency types and we cannot add multiple edges that have link/run dependency types.
# See ``spack.deptypes.compatible``.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
dspec = next(
dspec for dspec in orig if depflag == dspec.depflag and when == dspec.when
)
except StopIteration:
# Error if we have overlapping or incompatible deptypes
if any(not dt.compatible(dspec.depflag, depflag) for dspec in orig):
if any(not dt.compatible(dspec.depflag, depflag) for dspec in orig) and all(
dspec.when == when for dspec in orig
):
edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
required_dep_str = f"^[{edge_attrs}] {str(spec)}"

@@ -1785,7 +1828,9 @@ def _add_dependency(
f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
)

self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals, direct=direct)
self.add_dependency_edge(
spec, depflag=depflag, virtuals=virtuals, direct=direct, when=when
)
return

try:

@@ -1803,6 +1848,7 @@ def add_dependency_edge(
depflag: dt.DepFlag,
virtuals: Tuple[str, ...],
direct: bool = False,
when: Optional["Spec"] = None,
):
"""Add a dependency edge to this spec.

@@ -1811,13 +1857,20 @@ def add_dependency_edge(
deptypes: dependency types for this edge
virtuals: virtuals provided by this edge
direct: if True denotes a direct dependency
when: if non-None, condition under which dependency holds
"""
if when is None:
when = Spec()

# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"

if edge.when != when:
continue

# If the dependency is to an existing spec, we can update dependency
# types. If it is to a new object, check deptype compatibility.
if id(edge.spec) != id(dependency_spec) and not dt.compatible(edge.depflag, depflag):

@@ -1841,7 +1894,7 @@ def add_dependency_edge(
raise spack.error.SpecError(msg, "\n".join(details))

for edge in selected:
if id(dependency_spec) == id(edge.spec):
if id(dependency_spec) == id(edge.spec) and edge.when == when:
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.

@@ -1850,7 +1903,7 @@ def add_dependency_edge(
return

edge = DependencySpec(
self, dependency_spec, depflag=depflag, virtuals=virtuals, direct=direct
self, dependency_spec, depflag=depflag, virtuals=virtuals, direct=direct, when=when
)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
@@ -2085,14 +2138,14 @@ def long_spec(self):
new_name = spack.aliases.BUILTIN_TO_LEGACY_COMPILER.get(current_name, current_name)
# note: depflag not allowed, currently, on "direct" edges
edge_attributes = ""
if item.virtuals:
if item.virtuals or item.when != Spec():
edge_attributes = item.spec.format("{edge_attributes}") + " "

parts.append(f"%{edge_attributes}{item.spec.format()}".replace(current_name, new_name))
for item in sorted(transitive, key=lambda x: x.spec.name):
# Recurse to attach build deps in order
edge_attributes = ""
if item.virtuals or item.depflag:
if item.virtuals or item.depflag or item.when != Spec():
edge_attributes = item.spec.format("{edge_attributes}") + " "
parts.append(f"^{edge_attributes}{str(item.spec)}")
return " ".join(parts).strip()
@ -3126,41 +3179,22 @@ def _constrain_dependencies(self, other: "Spec") -> bool:
|
||||
if any(not d.name for d in other.traverse(root=False)):
|
||||
raise UnconstrainableDependencySpecError(other)
|
||||
|
||||
# Handle common first-order constraints directly
|
||||
# Note: This doesn't handle constraining transitive dependencies with the same name
|
||||
# as direct dependencies
|
||||
changed = False
|
||||
common_dependencies = {x.name for x in self.dependencies()}
|
||||
common_dependencies &= {x.name for x in other.dependencies()}
|
||||
for name in common_dependencies:
|
||||
changed |= self[name].constrain(other[name], deps=True)
|
||||
if name in self._dependencies:
|
||||
# WARNING: This function is an implementation detail of the
|
||||
# WARNING: original concretizer. Since with that greedy
|
||||
# WARNING: algorithm we don't allow multiple nodes from
|
||||
# WARNING: the same package in a DAG, here we hard-code
|
||||
# WARNING: using index 0 i.e. we assume that we have only
|
||||
# WARNING: one edge from package "name"
|
||||
edges_from_name = self._dependencies[name]
|
||||
changed |= edges_from_name[0].update_deptypes(other._dependencies[name][0].depflag)
|
||||
changed |= edges_from_name[0].update_virtuals(
|
||||
other._dependencies[name][0].virtuals
|
||||
reference_spec = self.copy(deps=True)
|
||||
for edge in other.edges_to_dependencies():
|
||||
existing = self.edges_to_dependencies(edge.spec.name, when=edge.when)
|
||||
if existing:
|
||||
existing[0].spec.constrain(edge.spec)
|
||||
existing[0].update_deptypes(edge.depflag)
|
||||
existing[0].update_virtuals(edge.virtuals)
|
||||
else:
|
||||
self.add_dependency_edge(
|
||||
edge.spec,
|
||||
depflag=edge.depflag,
|
||||
virtuals=edge.virtuals,
|
||||
direct=edge.direct,
|
||||
when=edge.when,
|
||||
)
|
||||
|
||||
# Update with additional constraints from other spec
|
||||
# operate on direct dependencies only, because a concrete dep
|
||||
# represented by hash may have structure that needs to be preserved
|
||||
for name in other.direct_dep_difference(self):
|
||||
dep_spec_copy = other._get_dependency(name)
|
||||
self._add_dependency(
|
||||
dep_spec_copy.spec.copy(),
|
||||
depflag=dep_spec_copy.depflag,
|
||||
virtuals=dep_spec_copy.virtuals,
|
||||
direct=dep_spec_copy.direct,
|
||||
)
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
return self != reference_spec
|
||||
|
||||
def common_dependencies(self, other):
|
||||
"""Return names of dependencies that self and other have in common."""
|
||||
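With constraining now driven by edges rather than dependency names, conditional and direct edges are merged as well. A small sketch that mirrors one row of the abstract-spec constraining table added to the test suite later in this diff (assuming, as that test appears to, that constraining the left-hand side yields the expected spec):

```python
# Sketch only: the spec strings come from the test_abstract_specs_can_constrain_each_other table.
import spack.spec

lhs = spack.spec.Spec("libelf %[when='^c' virtuals=c]gcc")
lhs.constrain(spack.spec.Spec("%[when='^c' virtuals=c]gcc@10.3.1"))
assert lhs == spack.spec.Spec("libelf%[when='^c' virtuals=c]gcc@10.3.1")
```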
@ -3397,10 +3431,6 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
if not other._dependencies:
|
||||
return True
|
||||
|
||||
# If we have no dependencies, we can't satisfy any constraints.
|
||||
if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
|
||||
return False
|
||||
|
||||
# If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
|
||||
# all the edges that have an abstract parent, and verify that they match some edge in the
|
||||
# lhs.
|
||||
@ -3411,6 +3441,11 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
|
||||
mock_nodes_from_old_specfiles = set()
|
||||
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
|
||||
# Skip checking any conditional edge that is not satisfied
|
||||
if rhs_edge.when != Spec() and not self.satisfies(rhs_edge.when):
|
||||
# TODO: this misses the case that the rhs statically satisfies its own condition
|
||||
continue
|
||||
|
||||
# If we are checking for ^mpi we need to verify if there is any edge
|
||||
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
|
||||
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
|
||||
@ -3468,6 +3503,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
for lhs_edge in self.traverse_edges(
|
||||
root=False, cover="edges", deptype=("link", "run")
|
||||
):
|
||||
# TODO: do we need to avoid conditional edges here
|
||||
lhs_edges[lhs_edge.spec.name].add(lhs_edge)
|
||||
for virtual_name in lhs_edge.virtuals:
|
||||
lhs_edges[virtual_name].add(lhs_edge)
|
||||
@ -3484,6 +3520,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
return False
|
||||
|
||||
for virtual in rhs_edge.virtuals:
|
||||
# TODO: consider how this could apply to conditional edges
|
||||
has_virtual = any(
|
||||
virtual in edge.virtuals for edge in lhs_edges[current_dependency_name]
|
||||
)
|
||||
@ -3491,11 +3528,22 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
return False
|
||||
|
||||
# Edges have been checked above already, hence deps=False
|
||||
lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
|
||||
return all(
|
||||
any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
|
||||
for rhs in other.traverse(root=False)
|
||||
)
|
||||
lhs_nodes = list(self.traverse(root=False)) + sorted(mock_nodes_from_old_specfiles)
|
||||
for rhs in other.traverse(root=False):
|
||||
# Possible lhs nodes to match this rhs node
|
||||
lhss = [lhs for lhs in lhs_nodes if lhs.satisfies(rhs, deps=False)]
|
||||
|
||||
# Check whether the node needs matching (not a conditional that isn't satisfied)
|
||||
if not any(self.satisfies(e.when) for e in rhs.edges_from_dependents()):
|
||||
# TODO: This technically misses the case that the edge is analogous
|
||||
# to an edge lower in the DAG, and could give a false negative in that case
|
||||
continue
|
||||
|
||||
# If there is no matching lhs for this rhs node
|
||||
if not lhss:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@property # type: ignore[misc] # decorated prop not supported in mypy
|
||||
def patches(self):
|
||||
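With conditional edges on the right-hand side, `satisfies()` now skips conditions that cannot hold and only requires a match for nodes whose incoming edge condition is met. A condensed sketch, taken from the new `test_satisfies_conditional_dep` test further below (it relies on the mock packages and concretizer fixtures used there):

```python
# Sketch only: mirrors the assertions of test_satisfies_conditional_dep below.
import spack.concretize

concrete = spack.concretize.concretize_one("mpileaks^mpich")
assert concrete.satisfies("^[when='^mpi' virtuals=mpi]mpich")  # condition holds, edge matches
assert concrete.satisfies("^[when='^notapackage']zmpi")        # condition can never hold, so it is skipped
assert not concrete.satisfies("^[when='^mpi']zmpi")            # condition holds, but there is no zmpi edge
```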
@@ -3629,6 +3677,7 @@ def spid(spec):
depflag=edge.depflag,
virtuals=edge.virtuals,
direct=edge.direct,
when=edge.when,
)

def copy(self, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, **kwargs):
@@ -56,16 +56,18 @@
specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import itertools
import json
import pathlib
import re
import sys
import traceback
import warnings
from typing import Iterator, List, Optional, Tuple, Union
from typing import Iterable, Iterator, List, Optional, Tuple, Union

from llnl.util.tty import color

import spack.config
import spack.deptypes
import spack.error
import spack.paths

@@ -162,6 +164,15 @@ def tokenize(text: str) -> Iterator[Token]:
yield token


def parseable_tokens(text: str) -> Iterator[Token]:
"""Return non-whitespace tokens from the text passed as input

Raises:
SpecTokenizationError: when unexpected characters are found in the text
"""
return filter(lambda x: x.kind != SpecTokens.WS, tokenize(text))


class TokenContext:
"""Token context passed around by parsers"""

@@ -189,6 +200,16 @@ def accept(self, kind: SpecTokens):
def expect(self, *kinds: SpecTokens):
return self.next_token and self.next_token.kind in kinds

def push(self, token_stream: Iterator[Token]):
# New tokens need to go before next_token, which comes before the rest of the stream
next_token_iterator: Iterable[Token] = (
iter((self.next_token,)) if self.next_token else iter(())
)
self.token_stream = itertools.chain(token_stream, next_token_iterator, self.token_stream)
self.current_token = None
self.next_token = None
self.advance()


class SpecTokenizationError(spack.error.SpecSyntaxError):
"""Syntax error in a spec string"""

@@ -238,11 +259,13 @@ class SpecParser:

def __init__(self, literal_str: str):
self.literal_str = literal_str
self.ctx = TokenContext(filter(lambda x: x.kind != SpecTokens.WS, tokenize(literal_str)))
self.ctx = TokenContext(parseable_tokens(literal_str))

def tokens(self) -> List[Token]:
"""Return the entire list of token from the initial text. White spaces are
filtered out.

Note: This list will not show tokens pushed when parsing an alias
"""
return list(filter(lambda x: x.kind != SpecTokens.WS, tokenize(self.literal_str)))

@@ -268,6 +291,9 @@ def add_dependency(dep, **edge_properties):
except spack.error.SpecError as e:
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e

# Get toolchain information outside of loop
toolchains = spack.config.CONFIG.get("toolchains", {})

initial_spec = initial_spec or spack.spec.Spec()
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
current_spec = root_spec

@@ -297,6 +323,15 @@ def add_dependency(dep, **edge_properties):
add_dependency(dependency, **edge_properties)

elif self.ctx.accept(SpecTokens.DEPENDENCY):
# String replacement for toolchains
# Look ahead to match upcoming value to list of toolchains
if self.ctx.next_token.value in toolchains:
assert self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME)
# accepting the token advances it to be the current token
# Push associated tokens back to the TokenContext
self.ctx.push(parseable_tokens(toolchains[self.ctx.current_token.value]))
continue

is_direct = self.ctx.current_token.value[0] == "%"
dependency, warnings = self._parse_node(root_spec)
edge_properties = {}

@@ -511,10 +546,10 @@ def parse(self):
name = name[:-1]
value = value.strip("'\" ").split(",")
attributes[name] = value
if name not in ("deptypes", "virtuals"):
if name not in ("deptypes", "virtuals", "when"):
msg = (
"the only edge attributes that are currently accepted "
'are "deptypes" and "virtuals"'
'are "deptypes", "virtuals", and "when"'
)
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
# TODO: Add code to accept bool variants here as soon as use variants are implemented

@@ -528,6 +563,11 @@ def parse(self):
if "deptypes" in attributes:
deptype_string = attributes.pop("deptypes")
attributes["depflag"] = spack.deptypes.canonicalize(deptype_string)

# Turn "when" into a spec
if "when" in attributes:
attributes["when"] = spack.spec.Spec(attributes["when"][0])

return attributes
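With `when` accepted as an edge attribute, a conditional dependency can be written directly in a spec string, and the condition round-trips through formatting. A short sketch; the package names are only illustrative and mirror the mock packages used in the tests in this PR:

```python
# Sketch only: the "when" value is parsed into a Spec and stored on the edge.
import spack.spec

s = spack.spec.Spec("libelf ^[when='+mpi' virtuals=mpi]mpich")
edge = s.edges_to_dependencies("mpich")[0]
assert edge.when == spack.spec.Spec("+mpi")
assert "mpi" in edge.virtuals
# per the roundtrip tests, the condition is kept when the spec is printed:
assert "when='+mpi'" in str(s)
```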
@@ -573,8 +613,9 @@ class SpecParsingError(spack.error.SpecSyntaxError):

def __init__(self, message, token, text):
message += f"\n{text}"
underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
message += color.colorize(f"@*r{{{underline}}}")
if token:
underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
message += color.colorize(f"@*r{{{underline}}}")
super().__init__(message)
@@ -0,0 +1,67 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest

import spack.concretize
import spack.config
import spack.spec


@pytest.mark.parametrize("holds,mpi", [(True, "zmpi"), (True, "mpich"), (False, "mpich")])
def test_conditional_deps(holds, mpi, config, mock_packages):
sigil = "+" if holds else "~"
request = f"hdf5{sigil}mpi ^[when='^mpi' virtuals=mpi]{mpi}"
concrete = spack.concretize.concretize_one(request)

assert (mpi in concrete) == holds
assert ("mpi" in concrete) == holds


@pytest.mark.parametrize("c", [True, False])
@pytest.mark.parametrize("cxx", [True, False])
@pytest.mark.parametrize("fortran", [True, False])
def test_conditional_compilers(c, cxx, fortran, mutable_config, mock_packages):
# Configure two gcc compilers that could be concretized to
# We will confirm concretization matches the less preferred one
extra_attributes_block = {
"compilers": {"c": "/path/to/gcc", "cxx": "/path/to/g++", "fortran": "/path/to/fortran"}
}
spack.config.CONFIG.set(
"packages:gcc:externals::",
[
{
"spec": "gcc@12.3.1 languages=c,c++,fortran",
"prefix": "/path",
"extra_attributes": extra_attributes_block,
},
{
"spec": "gcc@10.3.1 languages=c,c++,fortran",
"prefix": "/path",
"extra_attributes": extra_attributes_block,
},
],
)

# Abstract spec parametrized to depend/not on c/cxx/fortran
# and with conditional dependencies for each on the less preferred gcc
abstract = spack.spec.Spec("conditional-languages")
abstract.constrain(f"c={c}")
abstract.constrain(f"cxx={cxx}")
abstract.constrain(f"fortran={fortran}")

preferred_gcc = spack.concretize.concretize_one(abstract)
abstract.constrain(
"^[when='%c' virtuals=c]gcc@10.3.1 "
"^[when='%cxx' virtuals=cxx]gcc@10.3.1 "
"^[when='%fortran' virtuals=fortran]gcc@10.3.1"
)
concrete = spack.concretize.concretize_one(abstract)

# We should get the dependency we specified for each language we enabled
assert concrete.satisfies("%[virtuals=c]gcc@10.3.1") == c
assert concrete.satisfies("%[virtuals=cxx]gcc@10.3.1") == cxx
assert concrete.satisfies("%[virtuals=fortran]gcc@10.3.1") == fortran

# The only time the two concrete specs are the same is if we don't use gcc at all
assert (concrete == preferred_gcc) == (not any((c, cxx, fortran)))
@@ -81,8 +81,10 @@ def test_internal_error_handling_formatting(tmp_path):
assert "the following specs were not solved:\n - baz+z\n" in output
assert (
"the following specs were concretized, but do not satisfy the input:\n"
" - foo+x\n"
" - bar+y\n"
" - input: foo+x\n"
" output: foo@=1.0~x\n"
" - input: bar+y\n"
" output: x@=1.0~y"
) in output

files = {f.name: str(f) for f in tmp_path.glob("spack-asp-*/*.json")}
@@ -12,6 +12,7 @@
import spack.paths
import spack.repo
import spack.solver.asp
import spack.spec
import spack.store
import spack.util.spack_yaml as syaml
import spack.version

@@ -1301,3 +1302,52 @@ def test_requirements_on_compilers_and_reuse(
assert is_pkgb_reused == expected_reuse
for c in expected_contraints:
assert pkga.satisfies(c)


@pytest.mark.parametrize(
"abstract,req_is_noop",
[
("hdf5+mpi", False),
("hdf5~mpi", True),
("conditional-languages+c", False),
("conditional-languages+cxx", False),
("conditional-languages+fortran", False),
("conditional-languages~c~cxx~fortran", True),
],
)
def test_requirements_conditional_deps(abstract, req_is_noop, mutable_config, mock_packages):
required_spec = (
"%[when='^c' virtuals=c]gcc@10.3.1 "
"%[when='^cxx' virtuals=cxx]gcc@10.3.1 "
"%[when='^fortran' virtuals=fortran]gcc@10.3.1 "
"^[when='^mpi' virtuals=mpi]zmpi"
)
abstract = spack.spec.Spec(abstract)

# Configure two gcc compilers that could be concretized to
# We will confirm concretization matches the less preferred one
extra_attributes_block = {
"compilers": {"c": "/path/to/gcc", "cxx": "/path/to/g++", "fortran": "/path/to/fortran"}
}
spack.config.CONFIG.set(
"packages:gcc:externals::",
[
{
"spec": "gcc@12.3.1 languages=c,c++,fortran",
"prefix": "/path",
"extra_attributes": extra_attributes_block,
},
{
"spec": "gcc@10.3.1 languages=c,c++,fortran",
"prefix": "/path",
"extra_attributes": extra_attributes_block,
},
],
)

no_requirements = spack.concretize.concretize_one(abstract)
spack.config.CONFIG.set(f"packages:{abstract.name}", {"require": required_spec})
requirements = spack.concretize.concretize_one(abstract)

assert requirements.satisfies(required_spec)
assert (requirements == no_requirements) == req_is_noop  # show the reqs change concretization
@@ -2054,7 +2054,7 @@ def shell_as(shell):
@pytest.fixture()
def nullify_globals(request, monkeypatch):
ensure_configuration_fixture_run_before(request)
monkeypatch.setattr(spack.config, "CONFIG", None)
monkeypatch.setattr(spack.config, "CONFIG", {})  # So basic get operations do not throw
monkeypatch.setattr(spack.caches, "MISC_CACHE", None)
monkeypatch.setattr(spack.caches, "FETCH_CACHE", None)
monkeypatch.setattr(spack.repo, "PATH", None)
@@ -327,6 +327,26 @@ class TestSpecSemantics:
"ba5e334fe247335f3a116decfb5284100791dc302b5571ff5e664d8f9a6806c2"
),
),
(
"libelf",
"%[when='^c' virtuals=c]gcc ^[when='+mpi' virtuals=mpi]mpich",
"libelf %[when='^c' virtuals=c]gcc ^[when='+mpi' virtuals=mpi]mpich",
),
(
"libelf %[when='^c' virtuals=c]gcc",
"%[when='^c' virtuals=c]gcc@10.3.1",
"libelf%[when='^c' virtuals=c]gcc@10.3.1",
),
(
"libelf %[when='^c' virtuals=c]gcc",
"%[when='^c' virtuals=c]gcc@10.3.1 ^[when='+mpi'] mpich",
"libelf%[when='^c' virtuals=c]gcc@10.3.1 ^[when='+mpi']mpich",
),
(
"libelf %[when='^c' virtuals=c]gcc",
"%[when='^cxx' virtuals=cxx]gcc@10.3.1",
"libelf%[when='^c' virtuals=c]gcc %[when='^cxx' virtuals=cxx]gcc@10.3.1",
),
],
)
def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected):

@@ -573,6 +593,14 @@ def test_abstract_specs_with_propagation(self, lhs, rhs, expected, constrained):
c.constrain(lhs)
assert c == constrained

def test_satisfies_conditional_dep(self):
concrete = spack.concretize.concretize_one("mpileaks^mpich")
assert concrete.satisfies("^[when='^mpi' virtuals=mpi]mpich")
assert concrete.satisfies("^[when='^notapackage']zmpi")
assert not concrete.satisfies("^[virtuals=blas]mpich")
assert not concrete.satisfies("^[when='^mpi' virtuals=blas]mpich")
assert not concrete.satisfies("^[when='^mpi']zmpi")

def test_satisfies_single_valued_variant(self):
"""Tests that the case reported in
https://github.com/spack/spack/pull/2386#issuecomment-282147639
@@ -11,6 +11,7 @@
import spack.binary_distribution
import spack.cmd
import spack.concretize
import spack.config
import spack.platforms.test
import spack.repo
import spack.solver.asp

@@ -737,6 +738,19 @@ def _specfile_for(spec_str, filename):
],
"mvapich %gcc languages:='c,c++' arch=None-None-x86_64",
),
# Test conditional dependencies
(
"foo ^[when='%c' virtuals=c]gcc",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "foo"),
Token(SpecTokens.START_EDGE_PROPERTIES, "^["),
Token(SpecTokens.KEY_VALUE_PAIR, "when='%c'"),
Token(SpecTokens.KEY_VALUE_PAIR, "virtuals=c"),
Token(SpecTokens.END_EDGE_PROPERTIES, "]"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "gcc"),
],
"foo ^[when='%c' virtuals=c] gcc",
),
],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_package):

@@ -882,6 +896,39 @@ def test_cli_spec_roundtrip(args, expected):
assert expected == output_string


@pytest.mark.parametrize(
"spec_str,toolchain,expected_roundtrip",
[
(
"foo%my_toolchain",
{"my_toolchain": "%[when='%c' virtuals=c]gcc"},
["foo %[when='%c' virtuals=c] gcc"],
),
(
"foo%my_toolchain2",
{"my_toolchain2": "%[when='%c' virtuals=c]gcc ^[when='+mpi' virtuals=mpi]mpich"},
["foo %[when='%c' virtuals=c] gcc ^[when='+mpi' virtuals=mpi] mpich"],
),
(
"foo%my_toolchain bar%my_toolchain2",
{
"my_toolchain": "%[when='%c' virtuals=c]gcc",
"my_toolchain2": "%[when='%c' virtuals=c]gcc ^[when='+mpi' virtuals=mpi]mpich",
},
[
"foo %[when='%c' virtuals=c] gcc",
"bar %[when='%c' virtuals=c] gcc ^[when='+mpi' virtuals=mpi] mpich",
],
),
],
)
def test_parse_toolchain(spec_str, toolchain, expected_roundtrip, mutable_config):
spack.config.CONFIG.set("toolchains", toolchain)
parser = SpecParser(spec_str)
for expected in expected_roundtrip:
assert expected == str(parser.next_spec())


@pytest.mark.parametrize(
"text,expected_in_error",
[
@@ -1211,19 +1211,19 @@ complete -c spack -n '__fish_spack_using_command config' -l scope -r -d 'configu

# spack config get
set -g __fish_spack_optspecs_spack_config_get h/help
complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop env_vars include mirrors modules packages repos upstreams view'
complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop env_vars include mirrors modules packages repos toolchains upstreams view'
complete -c spack -n '__fish_spack_using_command config get' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config get' -s h -l help -d 'show this help message and exit'

# spack config blame
set -g __fish_spack_optspecs_spack_config_blame h/help
complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop env_vars include mirrors modules packages repos upstreams view'
complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop env_vars include mirrors modules packages repos toolchains upstreams view'
complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -d 'show this help message and exit'

# spack config edit
set -g __fish_spack_optspecs_spack_config_edit h/help print-file
complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop env_vars include mirrors modules packages repos upstreams view'
complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop env_vars include mirrors modules packages repos toolchains upstreams view'
complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command config edit' -l print-file -f -a print_file
@@ -0,0 +1,20 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class ConditionalLanguages(Package):
"""Conditional depends on c/cxx/fortran with a variant for each"""

homepage = "https://dev.null"

version("1.0")

variant("c", default=False, description="depend on c")
variant("cxx", default=False, description="depend on cxx")
variant("fortran", default=False, description="depend on fortran")

depends_on("c", type="build", when="+c")
depends_on("cxx", type="build", when="+cxx")
depends_on("fortran", type="build", when="+fortran")