Mixed-source cflags (#41049)

Allow flags from different sources (compilers, `require:`, command-line
specs, and `depends_on`) to be merged together, and enforce a consistent
order among them.

The order is based on the sources, e.g. flags on specs from the command
line always come last. Some flag order consistency issues are fixed:

1. Flags from `compilers.yaml` and the command line were always intra- and
   inter-source order consistent.
2. Flags from dependents and packages.yaml (introduced via `require:`)
   were not: for `-a -b` from one source and `-c` from another, the final
   result might rearrange `-a -b`, and would also be inconsistent in terms
   of whether `-c` came before or after.

(1) is handled by going back to the original source, i.e., flags are
retrieved directly from the command line spec rather than the solver.

(2) is addressed by:

* Keeping track of grouped flags in the solver
* Keeping track of flag sources in the solver on a per-flag basis

The latter info is used in this PR to enforce DAG ordering on flags
applied from multiple dependents to the same package, e.g., for this
graph:

```
   a
  /|\
 b | c
  \|/
   d
```

If `a`, `b`, and `c` impose flags on `d`, the combined flags on `d` will
contain the flags of `a`, `b`, and `c` -- in that order. 

Conflicting flags are allowed (e.g. -O2 and -O3). `Spec.satisfies()` has
been updated such that X satisfies Y as long as X has *at least* all of
the flags that Y has. This is also true in the solver constraints.
`.satisfies` does not account for how order can change behavior (so
`-O2 -O3` can satisfy `-O3 -O2`); it is expected that this can be
addressed later (e.g. by prohibiting flag conflicts).

`Spec.constrain` and `.intersects` have been updated to be consistent
with this new definition of `.satisfies`.
This commit is contained in:
Peter Scheibel 2024-09-06 10:37:33 -07:00 committed by GitHub
parent 2b9a621d19
commit 78a4d3e7d2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 731 additions and 173 deletions

View File

@ -1022,6 +1022,102 @@ def __iter__(self):
ConditionSpecCache = Dict[str, Dict[ConditionSpecKey, ConditionIdFunctionPair]]
class ConstraintOrigin(enum.Enum):
    """Identifies where a solver constraint came from.

    Package/type identifiers generated with ``append_type_suffix`` are
    passed into the solver attached to constraints; ``strip_type_suffix``
    later recovers the origin so that ``SpecBuilder`` can account for it
    when creating Specs from the solve result.
    """

    DEPENDS_ON = 1
    REQUIRE = 2

    @staticmethod
    def _SUFFIXES() -> Dict["ConstraintOrigin", str]:
        # Maps each origin kind to the suffix appended to package IDs
        return {ConstraintOrigin.DEPENDS_ON: "_dep", ConstraintOrigin.REQUIRE: "_req"}

    @staticmethod
    def append_type_suffix(pkg_id: str, kind: "ConstraintOrigin") -> str:
        """Return a combined string ID for a package and a constraint kind."""
        return pkg_id + ConstraintOrigin._SUFFIXES()[kind]

    @staticmethod
    def strip_type_suffix(source: str) -> Tuple[int, Optional[str]]:
        """Split an ID made by ``append_type_suffix`` into a weight and a package ID.

        The weight is the ``ConstraintOrigin`` member's value, or -1 when the
        ID carries no recognized suffix; the package ID is ``None`` for an
        empty input.
        """
        if not source:
            return -1, None
        for origin, suffix in ConstraintOrigin._SUFFIXES().items():
            if source.endswith(suffix):
                return origin.value, source[: -len(suffix)]
        return -1, source
class SourceContext:
    """Context in which a Spec's clause-set is generated (i.e. with
    ``SpackSolverSetup.spec_clauses``).

    Facts generated for the spec may include this context.
    """

    def __init__(self):
        # "none" means the source of the constraints is not important to
        # keep track of; other possibilities are "literal" (constraints
        # that come from a user spec, e.g. from the command line) or the
        # output of ``ConstraintOrigin.append_type_suffix``.
        self.source = "none"
class ConditionIdContext(SourceContext):
    """Derived from a ``ConditionContext``: for clause-sets generated by
    imposed/required specs, stores an associated transform.

    This is primarily used for tracking whether we are generating clauses
    in the context of a required spec, or for an imposed spec.

    Is not a subclass of ``ConditionContext`` because it exists in a
    lower-level context with less information.
    """

    def __init__(self):
        super().__init__()
        # Optional transformation applied to the clause-set generated for
        # the associated spec; None leaves the clauses unchanged.
        self.transform = None
class ConditionContext(SourceContext):
    """Tracks context in which a condition (i.e. ``SpackSolverSetup.condition``)
    is generated (e.g. for a `depends_on`).

    This may modify the required/imposed specs generated as relevant
    for the context.
    """

    def __init__(self):
        super().__init__()
        # transformation applied to facts from the required spec. Defaults
        # to leave facts as they are.
        self.transform_required = None
        # transformation applied to facts from the imposed spec. Defaults
        # to removing "node" and "virtual_node" facts.
        self.transform_imposed = None

    def _derived_context(self, transform) -> ConditionIdContext:
        # Build a lower-level context that carries this context's source
        # along with a single transform.
        derived = ConditionIdContext()
        derived.source = self.source
        derived.transform = transform
        return derived

    def requirement_context(self) -> ConditionIdContext:
        """Context for clauses generated from the required spec."""
        return self._derived_context(self.transform_required)

    def impose_context(self) -> ConditionIdContext:
        """Context for clauses generated from the imposed spec."""
        return self._derived_context(self.transform_imposed)
class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""
@ -1197,8 +1293,9 @@ def compiler_facts(self):
if compiler.compiler_obj is not None:
c = compiler.compiler_obj
for flag_type, flags in c.flags.items():
flag_group = " ".join(flags)
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag, flag_group))
if compiler.available:
self.gen.fact(fn.compiler_available(compiler_id))
@ -1375,7 +1472,7 @@ def _get_condition_id(
named_cond: spack.spec.Spec,
cache: ConditionSpecCache,
body: bool,
transform: Optional[TransformFunction] = None,
context: ConditionIdContext,
) -> int:
"""Get the id for one half of a condition (either a trigger or an imposed constraint).
@ -1389,15 +1486,15 @@ def _get_condition_id(
"""
pkg_cache = cache[named_cond.name]
named_cond_key = (str(named_cond), transform)
named_cond_key = (str(named_cond), context.transform)
result = pkg_cache.get(named_cond_key)
if result:
return result[0]
cond_id = next(self._id_counter)
requirements = self.spec_clauses(named_cond, body=body)
if transform:
requirements = transform(named_cond, requirements)
requirements = self.spec_clauses(named_cond, body=body, context=context)
if context.transform:
requirements = context.transform(named_cond, requirements)
pkg_cache[named_cond_key] = (cond_id, requirements)
return cond_id
@ -1408,8 +1505,7 @@ def condition(
imposed_spec: Optional[spack.spec.Spec] = None,
name: Optional[str] = None,
msg: Optional[str] = None,
transform_required: Optional[TransformFunction] = None,
transform_imposed: Optional[TransformFunction] = remove_node,
context: Optional[ConditionContext] = None,
):
"""Generate facts for a dependency or virtual provider condition.
@ -1418,10 +1514,8 @@ def condition(
imposed_spec: the constraints that are imposed when this condition is triggered
name: name for `required_spec` (required if required_spec is anonymous, ignored if not)
msg: description of the condition
transform_required: transformation applied to facts from the required spec. Defaults
to leave facts as they are.
transform_imposed: transformation applied to facts from the imposed spec. Defaults
to removing "node" and "virtual_node" facts.
context: if provided, indicates how to modify the clause-sets for the required/imposed
specs based on the type of constraint they are generated for (e.g. `depends_on`)
Returns:
int: id of the condition created by this function
"""
@ -1429,14 +1523,19 @@ def condition(
if not name:
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
if not context:
context = ConditionContext()
context.transform_imposed = remove_node
with spec_with_name(required_spec, name):
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the
# caller, we won't emit partial facts.
condition_id = next(self._id_counter)
requirement_context = context.requirement_context()
trigger_id = self._get_condition_id(
required_spec, cache=self._trigger_cache, body=True, transform=transform_required
required_spec, cache=self._trigger_cache, body=True, context=requirement_context
)
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
@ -1446,8 +1545,9 @@ def condition(
if not imposed_spec:
return condition_id
impose_context = context.impose_context()
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
imposed_spec, cache=self._effect_cache, body=False, context=impose_context
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
@ -1455,8 +1555,8 @@ def condition(
return condition_id
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
def impose(self, condition_id, imposed_spec, node=True, body=False):
imposed_constraints = self.spec_clauses(imposed_spec, body=body)
for pred in imposed_constraints:
# imposed "node"-like conditions are no-ops
if not node and pred.args[0] in ("node", "virtual_node"):
@ -1528,14 +1628,14 @@ def dependency_holds(input_spec, requirements):
if t & depflag
]
self.condition(
cond,
dep.spec,
name=pkg.name,
msg=msg,
transform_required=track_dependencies,
transform_imposed=dependency_holds,
context = ConditionContext()
context.source = ConstraintOrigin.append_type_suffix(
pkg.name, ConstraintOrigin.DEPENDS_ON
)
context.transform_required = track_dependencies
context.transform_imposed = dependency_holds
self.condition(cond, dep.spec, name=pkg.name, msg=msg, context=context)
self.gen.newline()
@ -1613,17 +1713,21 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
when_spec = spack.spec.Spec(pkg_name)
try:
# With virtual we want to emit "node" and "virtual_node" in imposed specs
transform: Optional[TransformFunction] = remove_node
if virtual:
transform = None
context = ConditionContext()
context.source = ConstraintOrigin.append_type_suffix(
pkg_name, ConstraintOrigin.REQUIRE
)
if not virtual:
context.transform_imposed = remove_node
# else: for virtuals we want to emit "node" and
# "virtual_node" in imposed specs
member_id = self.condition(
required_spec=when_spec,
imposed_spec=spec,
name=pkg_name,
transform_imposed=transform,
msg=f"{input_spec} is a requirement for package {pkg_name}",
context=context,
)
except Exception as e:
# Do not raise if the rule comes from the 'all' subsection, since usability
@ -1722,7 +1826,9 @@ def external_imposition(input_spec, requirements):
]
try:
self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
context = ConditionContext()
context.transform_imposed = external_imposition
self.condition(spec, spec, msg=msg, context=context)
except (spack.error.SpecError, RuntimeError) as e:
warnings.warn(f"while setting up external spec {spec}: {e}")
continue
@ -1797,6 +1903,7 @@ def spec_clauses(
expand_hashes: bool = False,
concrete_build_deps=False,
required_from: Optional[str] = None,
context: Optional[SourceContext] = None,
) -> List[AspFunction]:
"""Wrap a call to `_spec_clauses()` into a try/except block with better error handling.
@ -1812,6 +1919,7 @@ def spec_clauses(
transitive=transitive,
expand_hashes=expand_hashes,
concrete_build_deps=concrete_build_deps,
context=context,
)
except RuntimeError as exc:
msg = str(exc)
@ -1828,6 +1936,7 @@ def _spec_clauses(
transitive: bool = True,
expand_hashes: bool = False,
concrete_build_deps: bool = False,
context: Optional[SourceContext] = None,
) -> List[AspFunction]:
"""Return a list of clauses for a spec mandates are true.
@ -1839,6 +1948,8 @@ def _spec_clauses(
expand_hashes: if True, descend into hashes of concrete specs (default False)
concrete_build_deps: if False, do not include pure build deps of concrete specs
(as they have no effect on runtime constraints)
context: tracks what constraint this clause set is generated for (e.g. a
`depends_on` constraint in a package.py file)
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
@ -1925,13 +2036,19 @@ def _spec_clauses(
self.compiler_version_constraints.add(spec.compiler)
# compiler flags
source = context.source if context else "none"
for flag_type, flags in spec.compiler_flags.items():
flag_group = " ".join(flags)
for flag in flags:
clauses.append(f.node_flag(spec.name, flag_type, flag))
clauses.append(
f.node_flag(spec.name, fn.node_flag(flag_type, flag, flag_group, source))
)
if not spec.concrete and flag.propagate is True:
clauses.append(
f.propagate(
spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
spec.name,
fn.node_flag(flag_type, flag, flag_group, source),
fn.edge_types("link", "run"),
)
)
@ -2013,6 +2130,7 @@ def _spec_clauses(
body=body,
expand_hashes=expand_hashes,
concrete_build_deps=concrete_build_deps,
context=context,
)
)
@ -2648,7 +2766,9 @@ def literal_specs(self, specs):
effect_id, requirements = cache[imposed_spec_key]
else:
effect_id = next(self._id_counter)
requirements = self.spec_clauses(spec)
context = SourceContext()
context.source = "literal"
requirements = self.spec_clauses(spec, context=context)
root_name = spec.name
for clause in requirements:
clause_name = clause.args[0]
@ -3405,11 +3525,10 @@ def node_compiler_version(self, node, compiler, version):
self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])
def node_flag(self, node, flag_type, flag):
self._specs[node].compiler_flags.add_flag(flag_type, flag, False)
def node_flag_source(self, node, flag_type, source):
self._flag_sources[(node, flag_type)].add(source)
def node_flag(self, node, node_flag):
self._specs[node].compiler_flags.add_flag(
node_flag.flag_type, node_flag.flag, False, node_flag.flag_group, node_flag.source
)
def external_spec_selected(self, node, idx):
"""This means that the external spec and index idx has been selected for this package."""
@ -3450,15 +3569,23 @@ def virtual_on_edge(self, parent_node, provider_node, virtual):
dependencies[0].update_virtuals((virtual,))
def reorder_flags(self):
"""Order compiler flags on specs in predefined order.
We order flags so that any node's flags will take priority over
those of its dependents. That is, the deepest node in the DAG's
flags will appear last on the compile line, in the order they
were specified.
"""For each spec, determine the order of compiler flags applied to it.
The solver determines which flags are on nodes; this routine
imposes order afterwards.
imposes order afterwards. The order is:
1. Flags applied in compiler definitions should come first
2. Flags applied by dependents are ordered topologically (with a
dependency on `traverse` to resolve the partial order into a
stable total order)
3. Flags from requirements are then applied (requirements always
come from the package and never a parent)
4. Command-line flags should come last
Additionally, for each source (requirements, compiler, command line, and
dependents), flags from that source should retain their order and grouping:
e.g. for `y cflags="-z -a"` "-z" and "-a" should never have any intervening
flags inserted, and should always appear in that order.
"""
# reverse compilers so we get highest priority compilers that share a spec
compilers = dict(
@ -3473,40 +3600,78 @@ def reorder_flags(self):
flagmap_from_compiler = compilers[spec.compiler].flags
for flag_type in spec.compiler_flags.valid_compiler_flags():
from_compiler = flagmap_from_compiler.get(flag_type, [])
from_sources = []
# order is determined by the DAG. A spec's flags come after any of its ancestors
# on the compile line
node = SpecBuilder.make_node(pkg=spec.name)
source_key = (node, flag_type)
if source_key in self._flag_sources:
order = [
SpecBuilder.make_node(pkg=s.name)
for s in spec.traverse(order="post", direction="parents")
]
sorted_sources = sorted(
self._flag_sources[source_key], key=lambda s: order.index(s)
ordered_flags = []
# 1. Put compiler flags first
from_compiler = tuple(flagmap_from_compiler.get(flag_type, []))
extend_flag_list(ordered_flags, from_compiler)
# 2. Add all sources (the compiler is one of them, so skip any
# flag group that matches it exactly)
flag_groups = set()
for flag in self._specs[node].compiler_flags.get(flag_type, []):
flag_groups.add(
spack.spec.CompilerFlag(
flag.flag_group,
propagate=flag.propagate,
flag_group=flag.flag_group,
source=flag.source,
)
)
# add flags from each source, lowest to highest precedence
for node in sorted_sources:
all_src_flags = list()
per_pkg_sources = [self._specs[node]]
# For flags that are applied by dependents, put flags from parents
# before children; we depend on the stability of traverse() to
# achieve a stable flag order for flags introduced in this manner.
topo_order = list(s.name for s in spec.traverse(order="post", direction="parents"))
lex_order = list(sorted(flag_groups))
def _order_index(flag_group):
source = flag_group.source
# Note: if 'require: ^dependency cflags=...' is ever possible,
# this will topologically sort for require as well
type_index, pkg_source = ConstraintOrigin.strip_type_suffix(source)
if pkg_source in topo_order:
major_index = topo_order.index(pkg_source)
# If for x->y, x has multiple depends_on declarations that
# are activated, and each adds cflags to y, we fall back on
# alphabetical ordering to maintain a total order
minor_index = lex_order.index(flag_group)
else:
major_index = len(topo_order) + lex_order.index(flag_group)
minor_index = 0
return (type_index, major_index, minor_index)
prioritized_groups = sorted(flag_groups, key=lambda x: _order_index(x))
for grp in prioritized_groups:
grp_flags = tuple(
x for (x, y) in spack.compiler.tokenize_flags(grp.flag_group)
)
if grp_flags == from_compiler:
continue
as_compiler_flags = list(
spack.spec.CompilerFlag(
x,
propagate=grp.propagate,
flag_group=grp.flag_group,
source=grp.source,
)
for x in grp_flags
)
extend_flag_list(ordered_flags, as_compiler_flags)
# 3. Now put cmd-line flags last
if node.pkg in cmd_specs:
per_pkg_sources.append(cmd_specs[node.pkg])
for source in per_pkg_sources:
all_src_flags.extend(source.compiler_flags.get(flag_type, []))
extend_flag_list(from_sources, all_src_flags)
cmd_flags = cmd_specs[node.pkg].compiler_flags.get(flag_type, [])
extend_flag_list(ordered_flags, cmd_flags)
# compiler flags from compilers config are lowest precedence
ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
compiler_flags = spec.compiler_flags.get(flag_type, [])
msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_flags))
assert set(compiler_flags) == set(ordered_flags), msg
msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
assert set(compiler_flags) == set(ordered_compiler_flags), msg
spec.compiler_flags.update({flag_type: ordered_compiler_flags})
spec.compiler_flags.update({flag_type: ordered_flags})
def deprecated(self, node: NodeArgument, version: str) -> None:
tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
@ -3570,10 +3735,9 @@ def build_specs(self, function_tuples):
continue
# if we've already gotten a concrete spec for this pkg,
# do not bother calling actions on it except for node_flag_source,
# since node_flag_source is tracking information not in the spec itself
# do not bother calling actions on it
spec = self._specs.get(args[0])
if spec and spec.concrete and name != "node_flag_source":
if spec and spec.concrete:
continue
action(*args)
@ -3633,7 +3797,8 @@ def _develop_specs_from_env(spec, env):
assert spec.variants["dev_path"].value == path, error_msg
else:
spec.variants.setdefault("dev_path", spack.variant.SingleValuedVariant("dev_path", path))
spec.constrain(dev_info["spec"])
assert spec.satisfies(dev_info["spec"])
def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:

View File

@ -43,7 +43,7 @@
internal_error("Only nodes can have node_compiler_version").
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode),
internal_error("variant_value true for a non-node").
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode),
:- attr("node_flag", PackageNode, _), not attr("node", PackageNode),
internal_error("node_flag assigned for non-node").
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode),
internal_error("external_spec_selected for non-node").
@ -51,10 +51,6 @@
internal_error("non-node depends on something").
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode),
internal_error("something depends_on a non-node").
:- attr("node_flag_source", Node, _, _), not attr("node", Node),
internal_error("node_flag_source assigned for a non-node").
:- attr("node_flag_source", _, _, SourceNode), not attr("node", SourceNode),
internal_error("node_flag_source assigned with a non-node source").
:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode),
internal_error("virtual node with no provider").
:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode),
@ -152,7 +148,6 @@ unification_set(SetID, VirtualNode)
% TODO: literals, at the moment, can only influence the "root" unification set. This needs to be extended later.
% Node attributes that have multiple node arguments (usually, only the first argument is a node)
multiple_nodes_attribute("node_flag_source").
multiple_nodes_attribute("depends_on").
multiple_nodes_attribute("virtual_on_edge").
multiple_nodes_attribute("provider_set").
@ -390,7 +385,6 @@ trigger_condition_holds(ID, RequestorNode) :-
attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name);
attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1));
% Special cases
attr("node_flag_source", node(X, A1), A2, node(Y, A3)) : condition_requirement(ID, "node_flag_source", A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A3));
not cannot_hold(ID, PackageNode).
condition_holds(ConditionID, node(X, Package))
@ -438,13 +432,6 @@ attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constrai
attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)).
% For node flag sources we need to look at the condition_set of the source, since it is the dependent
% of the package on which I want to impose the constraint
attr("node_flag_source", node(X, A1), A2, node(Y, A3))
:- impose(ID, node(X, A1)),
imposed_constraint(ID, "node_flag_source", A1, A2, A3),
condition_set(node(Y, A3), node(X, A1)).
% Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar`
% might appear in the HEAD of a rule
attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual))
@ -485,8 +472,8 @@ virtual_condition_holds(node(Y, A2), Virtual)
% we cannot have additional flag values when we are working with concrete specs
:- attr("node", node(ID, Package)),
attr("hash", node(ID, Package), Hash),
attr("node_flag", node(ID, Package), FlagType, Flag),
not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag),
attr("node_flag", node(ID, Package), node_flag(FlagType, Flag, _, _)),
not imposed_constraint(Hash, "node_flag", Package, node_flag(FlagType, Flag, _, _)),
internal_error("imposed hash without imposing all flag values").
#defined condition/2.
@ -787,22 +774,15 @@ required_provider(Provider, Virtual)
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.
% TODO: the following two choice rules allow the solver to add compiler
% TODO: the following choice rule allows the solver to add compiler
% flags if their only source is from a requirement. This is overly-specific
% and should use a more-generic approach like in https://github.com/spack/spack/pull/37180
{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :-
{ attr("node_flag", node(ID, Package), NodeFlag) } :-
requirement_group_member(ConditionID, Package, RequirementID),
activate_requirement(node(ID, Package), RequirementID),
pkg_fact(Package, condition_effect(ConditionID, EffectID)),
imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue).
{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :-
requirement_group_member(ConditionID, Package1, RequirementID),
activate_requirement(node(NodeID1, Package1), RequirementID),
pkg_fact(Package1, condition_effect(ConditionID, EffectID)),
imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2),
imposed_nodes(EffectID, node(NodeID2, Package2), node(NodeID1, Package1)).
imposed_constraint(EffectID, "node_flag_set", Package, NodeFlag).
requirement_weight(node(ID, Package), Group, W) :-
W = #min {
@ -1048,23 +1028,22 @@ variant_is_propagated(PackageNode, Variant) :-
% 1. The same flag type is not set on this node
% 2. This node has the same compiler as the propagation source
propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
not attr("node_flag_set", node(PackageID, Package), FlagType, _),
propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), SourceNode) :-
propagate(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), _),
not attr("node_flag_set", node(PackageID, Package), node_flag(FlagType, _, _, "literal")),
% Same compiler as propagation source
node_compiler(node(PackageID, Package), CompilerID),
node_compiler(SourceNode, CompilerID),
attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
attr("propagate", SourceNode, node_flag(FlagType, Flag, FlagGroup, Source), _),
node(PackageID, Package) != SourceNode,
not runtime(Package).
attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).
attr("node_flag", PackageNode, NodeFlag) :- propagated_flag(PackageNode, NodeFlag, _).
% Cannot propagate the same flag from two distinct sources
error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
propagated_flag(node(ID, Package), node_flag(FlagType, _, _, _), node(_, Source1)),
propagated_flag(node(ID, Package), node_flag(FlagType, _, _, _), node(_, Source2)),
Source1 < Source2.
%----
@ -1351,23 +1330,18 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
% Compiler flags
%-----------------------------------------------------------------------------
% remember where flags came from
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).
% compiler flags from compilers.yaml are put on nodes if compiler matches
attr("node_flag", PackageNode, FlagType, Flag)
:- compiler_flag(CompilerID, FlagType, Flag),
attr("node_flag", PackageNode, node_flag(FlagType, Flag, FlagGroup, CompilerID))
:- compiler_flag(CompilerID, FlagType, Flag, FlagGroup),
node_compiler(PackageNode, CompilerID),
flag_type(FlagType),
compiler_id(CompilerID),
compiler_name(CompilerID, CompilerName),
compiler_version(CompilerID, Version).
% Flag set to something
attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, NodeFlag).
#defined compiler_flag/3.
#defined compiler_flag/4.
%-----------------------------------------------------------------------------

View File

@ -230,6 +230,13 @@ class NodeArgument(NamedTuple):
pkg: str
class NodeFlag(NamedTuple):
    """Solver representation of a single compiler flag on a node."""

    # flag category, e.g. "cflags"
    flag_type: str
    # the individual flag value, e.g. "-O2"
    flag: str
    # all flags introduced together with this one by a single source
    # (space-joined) — NOTE(review): presumably matches the " ".join
    # grouping done when the facts are generated; confirm against
    # spec_clauses/compiler_facts
    flag_group: str
    # identifies where the flag came from: "literal", "none", or an ID
    # produced by ConstraintOrigin.append_type_suffix
    source: str
def intermediate_repr(sym):
"""Returns an intermediate representation of clingo models for Spack's spec builder.
@ -248,6 +255,13 @@ def intermediate_repr(sym):
return NodeArgument(
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
)
elif sym.name == "node_flag":
return NodeFlag(
flag_type=intermediate_repr(sym.arguments[0]),
flag=intermediate_repr(sym.arguments[1]),
flag_group=intermediate_repr(sym.arguments[2]),
source=intermediate_repr(sym.arguments[3]),
)
except RuntimeError:
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
# that are not functions

View File

@ -13,6 +13,7 @@
#show attr/2.
#show attr/3.
#show attr/4.
#show attr/5.
% names of optimization criteria
#show opt_criterion/2.

View File

@ -781,17 +781,49 @@ class CompilerFlag(str):
propagate (bool): if ``True`` the flag value will
be passed to the package's dependencies. If
``False`` it will not
flag_group (str): if this flag was introduced along
with several flags via a single source, then
this will store all such flags
source (str): identifies the type of constraint that
introduced this flag (e.g. if a package has
``depends_on(... cflags=-g)``, then the ``source``
for "-g" would indicate ``depends_on``.
"""
    def __new__(cls, value, **kwargs):
        """Create a CompilerFlag (a str subclass) carrying extra metadata.

        Recognized keyword arguments: ``propagate`` (bool, default False),
        ``flag_group`` (str, defaults to the flag value itself, i.e. the
        flag is its own group), and ``source`` (default None).
        """
        obj = str.__new__(cls, value)
        obj.propagate = kwargs.pop("propagate", False)
        # a flag introduced on its own forms a singleton group
        obj.flag_group = kwargs.pop("flag_group", value)
        obj.source = kwargs.pop("source", None)
        return obj
_valid_compiler_flags = ["cflags", "cxxflags", "fflags", "ldflags", "ldlibs", "cppflags"]
def _shared_subset_pair_iterate(container1, container2):
"""
[0, a, c, d, f]
[a, d, e, f]
yields [(a, a), (d, d), (f, f)]
no repeated elements
"""
a_idx, b_idx = 0, 0
max_a, max_b = len(container1), len(container2)
while a_idx < max_a and b_idx < max_b:
if container1[a_idx] == container2[b_idx]:
yield (container1[a_idx], container2[b_idx])
a_idx += 1
b_idx += 1
else:
while container1[a_idx] < container2[b_idx]:
a_idx += 1
while container1[a_idx] > container2[b_idx]:
b_idx += 1
class FlagMap(lang.HashableMap):
__slots__ = ("spec",)
@ -800,23 +832,9 @@ def __init__(self, spec):
self.spec = spec
def satisfies(self, other):
return all(f in self and self[f] == other[f] for f in other)
return all(f in self and set(self[f]) >= set(other[f]) for f in other)
def intersects(self, other):
common_types = set(self) & set(other)
for flag_type in common_types:
if not self[flag_type] or not other[flag_type]:
# At least one of the two is empty
continue
if self[flag_type] != other[flag_type]:
return False
if not all(
f1.propagate == f2.propagate for f1, f2 in zip(self[flag_type], other[flag_type])
):
# At least one propagation flag didn't match
return False
return True
def constrain(self, other):
    """Add all flags in ``other`` that aren't in ``self`` to ``self``.

    Flags already present keep their position; missing flags are appended
    in the order they appear in ``other``. For flags shared between the
    two maps, a propagation request in ``other`` is forced onto the
    corresponding flag in ``self``.

    Return whether the spec changed.

    Note: the diff rendering interleaved removed and added lines here
    (unbalanced parentheses, dead branches); this is the post-change
    implementation with only the added lines kept.
    """
    changed = False
    for flag_type in other:
        if flag_type not in self:
            self[flag_type] = other[flag_type]
            changed = True
        else:
            # Append only the flags we do not already have, preserving
            # their relative order in ``other``
            extra_other = set(other[flag_type]) - set(self[flag_type])
            if extra_other:
                self[flag_type] = list(self[flag_type]) + list(
                    x for x in other[flag_type] if x in extra_other
                )
                changed = True

            # Next, if any flags in other propagate, we force them to
            # propagate in our case
            shared = list(sorted(set(other[flag_type]) - extra_other))
            for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])):
                if x.propagate:
                    y.propagate = True

    # TODO: what happens if flag groups with a partial (but not complete)
    # intersection specify different behaviors for flag propagation?
    return changed
@staticmethod
@ -858,7 +876,7 @@ def copy(self):
clone[name] = compiler_flag
return clone
def add_flag(self, flag_type, value, propagation):
def add_flag(self, flag_type, value, propagation, flag_group=None, source=None):
"""Stores the flag's value in CompilerFlag and adds it
to the FlagMap
@ -869,7 +887,8 @@ def add_flag(self, flag_type, value, propagation):
propagation (bool): if ``True`` the flag value will be passed to
the packages' dependencies. If``False`` it will not be passed
"""
flag = CompilerFlag(value, propagate=propagation)
flag_group = flag_group or value
flag = CompilerFlag(value, propagate=propagation, flag_group=flag_group, source=source)
if flag_type not in self:
self[flag_type] = [flag]
@ -1665,8 +1684,9 @@ def _add_flag(self, name, value, propagate):
elif name in valid_flags:
assert self.compiler_flags is not None
flags_and_propagation = spack.compiler.tokenize_flags(value, propagate)
flag_group = " ".join(x for (x, y) in flags_and_propagation)
for flag, propagation in flags_and_propagation:
self.compiler_flags.add_flag(name, flag, propagation)
self.compiler_flags.add_flag(name, flag, propagation, flag_group)
else:
# FIXME:
# All other flags represent variants. 'foo=true' and 'foo=false'

View File

@ -401,14 +401,6 @@ def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
)
@pytest.mark.xfail(reason="Broken, needs to be fixed")
def test_compiler_flags_from_compiler_and_dependent(self):
    """Flags from the compiler entry (-O3) and from the command line (-g)
    should both appear on the root spec and on its dependency."""
    client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
    client.concretize()
    for spec in (client, client["cmake"]):
        assert spec.compiler_flags["cflags"] == ["-O3", "-g"]
def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
mutable_config.set("compilers", [clang12_with_flags])
# Correct arch to use test compiler that has flags
@ -442,6 +434,13 @@ def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12
["hypre cflags='-g'", "^openblas cflags='-O3'"],
["^openblas cflags='-g'"],
),
# Setting propagation on parent and dependency -> the
# dependency propagation flags override
(
"hypre cflags=='-g' ^openblas cflags=='-O3'",
["hypre cflags='-g'", "^openblas cflags='-O3'"],
["^openblas cflags='-g'"],
),
# Propagation doesn't go across build dependencies
(
"cmake-client cflags=='-O2 -g'",

View File

@ -0,0 +1,333 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import spack.build_systems.generic
import spack.config
import spack.environment as ev
import spack.error
import spack.package_base
import spack.repo
import spack.util.spack_yaml as syaml
import spack.version
from spack.spec import Spec
from spack.test.conftest import create_test_repo
"""
These tests include the following package DAGs:
Firstly, w, x, y where w and x apply cflags to y.
w
|\
x |
|/
y
Secondly, v, y, where v does not apply cflags to y - this is for testing
mixing with compiler flag propagation in the absence of compiler flags applied
by dependents.
v
|
y
Finally, a diamond dag to check that the topological order is resolved into
a total order:
t
|\
u x
|/
y
"""
# Dependent that conditionally applies one or two cflags to "y".
_pkgx = (
"x",
"""\
class X(Package):
version("1.1")
version("1.0")
variant("activatemultiflag", default=False)
depends_on('y cflags="-d1"', when="~activatemultiflag")
depends_on('y cflags="-d1 -d2"', when="+activatemultiflag")
""",
)
# Leaf package that receives flags from its dependents.
_pkgy = (
"y",
"""\
class Y(Package):
version("2.1")
version("2.0")
""",
)
# Root of the w/x/y triangle; applies a flag to "y" chosen by a variant.
_pkgw = (
"w",
"""\
class W(Package):
version("3.1")
version("3.0")
variant("moveflaglater", default=False)
depends_on('x +activatemultiflag')
depends_on('y cflags="-d0"', when="~moveflaglater")
depends_on('y cflags="-d3"', when="+moveflaglater")
""",
)
# Dependent of "y" that applies no flags (used for propagation tests).
_pkgv = (
"v",
"""\
class V(Package):
version("4.1")
version("4.0")
depends_on("y")
""",
)
# Top of the t/u/x/y diamond; applies its own flags to "y".
_pkgt = (
"t",
"""\
class T(Package):
version("5.0")
depends_on("u")
depends_on("x+activatemultiflag")
depends_on("y cflags='-c1 -c2'")
""",
)
# Left node of the diamond; applies its own flags to "y".
_pkgu = (
"u",
"""\
class U(Package):
version("6.0")
depends_on("y cflags='-e1 -e2'")
""",
)
# Builds a temporary mock repository containing the six packages above.
@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
yield create_test_repo(tmpdir, [_pkgt, _pkgu, _pkgv, _pkgw, _pkgx, _pkgy])
# Makes the mock repository the active package repository for a test.
@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
yield mock_repo_path
def update_concretize_scope(conf_str, section):
    """Parse ``conf_str`` as YAML and install its ``section`` entry into
    the "concretize" configuration scope."""
    parsed = syaml.load_config(conf_str)
    spack.config.set(section, parsed[section], scope="concretize")
def test_mix_spec_and_requirements(concretize_scope, test_repo):
# Flags from a packages.yaml ``require:`` entry must merge with the flags
# given on the command-line spec.
conf_str = """\
packages:
y:
require: cflags="-c"
"""
update_concretize_scope(conf_str, "packages")
s1 = Spec('y cflags="-a"').concretized()
assert s1.satisfies('cflags="-a -c"')
def test_mix_spec_and_dependent(concretize_scope, test_repo):
# Flags applied by a dependent (x applies -d1 to y) must merge with the
# flags given on the command-line spec for the dependency.
s1 = Spec('x ^y cflags="-a"').concretized()
assert s1["y"].satisfies('cflags="-a -d1"')
def _compiler_cfg_one_entry_with_cflags(cflags):
    """Return a ``compilers.yaml`` payload (as YAML text) with a single
    gcc@12.100.100 entry whose cflags are set to ``cflags``.

    The ``compilers::`` double-colon syntax replaces, rather than amends,
    lower-precedence compiler configuration. The rendering of this file
    flattened the f-string's indentation (making the YAML invalid); the
    block indentation is restored here.
    """
    return f"""\
compilers::
- compiler:
    spec: gcc@12.100.100
    paths:
      cc: /usr/bin/fake-gcc
      cxx: /usr/bin/fake-g++
      f77: null
      fc: null
    flags:
      cflags: {cflags}
    operating_system: debian6
    modules: []
"""
def test_mix_spec_and_compiler_cfg(concretize_scope, test_repo):
# Flags from a compilers.yaml entry must merge with (and precede)
# command-line flags.
conf_str = _compiler_cfg_one_entry_with_cflags("-Wall")
update_concretize_scope(conf_str, "compilers")
s1 = Spec('y %gcc@12.100.100 cflags="-O2"').concretized()
assert s1.satisfies('cflags="-Wall -O2"')
# Columns: command-line flags, packages.yaml require: flags, compilers.yaml
# flags, whether dependent (depends_on) flags are active, and the expected
# final ordering of cflags on "y".
@pytest.mark.parametrize(
"cmd_flags,req_flags,cmp_flags,dflags,expected_order",
[
("-a -b", "-c", None, False, "-c -a -b"),
("-x7 -x4", "-x5 -x6", None, False, "-x5 -x6 -x7 -x4"),
("-x7 -x4", "-x5 -x6", "-x3 -x8", False, "-x3 -x8 -x5 -x6 -x7 -x4"),
("-x7 -x4", "-x5 -x6", "-x3 -x8", True, "-x3 -x8 -d1 -d2 -x5 -x6 -x7 -x4"),
("-x7 -x4", None, "-x3 -x8", False, "-x3 -x8 -x7 -x4"),
("-x7 -x4", None, "-x3 -x8", True, "-x3 -x8 -d1 -d2 -x7 -x4"),
# The remaining tests cover cases of intersection
("-a -b", "-a -c", None, False, "-c -a -b"),
("-a -b", None, "-a -c", False, "-c -a -b"),
("-a -b", "-a -c", "-a -d", False, "-d -c -a -b"),
("-a -d2 -d1", "-d2 -c", "-d1 -b", True, "-b -c -a -d2 -d1"),
("-a", "-d0 -d2 -c", "-d1 -b", True, "-b -d1 -d0 -d2 -c -a"),
],
)
def test_flag_order_and_grouping(
concretize_scope, test_repo, cmd_flags, req_flags, cmp_flags, dflags, expected_order
):
"""Check consistent flag ordering and grouping on a package "y"
with flags introduced from a variety of sources.
The ordering rules are explained in ``asp.SpecBuilder.reorder_flags``.
"""
# Install the require: and compilers.yaml configuration, when requested
if req_flags:
conf_str = f"""\
packages:
y:
require: cflags="{req_flags}"
"""
update_concretize_scope(conf_str, "packages")
if cmp_flags:
conf_str = _compiler_cfg_one_entry_with_cflags(cmp_flags)
update_concretize_scope(conf_str, "compilers")
compiler_spec = ""
if cmp_flags:
compiler_spec = "%gcc@12.100.100"
# Concretize through "x" when dependent flags should apply, else "y" alone
if dflags:
spec_str = f"x+activatemultiflag {compiler_spec} ^y"
expected_dflags = "-d1 -d2"
else:
spec_str = f"y {compiler_spec}"
expected_dflags = None
if cmd_flags:
spec_str += f' cflags="{cmd_flags}"'
root_spec = Spec(spec_str).concretized()
spec = root_spec["y"]
# All flags must be present (order-insensitive) ...
satisfy_flags = " ".join(x for x in [cmd_flags, req_flags, cmp_flags, expected_dflags] if x)
assert spec.satisfies(f'cflags="{satisfy_flags}"')
# ... and in the exact source-based order
assert spec.compiler_flags["cflags"] == expected_order.split()
def test_two_dependents_flag_mixing(concretize_scope, test_repo):
# Both w and x apply cflags to y; w's flags (the higher dependent in the
# DAG traversal) come first regardless of which variant-selected flag is
# used.
root_spec1 = Spec("w~moveflaglater").concretized()
spec1 = root_spec1["y"]
assert spec1.compiler_flags["cflags"] == "-d0 -d1 -d2".split()
root_spec2 = Spec("w+moveflaglater").concretized()
spec2 = root_spec2["y"]
assert spec2.compiler_flags["cflags"] == "-d3 -d1 -d2".split()
def test_propagate_and_compiler_cfg(concretize_scope, test_repo):
# Propagated command-line flags (==) must mix with compilers.yaml flags
# on the dependency.
conf_str = _compiler_cfg_one_entry_with_cflags("-f2")
update_concretize_scope(conf_str, "compilers")
root_spec = Spec("v %gcc@12.100.100 cflags=='-f1'").concretized()
assert root_spec["y"].satisfies("cflags='-f1 -f2'")
# Note: setting flags on a dependency overrides propagation, which
# is tested in test/concretize.py:test_compiler_flag_propagation
def test_propagate_and_pkg_dep(concretize_scope, test_repo):
# Propagated command-line flags must mix with flags applied by a
# dependent via depends_on.
root_spec1 = Spec("x ~activatemultiflag cflags=='-f1'").concretized()
assert root_spec1["y"].satisfies("cflags='-f1 -d1'")
def test_propagate_and_require(concretize_scope, test_repo):
# Propagated command-line flags must mix with packages.yaml require: flags
conf_str = """\
packages:
y:
require: cflags="-f2"
"""
update_concretize_scope(conf_str, "packages")
root_spec1 = Spec("v cflags=='-f1'").concretized()
assert root_spec1["y"].satisfies("cflags='-f1 -f2'")
# Next, check that a requirement does not "undo" a request for
# propagation from the command-line spec
conf_str = """\
packages:
v:
require: cflags="-f1"
"""
update_concretize_scope(conf_str, "packages")
root_spec2 = Spec("v cflags=='-f1'").concretized()
assert root_spec2["y"].satisfies("cflags='-f1'")
# Note: requirements cannot enforce propagation: any attempt to do
# so will generate a concretization error; this likely relates to
# the note about #37180 in concretize.lp
def test_dev_mix_flags(tmp_path, concretize_scope, mutable_mock_env_path, test_repo):
# Propagated flags on a develop spec must mix with compilers.yaml flags.
src_dir = tmp_path / "x-src"
env_content = f"""\
spack:
specs:
- y %gcc@12.100.100 cflags=='-fsanitize=address'
develop:
y:
spec: y cflags=='-fsanitize=address'
path: {src_dir}
"""
conf_str = _compiler_cfg_one_entry_with_cflags("-f1")
update_concretize_scope(conf_str, "compilers")
# Build and concretize a temporary environment from the manifest above
manifest_file = tmp_path / ev.manifest_name
manifest_file.write_text(env_content)
e = ev.create("test", manifest_file)
with e:
e.concretize()
e.write()
(result,) = list(j for i, j in e.concretized_specs() if j.name == "y")
assert result["y"].satisfies("cflags='-fsanitize=address -f1'")
def test_diamond_dep_flag_mixing(concretize_scope, test_repo):
"""A diamond where each dependent applies flags to the bottom
dependency. The goal is to ensure that the flag ordering is
(a) topological and (b) repeatable for elements not subject to
this partial ordering (i.e. the flags for the left and right
nodes of the diamond always appear in the same order).
`Spec.traverse` is responsible for handling both of these needs.
"""
root_spec1 = Spec("t").concretized()
spec1 = root_spec1["y"]
assert spec1.satisfies('cflags="-c1 -c2 -d1 -d2 -e1 -e2"')
# t's flags come first (it is the root), then those of its children u
# (-e*) and x (-d*) in a repeatable traversal order
assert spec1.compiler_flags["cflags"] == "-c1 -c2 -e1 -e2 -d1 -d2".split()

View File

@ -245,6 +245,65 @@ def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected):
assert c1 == c2
assert c1 == expected
@pytest.mark.parametrize(
"lhs,rhs,expected_lhs,expected_rhs,propagated_lhs,propagated_rhs",
[
# Disjoint flags: constraining appends the other side's flags, so the
# result depends on the direction of the constraint
(
'mpich cppflags="-O3"',
'mpich cppflags="-O2"',
'mpich cppflags="-O3 -O2"',
'mpich cppflags="-O2 -O3"',
[],
[],
),
# Overlapping flags with propagation (==): the shared flag ends up
# marked to propagate on both constrained results
(
'mpich cflags="-O3 -g"',
'mpich cflags=="-O3"',
'mpich cflags="-O3 -g"',
'mpich cflags=="-O3 -g"',
[("cflags", "-O3")],
[("cflags", "-O3")],
),
],
)
def test_constrain_compiler_flags(
self, lhs, rhs, expected_lhs, expected_rhs, propagated_lhs, propagated_rhs
):
"""Constraining is asymmetric for compiler flags. Also note that
Spec equality does not account for flag propagation, so the checks
here are manual.
"""
lhs, rhs, expected_lhs, expected_rhs = (
Spec(lhs),
Spec(rhs),
Spec(expected_lhs),
Spec(expected_rhs),
)
assert lhs.intersects(rhs)
assert rhs.intersects(lhs)
c1, c2 = lhs.copy(), rhs.copy()
c1.constrain(rhs)
c2.constrain(lhs)
assert c1 == expected_lhs
assert c2 == expected_rhs
# Constrained results must satisfy both inputs (superset semantics)
for x in [c1, c2]:
assert x.satisfies(lhs)
assert x.satisfies(rhs)
def _propagated_flags(_spec):
# Collect (flag_type, flag) pairs for every flag marked to propagate
result = set()
for flagtype in _spec.compiler_flags:
for flag in _spec.compiler_flags[flagtype]:
if flag.propagate:
result.add((flagtype, flag))
return result
assert set(propagated_lhs) <= _propagated_flags(c1)
assert set(propagated_rhs) <= _propagated_flags(c2)
def test_constrain_specs_by_hash(self, default_mock_concretization, database):
"""Test that Specs specified only by their hashes can constrain eachother."""
mpich_dag_hash = "/" + database.query_one("mpich").dag_hash()
@ -311,14 +370,11 @@ def test_concrete_specs_which_satisfies_abstract(self, lhs, rhs, default_mock_co
("mpich~~foo", "mpich++foo"),
("mpich++foo", "mpich~~foo"),
("mpich foo==True", "mpich foo==False"),
('mpich cppflags="-O3"', 'mpich cppflags="-O2"'),
('mpich cppflags="-O3"', 'mpich cppflags=="-O3"'),
("libelf@0:2.0", "libelf@2.1:3"),
("libelf@0:2.5%gcc@4.8:4.9", "libelf@2.1:3%gcc@4.5:4.7"),
("libelf+debug", "libelf~debug"),
("libelf+debug~foo", "libelf+debug+foo"),
("libelf debug=True", "libelf debug=False"),
('libelf cppflags="-O3"', 'libelf cppflags="-O2"'),
("libelf platform=test target=be os=be", "libelf target=fe os=fe"),
("namespace=builtin.mock", "namespace=builtin"),
],
@ -347,10 +403,6 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs):
("mpich", "mpich++foo"),
("mpich", "mpich~~foo"),
("mpich", "mpich foo==1"),
# Flags semantics is currently different from other variant
("mpich", 'mpich cflags="-O3"'),
("mpich cflags=-O3", 'mpich cflags="-O3 -Ofast"'),
("mpich cflags=-O2", 'mpich cflags="-O3"'),
("multivalue-variant foo=bar", "multivalue-variant +foo"),
("multivalue-variant foo=bar", "multivalue-variant ~foo"),
("multivalue-variant fee=bar", "multivalue-variant fee=baz"),
@ -1451,8 +1503,8 @@ def test_abstract_contains_semantic(lhs, rhs, expected, mock_packages):
(CompilerSpec, "gcc@5", "gcc@5-tag", (True, False, True)),
# Flags (flags are a map, so for convenience we initialize a full Spec)
# Note: the semantic is that of sv variants, not mv variants
(Spec, "cppflags=-foo", "cppflags=-bar", (False, False, False)),
(Spec, "cppflags='-bar -foo'", "cppflags=-bar", (False, False, False)),
(Spec, "cppflags=-foo", "cppflags=-bar", (True, False, False)),
(Spec, "cppflags='-bar -foo'", "cppflags=-bar", (True, True, False)),
(Spec, "cppflags=-foo", "cppflags=-foo", (True, True, True)),
(Spec, "cppflags=-foo", "cflags=-foo", (True, False, False)),
# Versions