
If you have an environment like

```
$ cat spack.yaml
spack:
  specs: [openmpi@4.1.0+cuda]
```

this PR provides a new command `spack change` that you can use to adjust environment specs from the command line:

```
$ spack change openmpi~cuda
$ cat spack.yaml
spack:
  specs: [openmpi@4.1.0~cuda]
```

In other words, this allows you to tweak the details of environment specs from the command line.

Notes:

* This is only allowed for environments that do not define matrices (see the sketch of the checks below).
* Extending this to matrices is possible, but not anticipated to be needed immediately.
* If that were done, it should probably only be done for "named"/non-anonymous specs (i.e. we can change `openmpi+cuda` but not a spec like `+cuda` or `@4.0.1~cuda`).
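As a rough illustration of those restrictions, the checks could be expressed against the `SpecList` class in the attached file below roughly as in this sketch. This is not the PR's actual code: the function name `validate_change`, the error messages, and the `spack.spec_list` module path are assumptions (the attachment does not show the file name), and it presumes the code runs inside Spack's own Python environment (e.g. via `spack python`).

```python
# Hedged sketch of the command's guard conditions, not the PR's implementation.
from spack.spec import Spec
from spack.spec_list import SpecListError  # assumed module path for the attached file


def validate_change(spec_list, change_str):
    """Reject changes the command does not support."""
    change = Spec(change_str)

    # A matrix expands to many specs, so a single entry cannot be edited in place.
    if spec_list.is_matrix:
        raise SpecListError("cannot change specs in an environment that defines a matrix")

    # Anonymous specs like '+cuda' or '@4.0.1~cuda' have no package name to match on.
    if not change.name:
        raise SpecListError("spack change requires a named spec, e.g. 'openmpi~cuda'")

    return change
```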
Attached file (Python, 245 lines, 8.1 KiB):
```python
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools

from six import string_types

import spack.variant
from spack.error import SpackError
from spack.spec import Spec


class SpecList(object):
    def __init__(self, name="specs", yaml_list=None, reference=None):
        # Normalize input arguments
        yaml_list = yaml_list or []
        reference = reference or {}

        self.name = name
        self._reference = reference  # TODO: Do we need defensive copy here?

        # Validate yaml_list before assigning
        if not all(isinstance(s, string_types) or isinstance(s, (list, dict)) for s in yaml_list):
            raise ValueError(
                "yaml_list can contain only valid YAML types! Found:\n %s"
                % [type(s) for s in yaml_list]
            )
        self.yaml_list = yaml_list[:]

        # Expansions can be expensive to compute and difficult to keep updated
        # We cache results and invalidate when self.yaml_list changes
        self._expanded_list = None
        self._constraints = None
        self._specs = None

    @property
    def is_matrix(self):
        for item in self.specs_as_yaml_list:
            if isinstance(item, dict):
                return True
        return False

    @property
    def specs_as_yaml_list(self):
        if self._expanded_list is None:
            self._expanded_list = self._expand_references(self.yaml_list)
        return self._expanded_list

    @property
    def specs_as_constraints(self):
        if self._constraints is None:
            constraints = []
            for item in self.specs_as_yaml_list:
                if isinstance(item, dict):  # matrix of specs
                    constraints.extend(_expand_matrix_constraints(item))
                else:  # individual spec
                    constraints.append([Spec(item)])
            self._constraints = constraints

        return self._constraints

    @property
    def specs(self):
        if self._specs is None:
            specs = []
            # This could be slightly faster done directly from yaml_list,
            # but this way is easier to maintain.
            for constraint_list in self.specs_as_constraints:
                spec = constraint_list[0].copy()
                for const in constraint_list[1:]:
                    spec.constrain(const)
                specs.append(spec)
            self._specs = specs

        return self._specs

    def add(self, spec):
        self.yaml_list.append(str(spec))

        # expanded list can be updated without invalidation
        if self._expanded_list is not None:
            self._expanded_list.append(str(spec))

        # Invalidate cache variables when we change the list
        self._constraints = None
        self._specs = None

    def remove(self, spec):
        # Get spec to remove from list
        remove = [
            s
            for s in self.yaml_list
            if (isinstance(s, string_types) and not s.startswith("$")) and Spec(s) == Spec(spec)
        ]
        if not remove:
            msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name)
            msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
            msg += "expanded from a matrix and cannot be removed directly."
            raise SpecListError(msg)

        assert len(remove) == 1
        self.yaml_list.remove(remove[0])

        # invalidate cache variables when we change the list
        self._expanded_list = None
        self._constraints = None
        self._specs = None

    def extend(self, other, copy_reference=True):
        self.yaml_list.extend(other.yaml_list)
        self._expanded_list = None
        self._constraints = None
        self._specs = None

        if copy_reference:
            self._reference = other._reference

    def update_reference(self, reference):
        self._reference = reference
        self._expanded_list = None
        self._constraints = None
        self._specs = None

    def _parse_reference(self, name):
        sigil = ""
        name = name[1:]

        # Parse specs as constraints
        if name.startswith("^") or name.startswith("%"):
            sigil = name[0]
            name = name[1:]

        # Make sure the reference is valid
        if name not in self._reference:
            msg = "SpecList %s refers to " % self.name
            msg += "named list %s " % name
            msg += "which does not appear in its reference dict"
            raise UndefinedReferenceError(msg)

        return (name, sigil)

    def _expand_references(self, yaml):
        if isinstance(yaml, list):
            ret = []

            for item in yaml:
                # if it's a reference, expand it
                if isinstance(item, string_types) and item.startswith("$"):
                    # replace the reference and apply the sigil if needed
                    name, sigil = self._parse_reference(item)
                    referent = [
                        _sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
                    ]
                    ret.extend(referent)
                else:
                    # else just recurse
                    ret.append(self._expand_references(item))
            return ret
        elif isinstance(yaml, dict):
            # There can't be expansions in dicts
            return dict((name, self._expand_references(val)) for (name, val) in yaml.items())
        else:
            # Strings are just returned
            return yaml

    def __len__(self):
        return len(self.specs)

    def __getitem__(self, key):
        return self.specs[key]


def _expand_matrix_constraints(matrix_config):
    # recurse so we can handle nested matrices
    expanded_rows = []
    for row in matrix_config["matrix"]:
        new_row = []
        for r in row:
            if isinstance(r, dict):
                # Flatten the nested matrix into a single row of constraints
                new_row.extend(
                    [
                        [" ".join([str(c) for c in expanded_constraint_list])]
                        for expanded_constraint_list in _expand_matrix_constraints(r)
                    ]
                )
            else:
                new_row.append([r])
        expanded_rows.append(new_row)

    excludes = matrix_config.get("exclude", [])  # only compute once
    sigil = matrix_config.get("sigil", "")

    results = []
    for combo in itertools.product(*expanded_rows):
        # Construct a combined spec to test against excludes
        flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
        flat_combo = [Spec(x) for x in flat_combo]

        test_spec = flat_combo[0].copy()
        for constraint in flat_combo[1:]:
            test_spec.constrain(constraint)

        # Abstract variants don't have normal satisfaction semantics
        # Convert all variants to concrete types.
        # This method is best effort, so all existing variants will be
        # converted before any error is raised.
        # Catch exceptions because we want to be able to operate on
        # abstract specs without needing package information
        try:
            spack.variant.substitute_abstract_variants(test_spec)
        except spack.variant.UnknownVariantError:
            pass
        if any(test_spec.satisfies(x) for x in excludes):
            continue

        if sigil:
            flat_combo[0] = Spec(sigil + str(flat_combo[0]))

        # Add to list of constraints
        results.append(flat_combo)

    return results


def _sigilify(item, sigil):
    if isinstance(item, dict):
        if sigil:
            item["sigil"] = sigil
        return item
    else:
        return sigil + item


class SpecListError(SpackError):
    """Error class for all errors related to SpecList objects."""


class UndefinedReferenceError(SpecListError):
    """Error class for undefined references in Spack stacks."""


class InvalidSpecConstraintError(SpecListError):
    """Error class for invalid spec constraints at concretize time."""
```
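To make the matrix restriction concrete, here is a small usage sketch of the `SpecList` class above. The package names and list contents are made up for illustration, the `spack.spec_list` import path is an assumption (the attachment does not show the file name), and it presumes execution inside Spack's Python environment (e.g. `spack python`).

```python
# Hypothetical usage of the SpecList class shown above; contents are illustrative.
from spack.spec_list import SpecList  # assumed module path

mpis = SpecList("mpis", ["openmpi@4.1.0", "mpich"])

# A plain list of named specs: the case `spack change` targets.
simple = SpecList("specs", ["openmpi@4.1.0+cuda", "hdf5"])
assert not simple.is_matrix
simple.remove("openmpi@4.1.0+cuda")  # matched by Spec equality against the YAML entry
simple.add("openmpi@4.1.0~cuda")
print(simple.specs_as_yaml_list)     # ['hdf5', 'openmpi@4.1.0~cuda']

# A matrix entry expands to the cross product of its rows, so no single YAML
# entry corresponds to one spec and in-place edits are refused.
matrixed = SpecList(
    "specs",
    [{"matrix": [["$mpis"], ["+debug", "~debug"]]}],
    reference={"mpis": mpis},
)
assert matrixed.is_matrix
print([str(s) for s in matrixed.specs])
# e.g. ['openmpi@4.1.0+debug', 'openmpi@4.1.0~debug', 'mpich+debug', 'mpich~debug']
```

The `$mpis` entry is expanded through the `reference` dict by `_expand_references`, and each row of the matrix contributes one constraint per generated spec, which is why `remove()` rejects matrix-expanded specs and why the new command only edits plain named entries.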