Environment: separate parsing concerns from SpecList (#49973)
Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
This commit is contained in: parent b3ded1332e, commit 25beeef865
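As a quick orientation before the diff, here is a minimal sketch of how the reworked parsing API introduced by this commit is driven. It mirrors the fixture in the updated spec-list tests further below; the spec strings are illustrative, and it assumes the snippet runs under `spack python` so that Spack's modules are importable.

    from spack.environment.list import SpecListParser

    # Named lists ("definitions") are parsed first; user specs may then refer
    # to them with $name (and the sigil forms $%name / $^name).
    parser = SpecListParser()
    parser.parse_definitions(data=[{"mpis": ["zmpi@1.0", "mpich@3.0"]}])
    user_specs = parser.parse_user_specs(name="specs", yaml_list=["mpileaks", "$mpis"])
    print(user_specs.specs_as_yaml_list)  # ['mpileaks', 'zmpi@1.0', 'mpich@3.0']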
@@ -31,7 +31,6 @@
 import spack.repo
 import spack.schema.env
 import spack.spec
-import spack.spec_list
 import spack.store
 import spack.user_environment as uenv
 import spack.util.environment
@@ -44,10 +43,10 @@
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
 from spack.spec import Spec
-from spack.spec_list import SpecList
 from spack.util.path import substitute_path_variables

 from ..enums import ConfigScopePriority
+from .list import SpecList, SpecListError, SpecListParser

 SpecPair = spack.concretize.SpecPair

@@ -932,8 +931,10 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
         self.new_specs: List[Spec] = []
         self.views: Dict[str, ViewDescriptor] = {}

+        #: Parser for spec lists
+        self._spec_lists_parser = SpecListParser()
         #: Specs from "spack.yaml"
-        self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
+        self.spec_lists: Dict[str, SpecList] = {}
         #: User specs from the last concretization
         self.concretized_user_specs: List[Spec] = []
         #: Roots associated with the last concretization, in order
@@ -1001,26 +1002,6 @@ def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return lk.WriteTransaction(self.txlock, acquire=self._re_read)

-    def _process_definition(self, entry):
-        """Process a single spec definition item."""
-        when_string = entry.get("when")
-        if when_string is not None:
-            when = spack.spec.eval_conditional(when_string)
-            assert len([x for x in entry if x != "when"]) == 1
-        else:
-            when = True
-            assert len(entry) == 1
-
-        if when:
-            for name, spec_list in entry.items():
-                if name == "when":
-                    continue
-                user_specs = SpecList(name, spec_list, self.spec_lists.copy())
-                if name in self.spec_lists:
-                    self.spec_lists[name].extend(user_specs)
-                else:
-                    self.spec_lists[name] = user_specs
-
     def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
         """Process view option(s), which can be boolean, string, or None.

@@ -1082,21 +1063,24 @@ def _process_concrete_includes(self):

     def _construct_state_from_manifest(self):
         """Set up user specs and views from the manifest file."""
-        self.spec_lists = collections.OrderedDict()
         self.views = {}
+        self._sync_speclists()
+        self._process_view(spack.config.get("view", True))
+        self._process_concrete_includes()

-        for item in spack.config.get("definitions", []):
-            self._process_definition(item)
+    def _sync_speclists(self):
+        self.spec_lists = {}
+        self.spec_lists.update(
+            self._spec_lists_parser.parse_definitions(
+                data=spack.config.CONFIG.get("definitions", [])
+            )
+        )

         env_configuration = self.manifest[TOP_LEVEL_KEY]
         spec_list = env_configuration.get(user_speclist_name, [])
-        user_specs = SpecList(
-            user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
+        self.spec_lists[user_speclist_name] = self._spec_lists_parser.parse_user_specs(
+            name=user_speclist_name, yaml_list=spec_list
         )
-        self.spec_lists[user_speclist_name] = user_specs
-
-        self._process_view(spack.config.get("view", True))
-        self._process_concrete_includes()

     def all_concretized_user_specs(self) -> List[Spec]:
         """Returns all of the concretized user specs of the environment and
@@ -1167,9 +1151,7 @@ def clear(self, re_read=False):
             re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
                 and needs to be maintained when re-reading an existing environment.
         """
-        self.spec_lists = collections.OrderedDict()
-        self.spec_lists[user_speclist_name] = SpecList()
-
+        self.spec_lists = {}
         self._dev_specs = {}
         self.concretized_order = []  # roots of last concretize, in order
         self.concretized_user_specs = []  # user specs from last concretize
@@ -1276,22 +1258,6 @@ def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)

-    def update_stale_references(self, from_list=None):
-        """Iterate over spec lists updating references."""
-        if not from_list:
-            from_list = next(iter(self.spec_lists.keys()))
-        index = list(self.spec_lists.keys()).index(from_list)
-
-        # spec_lists is an OrderedDict to ensure lists read from the manifest
-        # are maintainted in order, hence, all list entries after the modified
-        # list may refer to the modified list requiring stale references to be
-        # updated.
-        for i, (name, speclist) in enumerate(
-            list(self.spec_lists.items())[index + 1 :], index + 1
-        ):
-            new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
-            speclist.update_reference(new_reference)
-
     def add(self, user_spec, list_name=user_speclist_name):
         """Add a single user_spec (non-concretized) to the Environment

@@ -1311,18 +1277,17 @@ def add(self, user_spec, list_name=user_speclist_name):
         elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
             virtuals = spack.repo.PATH.provider_index.providers.keys()
             if spec.name not in virtuals:
-                msg = "no such package: %s" % spec.name
-                raise SpackEnvironmentError(msg)
+                raise SpackEnvironmentError(f"no such package: {spec.name}")

         list_to_change = self.spec_lists[list_name]
         existing = str(spec) in list_to_change.yaml_list
         if not existing:
             list_to_change.add(str(spec))
-            self.update_stale_references(list_name)
             if list_name == user_speclist_name:
                 self.manifest.add_user_spec(str(user_spec))
             else:
                 self.manifest.add_definition(str(user_spec), list_name=list_name)
+            self._sync_speclists()

         return bool(not existing)

@@ -1366,18 +1331,17 @@ def change_existing_spec(
                 "There are no specs named {0} in {1}".format(match_spec.name, list_name)
             )
         elif len(matches) > 1 and not allow_changing_multiple_specs:
-            raise ValueError("{0} matches multiple specs".format(str(match_spec)))
+            raise ValueError(f"{str(match_spec)} matches multiple specs")

         for idx, spec in matches:
             override_spec = Spec.override(spec, change_spec)
-            self.spec_lists[list_name].replace(idx, str(override_spec))
             if list_name == user_speclist_name:
                 self.manifest.override_user_spec(str(override_spec), idx=idx)
             else:
                 self.manifest.override_definition(
                     str(spec), override=str(override_spec), list_name=list_name
                 )
-        self.update_stale_references(from_list=list_name)
+        self._sync_speclists()

     def remove(self, query_spec, list_name=user_speclist_name, force=False):
         """Remove specs from an environment that match a query_spec"""
@@ -1405,22 +1369,17 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
             raise SpackEnvironmentError(f"{err_msg_header}, no spec matches")

         old_specs = set(self.user_specs)
-        new_specs = set()
+        # Remove specs from the appropriate spec list
         for spec in matches:
             if spec not in list_to_change:
                 continue
             try:
                 list_to_change.remove(spec)
-                self.update_stale_references(list_name)
-                new_specs = set(self.user_specs)
-            except spack.spec_list.SpecListError as e:
-                # define new specs list
-                new_specs = set(self.user_specs)
+            except SpecListError as e:
                 msg = str(e)
                 if force:
                     msg += " It will be removed from the concrete specs."
-                    # Mock new specs, so we can remove this spec from concrete spec lists
-                    new_specs.remove(spec)
                 tty.warn(msg)
             else:
                 if list_name == user_speclist_name:
@@ -1428,7 +1387,11 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
                 else:
                     self.manifest.remove_definition(str(spec), list_name=list_name)

-        # If force, update stale concretized specs
+        # Recompute "definitions" and user specs
+        self._sync_speclists()
+        new_specs = set(self.user_specs)
+
+        # If 'force', update stale concretized specs
         for spec in old_specs - new_specs:
             if force and spec in self.concretized_user_specs:
                 i = self.concretized_user_specs.index(spec)
@@ -2827,6 +2790,8 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
             item[list_name].append(user_spec)
             break

+        # "definitions" can be remote, so we need to update the global config too
+        spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
         self.changed = True

     def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2853,6 +2818,8 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
             except ValueError:
                 pass

+        # "definitions" can be remote, so we need to update the global config too
+        spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
         self.changed = True

     def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2878,6 +2845,8 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
             except ValueError:
                 pass

+        # "definitions" can be remote, so we need to update the global config too
+        spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
         self.changed = True

     def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
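The hunks above switch Environment over to a single `_sync_speclists()` helper that delegates to the new `SpecListParser`. The `data` argument of `parse_definitions` is the already-loaded `definitions` configuration section; the sketch below shows a hypothetical payload of that shape, including a conditional entry. The names, versions, and the use of `platform` in the `when` expression are illustrative assumptions, not taken from this commit.

    parser = SpecListParser()
    speclists = parser.parse_definitions(
        data=[
            {"compilers": ["%gcc@12.3.0"]},
            # Hypothetical conditional entry: definitions sharing a name are merged,
            # and this one only contributes when its `when` expression is true.
            {"compilers": ["%clang@16.0.0"], "when": 'platform == "darwin"'},
            {"packages": ["zlib", "$compilers"]},
        ]
    )
    print(speclists["packages"].specs_as_yaml_list)  # e.g. ['zlib', '%gcc@12.3.0']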
@@ -2,36 +2,24 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import itertools
-from typing import List
+from typing import Any, Dict, List, NamedTuple, Optional, Union

 import spack.spec
+import spack.util.spack_yaml
 import spack.variant
 from spack.error import SpackError
 from spack.spec import Spec


 class SpecList:
-    def __init__(self, name="specs", yaml_list=None, reference=None):
-        # Normalize input arguments
-        yaml_list = yaml_list or []
-        reference = reference or {}
-
+    def __init__(self, *, name: str = "specs", yaml_list=None, expanded_list=None):
         self.name = name
-        self._reference = reference  # TODO: Do we need defensive copy here?
-
-        # Validate yaml_list before assigning
-        if not all(isinstance(s, str) or isinstance(s, (list, dict)) for s in yaml_list):
-            raise ValueError(
-                "yaml_list can contain only valid YAML types! Found:\n %s"
-                % [type(s) for s in yaml_list]
-            )
-        self.yaml_list = yaml_list[:]
-
+        self.yaml_list = yaml_list[:] if yaml_list is not None else []
         # Expansions can be expensive to compute and difficult to keep updated
         # We cache results and invalidate when self.yaml_list changes
-        self._expanded_list = None
+        self.specs_as_yaml_list = expanded_list or []
         self._constraints = None
-        self._specs = None
+        self._specs: Optional[List[Spec]] = None

     @property
     def is_matrix(self):
@@ -40,12 +28,6 @@ def is_matrix(self):
                 return True
         return False

-    @property
-    def specs_as_yaml_list(self):
-        if self._expanded_list is None:
-            self._expanded_list = self._expand_references(self.yaml_list)
-        return self._expanded_list
-
     @property
     def specs_as_constraints(self):
         if self._constraints is None:
@@ -62,7 +44,7 @@ def specs_as_constraints(self):
     @property
     def specs(self) -> List[Spec]:
         if self._specs is None:
-            specs = []
+            specs: List[Spec] = []
             # This could be slightly faster done directly from yaml_list,
             # but this way is easier to maintain.
             for constraint_list in self.specs_as_constraints:
@@ -74,12 +56,13 @@ def specs(self) -> List[Spec]:

         return self._specs

-    def add(self, spec):
-        self.yaml_list.append(str(spec))
+    def add(self, spec: Spec):
+        spec_str = str(spec)
+        self.yaml_list.append(spec_str)

         # expanded list can be updated without invalidation
-        if self._expanded_list is not None:
-            self._expanded_list.append(str(spec))
+        if self.specs_as_yaml_list is not None:
+            self.specs_as_yaml_list.append(spec_str)

         # Invalidate cache variables when we change the list
         self._constraints = None
@@ -101,83 +84,18 @@ def remove(self, spec):
         # Remove may contain more than one string representation of the same spec
         for item in remove:
             self.yaml_list.remove(item)
+            self.specs_as_yaml_list.remove(item)

         # invalidate cache variables when we change the list
-        self._expanded_list = None
         self._constraints = None
         self._specs = None

-    def replace(self, idx: int, spec: str):
-        """Replace the existing spec at the index with the new one.
-
-        Args:
-            idx: index of the spec to replace in the speclist
-            spec: new spec
-        """
-        self.yaml_list[idx] = spec
-
-        # invalidate cache variables when we change the list
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-    def extend(self, other, copy_reference=True):
+    def extend(self, other: "SpecList", copy_reference=True) -> None:
         self.yaml_list.extend(other.yaml_list)
-        self._expanded_list = None
+        self.specs_as_yaml_list.extend(other.specs_as_yaml_list)
         self._constraints = None
         self._specs = None

-        if copy_reference:
-            self._reference = other._reference
-
-    def update_reference(self, reference):
-        self._reference = reference
-        self._expanded_list = None
-        self._constraints = None
-        self._specs = None
-
-    def _parse_reference(self, name):
-        sigil = ""
-        name = name[1:]
-
-        # Parse specs as constraints
-        if name.startswith("^") or name.startswith("%"):
-            sigil = name[0]
-            name = name[1:]
-
-        # Make sure the reference is valid
-        if name not in self._reference:
-            msg = f"SpecList '{self.name}' refers to named list '{name}'"
-            msg += " which does not appear in its reference dict."
-            raise UndefinedReferenceError(msg)
-
-        return (name, sigil)
-
-    def _expand_references(self, yaml):
-        if isinstance(yaml, list):
-            ret = []
-
-            for item in yaml:
-                # if it's a reference, expand it
-                if isinstance(item, str) and item.startswith("$"):
-                    # replace the reference and apply the sigil if needed
-                    name, sigil = self._parse_reference(item)
-
-                    referent = [
-                        _sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
-                    ]
-                    ret.extend(referent)
-                else:
-                    # else just recurse
-                    ret.append(self._expand_references(item))
-            return ret
-        elif isinstance(yaml, dict):
-            # There can't be expansions in dicts
-            return dict((name, self._expand_references(val)) for (name, val) in yaml.items())
-        else:
-            # Strings are just returned
-            return yaml
-
     def __len__(self):
         return len(self.specs)

@@ -251,6 +169,111 @@ def _sigilify(item, sigil):
         return sigil + item


+class Definition(NamedTuple):
+    name: str
+    yaml_list: List[Union[str, Dict]]
+    when: Optional[str]
+
+
+class SpecListParser:
+    """Parse definitions and user specs from data in environments"""
+
+    def __init__(self):
+        self.definitions: Dict[str, SpecList] = {}
+
+    def parse_definitions(self, *, data: List[Dict[str, Any]]) -> Dict[str, SpecList]:
+        definitions_from_yaml: Dict[str, List[Definition]] = {}
+        for item in data:
+            value = self._parse_yaml_definition(item)
+            definitions_from_yaml.setdefault(value.name, []).append(value)
+
+        self.definitions = {}
+        self._build_definitions(definitions_from_yaml)
+        return self.definitions
+
+    def parse_user_specs(self, *, name, yaml_list) -> SpecList:
+        definition = Definition(name=name, yaml_list=yaml_list, when=None)
+        return self._speclist_from_definitions(name, [definition])
+
+    def _parse_yaml_definition(self, yaml_entry) -> Definition:
+        when_string = yaml_entry.get("when")
+
+        if (when_string and len(yaml_entry) > 2) or (not when_string and len(yaml_entry) > 1):
+            mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
+            attributes = ", ".join(x for x in yaml_entry if x != "when")
+            error_msg = f"definition must have a single attribute, got many: {attributes}"
+            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
+
+        for name, yaml_list in yaml_entry.items():
+            if name == "when":
+                continue
+            return Definition(name=name, yaml_list=yaml_list, when=when_string)
+
+        # If we are here, it means only "when" is in the entry
+        mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
+        error_msg = "definition must have a single attribute, got none"
+        raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
+
+    def _build_definitions(self, definitions_from_yaml: Dict[str, List[Definition]]):
+        for name, definitions in definitions_from_yaml.items():
+            self.definitions[name] = self._speclist_from_definitions(name, definitions)
+
+    def _speclist_from_definitions(self, name, definitions) -> SpecList:
+        combined_yaml_list = []
+        for def_part in definitions:
+            if def_part.when is not None and not spack.spec.eval_conditional(def_part.when):
+                continue
+            combined_yaml_list.extend(def_part.yaml_list)
+        expanded_list = self._expand_yaml_list(combined_yaml_list)
+        return SpecList(name=name, yaml_list=combined_yaml_list, expanded_list=expanded_list)
+
+    def _expand_yaml_list(self, raw_yaml_list):
+        result = []
+        for item in raw_yaml_list:
+            if isinstance(item, str) and item.startswith("$"):
+                result.extend(self._expand_reference(item))
+                continue

+            value = item
+            if isinstance(item, dict):
+                value = self._expand_yaml_matrix(item)
+            result.append(value)
+        return result
+
+    def _expand_reference(self, item: str):
+        sigil, name = "", item[1:]
+        if name.startswith("^") or name.startswith("%"):
+            sigil, name = name[0], name[1:]
+
+        if name not in self.definitions:
+            mark = spack.util.spack_yaml.get_mark_from_yaml_data(item)
+            error_msg = f"trying to expand the name '{name}', which is not defined yet"
+            raise UndefinedReferenceError(f"{mark.name}:{mark.line + 1}: {error_msg}")
+
+        value = self.definitions[name].specs_as_yaml_list
+        if not sigil:
+            return value
+        return [_sigilify(x, sigil) for x in value]
+
+    def _expand_yaml_matrix(self, matrix_yaml):
+        extra_attributes = set(matrix_yaml) - {"matrix", "exclude"}
+        if extra_attributes:
+            mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
+            error_msg = f"extra attributes in spec matrix: {','.join(sorted(extra_attributes))}"
+            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
+
+        if "matrix" not in matrix_yaml:
+            mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
+            error_msg = "matrix is missing the 'matrix' attribute"
+            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")
+
+        # Assume data has been validated against the YAML schema
+        result = {"matrix": [self._expand_yaml_list(row) for row in matrix_yaml["matrix"]]}
+        if "exclude" in matrix_yaml:
+            result["exclude"] = matrix_yaml["exclude"]
+        return result
+
+
 class SpecListError(SpackError):
     """Error class for all errors related to SpecList objects."""

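One usage note on the parser defined above, sketched with the same mock data the updated tests below use: references are expanded inside matrix rows, while the matrix itself stays a dictionary in the expanded YAML list.

    parser = SpecListParser()
    parser.parse_definitions(data=[{"gccs": ["%gcc@4.5.0"]}, {"clangs": ["%clang@3.3"]}])
    result = parser.parse_user_specs(
        name="specs", yaml_list=[{"matrix": [["hypre"], ["$gccs", "$clangs"]]}]
    )
    print(result.specs_as_yaml_list)
    # -> [{'matrix': [['hypre'], ['%gcc@4.5.0', '%clang@3.3']]}]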
@@ -20,7 +20,7 @@
     SpackEnvironmentViewError,
     _error_on_nonempty_view_dir,
 )
-from spack.spec_list import UndefinedReferenceError
+from spack.environment.list import UndefinedReferenceError

 pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows")

@@ -107,7 +108,8 @@ def test_env_change_spec_in_definition(tmp_path, mock_packages, mutable_mock_env

     assert any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs)

-    e.change_existing_spec(spack.spec.Spec("mpileaks@2.2"), list_name="desired_specs")
+    with e:
+        e.change_existing_spec(spack.spec.Spec("mpileaks@2.2"), list_name="desired_specs")
     e.write()

     # Ensure changed specs are in memory
@@ -776,10 +777,8 @@ def test_env_with_include_def_missing(mutable_mock_env_path, mock_packages):
 """
     )

-    e = ev.Environment(env_path)
-    with e:
-        with pytest.raises(UndefinedReferenceError, match=r"which does not appear"):
-            e.concretize()
+    with pytest.raises(UndefinedReferenceError, match=r"which is not defined"):
+        _ = ev.Environment(env_path)


 @pytest.mark.regression("41292")
@@ -6,53 +6,56 @@
 import pytest

 import spack.concretize
+from spack.environment.list import SpecListParser
 from spack.installer import PackageInstaller
 from spack.spec import Spec
-from spack.spec_list import SpecList
+
+DEFAULT_EXPANSION = [
+    "mpileaks",
+    "zmpi@1.0",
+    "mpich@3.0",
+    {"matrix": [["hypre"], ["%gcc@4.5.0", "%clang@3.3"]]},
+    "libelf",
+]
+
+DEFAULT_CONSTRAINTS = [
+    [Spec("mpileaks")],
+    [Spec("zmpi@1.0")],
+    [Spec("mpich@3.0")],
+    [Spec("hypre"), Spec("%gcc@4.5.0")],
+    [Spec("hypre"), Spec("%clang@3.3")],
+    [Spec("libelf")],
+]
+
+DEFAULT_SPECS = [
+    Spec("mpileaks"),
+    Spec("zmpi@1.0"),
+    Spec("mpich@3.0"),
+    Spec("hypre%gcc@4.5.0"),
+    Spec("hypre%clang@3.3"),
+    Spec("libelf"),
+]
+
+
+@pytest.fixture()
+def parser_and_speclist():
+    """Default configuration of parser and user spec list for tests"""
+    parser = SpecListParser()
+    parser.parse_definitions(
+        data=[
+            {"gccs": ["%gcc@4.5.0"]},
+            {"clangs": ["%clang@3.3"]},
+            {"mpis": ["zmpi@1.0", "mpich@3.0"]},
+        ]
+    )
+    result = parser.parse_user_specs(
+        name="specs",
+        yaml_list=["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"],
+    )
+    return parser, result


 class TestSpecList:
-    default_input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"]
-
-    default_reference = {
-        "gccs": SpecList("gccs", ["%gcc@4.5.0"]),
-        "clangs": SpecList("clangs", ["%clang@3.3"]),
-        "mpis": SpecList("mpis", ["zmpi@1.0", "mpich@3.0"]),
-    }
-
-    default_expansion = [
-        "mpileaks",
-        "zmpi@1.0",
-        "mpich@3.0",
-        {"matrix": [["hypre"], ["%gcc@4.5.0", "%clang@3.3"]]},
-        "libelf",
-    ]
-
-    default_constraints = [
-        [Spec("mpileaks")],
-        [Spec("zmpi@1.0")],
-        [Spec("mpich@3.0")],
-        [Spec("hypre"), Spec("%gcc@4.5.0")],
-        [Spec("hypre"), Spec("%clang@3.3")],
-        [Spec("libelf")],
-    ]
-
-    default_specs = [
-        Spec("mpileaks"),
-        Spec("zmpi@1.0"),
-        Spec("mpich@3.0"),
-        Spec("hypre%gcc@4.5.0"),
-        Spec("hypre%clang@3.3"),
-        Spec("libelf"),
-    ]
-
-    def test_spec_list_expansions(self):
-        speclist = SpecList("specs", self.default_input, self.default_reference)
-
-        assert speclist.specs_as_yaml_list == self.default_expansion
-        assert speclist.specs_as_constraints == self.default_constraints
-        assert speclist.specs == self.default_specs
-
     @pytest.mark.regression("28749")
     @pytest.mark.parametrize(
         "specs,expected",
@@ -86,116 +89,87 @@ def test_spec_list_expansions(self):
         ],
     )
     def test_spec_list_constraint_ordering(self, specs, expected):
-        speclist = SpecList("specs", specs)
-        expected_specs = [Spec(x) for x in expected]
-        assert speclist.specs == expected_specs
+        result = SpecListParser().parse_user_specs(name="specs", yaml_list=specs)
+        assert result.specs == [Spec(x) for x in expected]

-    def test_spec_list_add(self):
-        speclist = SpecList("specs", self.default_input, self.default_reference)
+    def test_mock_spec_list(self, parser_and_speclist):
+        """Tests expected properties on the default mock spec list"""
+        parser, mock_list = parser_and_speclist
+        assert mock_list.specs_as_yaml_list == DEFAULT_EXPANSION
+        assert mock_list.specs_as_constraints == DEFAULT_CONSTRAINTS
+        assert mock_list.specs == DEFAULT_SPECS

-        assert speclist.specs_as_yaml_list == self.default_expansion
-        assert speclist.specs_as_constraints == self.default_constraints
-        assert speclist.specs == self.default_specs
+    def test_spec_list_add(self, parser_and_speclist):
+        parser, mock_list = parser_and_speclist
+        mock_list.add("libdwarf")

-        speclist.add("libdwarf")
-
-        assert speclist.specs_as_yaml_list == self.default_expansion + ["libdwarf"]
-        assert speclist.specs_as_constraints == self.default_constraints + [[Spec("libdwarf")]]
-        assert speclist.specs == self.default_specs + [Spec("libdwarf")]
+        assert mock_list.specs_as_yaml_list == DEFAULT_EXPANSION + ["libdwarf"]
+        assert mock_list.specs_as_constraints == DEFAULT_CONSTRAINTS + [[Spec("libdwarf")]]
+        assert mock_list.specs == DEFAULT_SPECS + [Spec("libdwarf")]

-    def test_spec_list_remove(self):
-        speclist = SpecList("specs", self.default_input, self.default_reference)
+    def test_spec_list_remove(self, parser_and_speclist):
+        parser, mock_list = parser_and_speclist
+        mock_list.remove("libelf")

-        assert speclist.specs_as_yaml_list == self.default_expansion
-        assert speclist.specs_as_constraints == self.default_constraints
-        assert speclist.specs == self.default_specs
-
-        speclist.remove("libelf")
-
-        assert speclist.specs_as_yaml_list + ["libelf"] == self.default_expansion
-
-        assert speclist.specs_as_constraints + [[Spec("libelf")]] == self.default_constraints
-
-        assert speclist.specs + [Spec("libelf")] == self.default_specs
-
-    def test_spec_list_update_reference(self):
-        speclist = SpecList("specs", self.default_input, self.default_reference)
-
-        assert speclist.specs_as_yaml_list == self.default_expansion
-        assert speclist.specs_as_constraints == self.default_constraints
-        assert speclist.specs == self.default_specs
-
-        new_mpis = SpecList("mpis", self.default_reference["mpis"].yaml_list)
-        new_mpis.add("mpich@3.3")
-        new_reference = self.default_reference.copy()
-        new_reference["mpis"] = new_mpis
-
-        speclist.update_reference(new_reference)
-
-        expansion = list(self.default_expansion)
-        expansion.insert(3, "mpich@3.3")
-        constraints = list(self.default_constraints)
-        constraints.insert(3, [Spec("mpich@3.3")])
-        specs = list(self.default_specs)
-        specs.insert(3, Spec("mpich@3.3"))
-
-        assert speclist.specs_as_yaml_list == expansion
-        assert speclist.specs_as_constraints == constraints
-        assert speclist.specs == specs
-
-    def test_spec_list_extension(self):
-        speclist = SpecList("specs", self.default_input, self.default_reference)
-
-        assert speclist.specs_as_yaml_list == self.default_expansion
-        assert speclist.specs_as_constraints == self.default_constraints
-        assert speclist.specs == self.default_specs
-
-        new_ref = self.default_reference.copy()
-        otherlist = SpecList("specs", ["zlib", {"matrix": [["callpath"], ["%intel@18"]]}], new_ref)
-
-        speclist.extend(otherlist)
-
-        assert speclist.specs_as_yaml_list == (
-            self.default_expansion + otherlist.specs_as_yaml_list
+        assert mock_list.specs_as_yaml_list + ["libelf"] == DEFAULT_EXPANSION
+        assert mock_list.specs_as_constraints + [[Spec("libelf")]] == DEFAULT_CONSTRAINTS
+        assert mock_list.specs + [Spec("libelf")] == DEFAULT_SPECS
+
+    def test_spec_list_extension(self, parser_and_speclist):
+        parser, mock_list = parser_and_speclist
+        other_list = parser.parse_user_specs(
+            name="specs", yaml_list=[{"matrix": [["callpath"], ["%intel@18"]]}]
         )
-        assert speclist.specs == self.default_specs + otherlist.specs
-        assert speclist._reference is new_ref
+        mock_list.extend(other_list)
+
+        assert mock_list.specs_as_yaml_list == (DEFAULT_EXPANSION + other_list.specs_as_yaml_list)
+        assert mock_list.specs == DEFAULT_SPECS + other_list.specs
+
+    def test_spec_list_nested_matrices(self, parser_and_speclist):
+        parser, _ = parser_and_speclist

-    def test_spec_list_nested_matrices(self):
         inner_matrix = [{"matrix": [["zlib", "libelf"], ["%gcc", "%intel"]]}]
         outer_addition = ["+shared", "~shared"]
         outer_matrix = [{"matrix": [inner_matrix, outer_addition]}]
-        speclist = SpecList("specs", outer_matrix)
+        result = parser.parse_user_specs(name="specs", yaml_list=outer_matrix)

         expected_components = itertools.product(
             ["zlib", "libelf"], ["%gcc", "%intel"], ["+shared", "~shared"]
         )
         expected = [Spec(" ".join(combo)) for combo in expected_components]
-        assert set(speclist.specs) == set(expected)
+        assert set(result.specs) == set(expected)

     @pytest.mark.regression("16897")
     def test_spec_list_recursion_specs_as_constraints(self):
         input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$%gccs", "$%clangs"]]}, "libelf"]

-        reference = {
-            "gccs": SpecList("gccs", ["gcc@4.5.0"]),
-            "clangs": SpecList("clangs", ["clang@3.3"]),
-            "mpis": SpecList("mpis", ["zmpi@1.0", "mpich@3.0"]),
-        }
-
-        speclist = SpecList("specs", input, reference)
-
-        assert speclist.specs_as_yaml_list == self.default_expansion
-        assert speclist.specs_as_constraints == self.default_constraints
-        assert speclist.specs == self.default_specs
-
-    def test_spec_list_matrix_exclude(self, mock_packages):
-        # Test on non-boolean variants for regression for #16841
-        matrix = [
-            {"matrix": [["multivalue-variant"], ["foo=bar", "foo=baz"]], "exclude": ["foo=bar"]}
+        definitions = [
+            {"gccs": ["gcc@4.5.0"]},
+            {"clangs": ["clang@3.3"]},
+            {"mpis": ["zmpi@1.0", "mpich@3.0"]},
         ]
-        speclist = SpecList("specs", matrix)
-        assert len(speclist.specs) == 1
+        parser = SpecListParser()
+        parser.parse_definitions(data=definitions)
+        result = parser.parse_user_specs(name="specs", yaml_list=input)
+
+        assert result.specs_as_yaml_list == DEFAULT_EXPANSION
+        assert result.specs_as_constraints == DEFAULT_CONSTRAINTS
+        assert result.specs == DEFAULT_SPECS
+
+    @pytest.mark.regression("16841")
+    def test_spec_list_matrix_exclude(self, mock_packages):
+        parser = SpecListParser()
+        result = parser.parse_user_specs(
+            name="specs",
+            yaml_list=[
+                {
+                    "matrix": [["multivalue-variant"], ["foo=bar", "foo=baz"]],
+                    "exclude": ["foo=bar"],
+                }
+            ],
+        )
+        assert len(result.specs) == 1

     def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_mockery):
         # Put mpich in the database so it can be referred to by hash.
@@ -205,9 +179,10 @@ def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_moc

         # Create matrix and exclude +debug, which excludes the first mpich after its abstract hash
         # is resolved.
-        speclist = SpecList(
-            "specs",
-            [
+        parser = SpecListParser()
+        result = parser.parse_user_specs(
+            name="specs",
+            yaml_list=[
                 {
                     "matrix": [
                         ["mpileaks"],
@@ -220,5 +195,5 @@ def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_moc
         )

         # Ensure that only mpich~debug is selected, and that the assembled spec remains abstract.
-        assert len(speclist.specs) == 1
-        assert speclist.specs[0] == Spec(f"mpileaks ^callpath ^mpich/{mpich_2.dag_hash(5)}")
+        assert len(result.specs) == 1
+        assert result.specs[0] == Spec(f"mpileaks ^callpath ^mpich/{mpich_2.dag_hash(5)}")