spec: change semantics of __getitem__
Now __getitem__ can pick items in the transitive link/run graph, or from direct build dependencies.

Signed-off-by: Massimiliano Culpo <massimiliano.culpo@gmail.com>
This commit is contained in:
parent 1dfbf91eac
commit d48bddc196
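In practice this means spec["name"] now resolves through the link/run graph first and only falls back to the root's direct build dependencies (the ones attached with the % sigil). A minimal sketch of the intended behavior, with invented package names and assuming a mock repository, not code from this diff:

    # Sketch only: package names are illustrative.
    spec = Spec("mpileaks %gcc@12 ^mpich").concretized()

    spec["mpich"]     # found by traversing link/run dependencies
    spec["callpath"]  # transitive link/run deps remain reachable
    spec["gcc"]       # not in the link/run graph: resolved from the
                      # direct build edge created by "%gcc@12"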
@@ -442,7 +442,7 @@ def add_flag(name: str, value: str, propagate: bool):
             build_dependency.name = name_conversion[build_dependency.name]

         initial_spec._add_dependency(
-            build_dependency, depflag=spack.deptypes.BUILD, virtuals=()
+            build_dependency, depflag=spack.deptypes.BUILD, virtuals=(), direct=True
         )

     elif (
@@ -631,15 +631,22 @@ class DependencySpec:
         virtuals: virtual packages provided from child to parent node.
     """

-    __slots__ = "parent", "spec", "depflag", "virtuals"
+    __slots__ = "parent", "spec", "depflag", "virtuals", "direct"

     def __init__(
-        self, parent: "Spec", spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
+        self,
+        parent: "Spec",
+        spec: "Spec",
+        *,
+        depflag: dt.DepFlag,
+        virtuals: Tuple[str, ...],
+        direct: bool = False,
     ):
         self.parent = parent
         self.spec = spec
         self.depflag = depflag
         self.virtuals = tuple(sorted(set(virtuals)))
+        self.direct = direct

     def update_deptypes(self, depflag: dt.DepFlag) -> bool:
         """Update the current dependency types"""
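For orientation, an edge carrying the new flag could be built roughly like this (a sketch, not code from the diff; dt is spack.deptypes and the specs are placeholders):

    parent, child = Spec("hdf5"), Spec("gcc")
    # An edge spelled with the % sigil is recorded as a direct build edge.
    direct_edge = DependencySpec(parent, child, depflag=dt.BUILD, virtuals=(), direct=True)
    # Ordinary ^-style edges keep the default and stay transitive.
    link_edge = DependencySpec(parent, Spec("zlib"), depflag=dt.LINK, virtuals=())
    assert direct_edge.direct and not link_edge.direct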
@@ -1636,10 +1643,18 @@ def _set_architecture(self, **kwargs):
         else:
             setattr(self.architecture, new_attr, new_value)

-    def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]):
-        """Called by the parser to add another spec as a dependency."""
+    def _add_dependency(
+        self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...], direct: bool = False
+    ):
+        """Called by the parser to add another spec as a dependency.
+
+        Args:
+            depflag: dependency type for this edge
+            virtuals: virtuals on this edge
+            direct: if True denotes a direct dependency (associated with the % sigil)
+        """
         if spec.name not in self._dependencies or not spec.name:
-            self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
+            self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals, direct=direct)
             return

         # Keep the intersection of constraints when a dependency is added multiple times with
@@ -1674,7 +1689,12 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[
         )

     def add_dependency_edge(
-        self, dependency_spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
+        self,
+        dependency_spec: "Spec",
+        *,
+        depflag: dt.DepFlag,
+        virtuals: Tuple[str, ...],
+        direct: bool = False,
     ):
         """Add a dependency edge to this spec.

@@ -1682,6 +1702,7 @@ def add_dependency_edge(
             dependency_spec: spec of the dependency
             deptypes: dependency types for this edge
             virtuals: virtuals provided by this edge
+            direct: if True denotes a direct dependency
         """
         # Check if we need to update edges that are already present
         selected = self._dependencies.select(child=dependency_spec.name)

@@ -1720,7 +1741,9 @@ def add_dependency_edge(
                 edge.update_virtuals(virtuals=virtuals)
                 return

-        edge = DependencySpec(self, dependency_spec, depflag=depflag, virtuals=virtuals)
+        edge = DependencySpec(
+            self, dependency_spec, depflag=depflag, virtuals=virtuals, direct=direct
+        )
         self._dependencies.add(edge)
         dependency_spec._dependents.add(edge)

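A rough illustration of how a caller such as the parser might use the extended API above to record a %-style dependency (placeholder names, not the parser's actual code):

    root = Spec("hdf5")
    # "%gcc" in a spec string would be forwarded with direct=True ...
    root._add_dependency(Spec("gcc"), depflag=dt.BUILD, virtuals=(), direct=True)
    # ... while "^zlib" keeps the default direct=False.
    root._add_dependency(Spec("zlib"), depflag=dt.LINK | dt.RUN, virtuals=())
    assert any(edge.direct for edge in root.edges_to_dependencies())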
@@ -2069,9 +2092,6 @@ def to_node_dict(self, hash=ht.dag_hash):
         if self.architecture:
             d.update(self.architecture.to_dict())

-        if self.compiler:
-            d.update(self.compiler.to_dict())
-
         if self.namespace:
             d["namespace"] = self.namespace

@@ -2317,8 +2337,6 @@ def override(init_spec, change_spec):
         else:
             raise ValueError("{0} is not a variant of {1}".format(vname, new_spec.name))

-    if change_spec.compiler:
-        new_spec.compiler = change_spec.compiler
     if change_spec.compiler_flags:
         for flagname, flagvals in change_spec.compiler_flags.items():
             new_spec.compiler_flags[flagname] = flagvals
@@ -2990,12 +3008,6 @@ def constrain(self, other, deps=True):
             self.namespace = other.namespace
             changed = True

-        if self.compiler is not None and other.compiler is not None:
-            changed |= self.compiler.constrain(other.compiler)
-        elif self.compiler is None and other.compiler is not None:
-            changed |= self.compiler != other.compiler
-            self.compiler = other.compiler
-
         changed |= self.versions.intersect(other.versions)
         changed |= self.variants.constrain(other.variants)

@@ -3022,10 +3034,8 @@ def constrain(self, other, deps=True):

         return changed

-    def _constrain_dependencies(self, other):
+    def _constrain_dependencies(self, other: "Spec") -> bool:
         """Apply constraints of other spec's dependencies to this spec."""
         other = self._autospec(other)

         if not other._dependencies:
             return False

@@ -3040,8 +3050,10 @@ def _constrain_dependencies(self, other):

         # Handle common first-order constraints directly
         changed = False
-        for name in self.common_dependencies(other):
-            changed |= self[name].constrain(other[name], deps=False)
+        common_dependencies = {x.name for x in self.dependencies()}
+        common_dependencies &= {x.name for x in other.dependencies()}
+        for name in common_dependencies:
+            changed |= self[name].constrain(other[name], deps=True)
             if name in self._dependencies:
                 # WARNING: This function is an implementation detail of the
                 # WARNING: original concretizer. Since with that greedy

@@ -3070,7 +3082,7 @@ def _constrain_dependencies(self, other):
         return changed

     def common_dependencies(self, other):
-        """Return names of dependencies that self an other have in common."""
+        """Return names of dependencies that self and other have in common."""
         common = set(s.name for s in self.traverse(root=False))
         common.intersection_update(s.name for s in other.traverse(root=False))
         return common
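The loop above now constrains only packages that appear as direct dependencies of both specs, but it recurses into each of them (deps=True). A hedged sketch of the effect on two abstract specs (invented names, behavior inferred from the loop above):

    a = Spec("root ^zlib@1.2")
    b = Spec("root ^zlib+shared")
    changed = a.constrain(b)
    # zlib is a direct dependency of both, so its constraints are merged in place
    assert changed and a["zlib"].satisfies("@1.2 +shared")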
@@ -3170,10 +3182,6 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
         if not self.versions.intersects(other.versions):
             return False

-        if self.compiler and other.compiler:
-            if not self.compiler.intersects(other.compiler):
-                return False
-
         if not self.variants.intersects(other.variants):
             return False

@@ -3196,8 +3204,10 @@ def _intersects_dependencies(self, other):
             return True

         # Handle first-order constraints directly
-        for name in self.common_dependencies(other):
-            if not self[name].intersects(other[name], deps=False):
+        common_dependencies = {x.name for x in self.dependencies()}
+        common_dependencies &= {x.name for x in other.dependencies()}
+        for name in common_dependencies:
+            if not self[name].intersects(other[name], deps=True):
                 return False

         # For virtual dependencies, we need to dig a little deeper.
@@ -3274,12 +3284,6 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
         if not self.versions.satisfies(other.versions):
             return False

-        if self.compiler and other.compiler:
-            if not self.compiler.satisfies(other.compiler):
-                return False
-        elif other.compiler and not self.compiler:
-            return False
-
         if not self.variants.satisfies(other.variants):
             return False

@@ -3420,7 +3424,6 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
                 self.name != other.name
                 and self.versions != other.versions
                 and self.architecture != other.architecture
-                and self.compiler != other.compiler
                 and self.variants != other.variants
                 and self._normal != other._normal
                 and self.concrete != other.concrete

@@ -3436,8 +3439,6 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
         self.name = other.name
         self.versions = other.versions.copy()
         self.architecture = other.architecture.copy() if other.architecture else None
-        # FIXME (compiler as nodes): removing compiler attribute
-        # self.compiler = other.compiler.copy() if other.compiler else None
         if cleardeps:
             self._dependents = _EdgeMap(store_by_child=False)
             self._dependencies = _EdgeMap(store_by_child=True)
@@ -3565,9 +3566,8 @@ def __getitem__(self, name: str):
             query_parameters = re.split(r"\s*,\s*", csv)

         order = lambda: itertools.chain(
-            self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
-            self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
-            self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
+            self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
+            self.edges_to_dependencies(depflag=dt.BUILD | dt.TEST),
         )

         # Consider runtime dependencies and direct build/test deps before transitive dependencies,
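With the reordered chain, a match found through the link/run traversal shadows a same-named direct build/test dependency, which only acts as a fallback. A hypothetical illustration (package names invented, assuming a mock repository):

    s = Spec("my-app %cmake ^my-lib").concretized()
    # "cmake" is only a direct build dependency here, so it is resolved by the
    # fallback edges_to_dependencies(depflag=dt.BUILD | dt.TEST) branch, while
    # "my-lib" is found earlier, by the link/run breadth-first traversal.
    s["cmake"], s["my-lib"]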
@@ -3603,8 +3603,11 @@ def __contains__(self, spec):
         # if anonymous or same name, we only have to look at the root
         if not spec.name or spec.name == self.name:
             return self.satisfies(spec)
-        else:
-            return any(s.satisfies(spec) for s in self.traverse(root=False))
+
+        try:
+            return self[spec.name].satisfies(spec)
+        except KeyError:
+            return False

     def eq_dag(self, other, deptypes=True, vs=None, vo=None):
         """True if the full dependency DAGs of specs are equal."""
@@ -3654,7 +3657,6 @@ def _cmp_node(self):
         yield self.namespace
         yield self.versions
         yield self.variants
-        yield self.compiler
         yield self.compiler_flags
         yield self.architecture
         yield self.abstract_hash

@@ -3708,9 +3710,6 @@ def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = Fa

             name
             version
-            compiler
-            compiler.name
-            compiler.version
             compiler_flags
             variants
             architecture
@@ -3953,15 +3952,28 @@ def __str__(self):
         if not self._dependencies:
             return self.format()

-        root_str = [self.format()]
-        sorted_dependencies = sorted(
-            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
-        )
-        sorted_dependencies = [
-            d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies
-        ]
-        spec_str = " ^".join(root_str + sorted_dependencies)
-        return spec_str.strip()
+        name_conversion = {
+            "llvm": "clang",
+            "intel-oneapi-compilers": "oneapi",
+            "llvm-amdgpu": "rocmcc",
+            "intel-oneapi-compiler-classic": "intel",
+            "acfl": "arm",
+        }
+        parts = [self.format()]
+        direct, transitive = lang.stable_partition(
+            self.edges_to_dependencies(), predicate_fn=lambda x: x.direct
+        )
+        for item in sorted(direct, key=lambda x: x.spec.name):
+            current_name = item.spec.name
+            new_name = name_conversion.get(current_name, current_name)
+            parts.append(f"%{item.spec.format()}".replace(current_name, new_name))
+        for item in sorted(transitive, key=lambda x: x.spec.name):
+            # Recurse to attach build deps in order
+            edge_attributes = ""
+            if item.virtuals or item.depflag:
+                edge_attributes = item.spec.format("{edge_attributes}") + " "
+            parts.append(f"^{edge_attributes}{str(item.spec)}")
+        return " ".join(parts).strip()

     @property
     def colored_str(self):
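With the new __str__, direct edges are rendered with a leading % (and compiler-implementation names are mapped through name_conversion), while transitive edges keep the ^ syntax. A sketch of the expected output on an invented spec, not taken from the test suite:

    s = Spec("hdf5+mpi")
    s._add_dependency(Spec("llvm@16"), depflag=dt.BUILD, virtuals=(), direct=True)
    s._add_dependency(Spec("zlib"), depflag=dt.LINK, virtuals=())
    # str(s) now starts with the root, then prints the direct edge as "%clang@16"
    # (llvm is mapped through name_conversion), then the transitive edge as
    # "^...zlib", possibly prefixed with its edge attributes.
    print(str(s))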
@@ -904,7 +904,9 @@ def test_simultaneous_concretization_of_specs(self, abstract_specs):
         concrete_specs = spack.concretize.concretize_specs_together(abstract_specs)

         # Check there's only one configuration of each package in the DAG
-        names = set(dep.name for spec in concrete_specs for dep in spec.traverse())
+        names = set(
+            dep.name for spec in concrete_specs for dep in spec.traverse(deptype=("link", "run"))
+        )
         for name in names:
             name_specs = set(spec[name] for spec in concrete_specs if name in spec)
             assert len(name_specs) == 1
@@ -704,12 +704,9 @@ def test_unsatisfiable_multi_value_variant(self, default_mock_concretization):
         a.concretize()

     def test_copy_satisfies_transitive(self):
-        spec = Spec("dttop")
-        spec.concretize()
+        spec = Spec("dttop").concretized()
         copy = spec.copy()
-        for s in spec.traverse():
-            assert s.satisfies(copy[s.name])
-            assert copy[s.name].satisfies(s)
+        assert {s.dag_hash() for s in spec.traverse()} == {s.dag_hash() for s in copy.traverse()}

     def test_intersects_virtual(self):
         assert Spec("mpich").intersects(Spec("mpi"))
@@ -14,6 +14,7 @@
 import spack.parser
 import spack.platforms.test
 import spack.repo
+import spack.solver.asp
 import spack.spec
 from spack.parser import (
     UNIX_FILENAME,
@@ -165,7 +166,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.COMPILER_AND_VERSION, value="%bar@1.0"),
            Token(TokenType.VERSION, value="@2.0"),
        ],
-        "foo@2.0%bar@1.0",
+        "foo@2.0 %bar@1.0",
    ),
    # Single dependency with version
    dependency_with_version("openmpi ^hwloc@1.2e6"),
@@ -187,7 +188,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
            Token(TokenType.VERSION, value="@8.1_1e"),
        ],
-        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4 ^stackwalker@8.1_1e",
+        "mvapich_foo ^_openmpi@1.2:1.4,1.6+debug~qt_4 %intel@12.1 ^stackwalker@8.1_1e",
    ),
    (
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e",

@@ -203,7 +204,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
            Token(TokenType.VERSION, value="@8.1_1e"),
        ],
-        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2 ^stackwalker@8.1_1e",
+        "mvapich_foo ^_openmpi@1.2:1.4,1.6~qt_4 debug=2 %intel@12.1 ^stackwalker@8.1_1e",
    ),
    (
        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 "
@@ -221,8 +222,8 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
            Token(TokenType.VERSION, value="@8.1_1e"),
        ],
-        "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags=-O3 +debug~qt_4 "
-        "^stackwalker@8.1_1e",
+        "mvapich_foo ^_openmpi@1.2:1.4,1.6 cppflags=-O3 +debug~qt_4 %intel@12.1"
+        " ^stackwalker@8.1_1e",
    ),
    # Specs containing YAML or JSON in the package name
    (
@@ -235,7 +236,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="boost"),
            Token(TokenType.VERSION, value="@3.1.4"),
        ],
-        "yaml-cpp@0.1.8%intel@12.1 ^boost@3.1.4",
+        "yaml-cpp@0.1.8 %intel@12.1 ^boost@3.1.4",
    ),
    (
        r"builtin.yaml-cpp%gcc",

@@ -243,7 +244,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
            Token(TokenType.COMPILER, value="%gcc"),
        ],
-        "yaml-cpp%gcc",
+        "yaml-cpp %gcc",
    ),
    (
        r"testrepo.yaml-cpp%gcc",

@@ -251,7 +252,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.yaml-cpp"),
            Token(TokenType.COMPILER, value="%gcc"),
        ],
-        "yaml-cpp%gcc",
+        "yaml-cpp %gcc",
    ),
    (
        r"builtin.yaml-cpp@0.1.8%gcc@7.2.0 ^boost@3.1.4",

@@ -263,7 +264,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="boost"),
            Token(TokenType.VERSION, value="@3.1.4"),
        ],
-        "yaml-cpp@0.1.8%gcc@7.2.0 ^boost@3.1.4",
+        "yaml-cpp@0.1.8 %gcc@7.2.0 ^boost@3.1.4",
    ),
    (
        r"builtin.yaml-cpp ^testrepo.boost ^zlib",
@@ -490,7 +491,7 @@ def _specfile_for(spec_str, filename):
            Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1:12.6"),
            Token(TokenType.BOOL_VARIANT, value="+ debug"),
        ],
-        "%intel@12.1:12.6+debug",
+        "+debug %intel@12.1:12.6",
    ),
    (
        "@ 12.1:12.6 + debug - qt_4",

@@ -512,7 +513,7 @@ def _specfile_for(spec_str, filename):
    (
        "@:0.4 % nvhpc",
        [Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")],
-        "@:0.4%nvhpc",
+        "@:0.4 %nvhpc",
    ),
    (
        "^[virtuals=mpi] openmpi",
@@ -820,7 +821,6 @@ def test_dep_spec_by_hash(database, config):
     mpileaks_hash_zmpi.replace_hash()
     assert "zmpi" in mpileaks_hash_zmpi
     assert mpileaks_hash_zmpi["zmpi"] == zmpi
-    assert mpileaks_zmpi.compiler.satisfies(mpileaks_hash_zmpi.compiler)

     mpileaks_hash_fake_and_zmpi = SpecParser(
         f"mpileaks ^/{fake.dag_hash()[:4]} ^ /{zmpi.dag_hash()[:5]}"

@@ -983,13 +983,6 @@ def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monk
        ("x@1.2%y@1.2@2.3:2.4", "version"),
        # Duplicate dependency
        ("x ^y@1 ^y@2", "Cannot depend on incompatible specs"),
-        # Duplicate compiler
-        ("x%intel%intel", "compiler"),
-        ("x%intel%gcc", "compiler"),
-        ("x%gcc%intel", "compiler"),
-        ("x ^y%intel%intel", "compiler"),
-        ("x ^y%intel%gcc", "compiler"),
-        ("x ^y%gcc%intel", "compiler"),
        # Duplicate Architectures
        ("x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64", "two architectures"),
        ("x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le", "two architectures"),
@@ -1144,7 +1137,7 @@ def test_parse_filename_missing_slash_as_spec(specfile_for, tmpdir, filename):
     )

     # make sure that only happens when the spec ends in yaml
-    with pytest.raises(spack.repo.UnknownPackageError) as exc_info:
+    with pytest.raises(spack.solver.asp.UnsatisfiableSpecError) as exc_info:
         SpecParser("builtin.mock.doesnotexist").next_spec().concretize()
     assert not exc_info.value.long_message or (
         "Did you mean to specify a filename with" not in exc_info.value.long_message