Add efficient deptype flag and spack.deptypes module (#39472)

This commit replaces the internal representation of dependency types (deptypes) with an `int` bit flag, which is more compact and faster to operate on.

Double loops like:
```
any(x in ys for x in xs)
```
are replaced by constant-time bitwise checks, `bool(xs & ys)`, where `xs` and `ys` are dependency type flags.

Global constants are exposed for convenience in `spack.deptypes`.
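For illustration, a minimal sketch of the old check versus the new one (the flag values mirror the constants defined in the new `spack.deptypes` module below):

```python
# Bit-flag values as introduced in spack.deptypes.
LINK, RUN, BUILD, TEST = 0b0001, 0b0010, 0b0100, 0b1000

# Old representation: tuples of strings, compared with a double loop.
xs_old, ys_old = ("build", "link"), ("link", "run")
print(any(x in ys_old for x in xs_old))  # True, O(len(xs) * len(ys))

# New representation: ints, compared with a single bitwise AND.
xs_new, ys_new = BUILD | LINK, LINK | RUN
print(bool(xs_new & ys_new))  # True, constant time
```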
Harmen Stoppels, 2023-09-14 12:25:24 +02:00, committed by GitHub
commit 6838ee6bb8 (parent d50f296d4f)
34 changed files with 532 additions and 458 deletions


```diff
@@ -16,6 +16,7 @@
 import spack.builder
 import spack.config
+import spack.deptypes as dt
 import spack.detection
 import spack.multimethod
 import spack.package_base
@@ -226,7 +227,7 @@ def update_external_dependencies(self, extendee_spec=None):
         python.external_path = self.spec.external_path
         python._mark_concrete()
-        self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
+        self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())


 class PythonPackage(PythonExtension):
```


```diff
@@ -308,7 +308,7 @@ def append_dep(s, d):
        dependencies.append({"spec": s, "depends": d})

    for spec in spec_list:
-        for s in spec.traverse(deptype=all):
+        for s in spec.traverse(deptype="all"):
            if s.external:
                tty.msg("Will not stage external pkg: {0}".format(s))
                continue
@@ -316,7 +316,7 @@ def append_dep(s, d):
            skey = _spec_deps_key(s)
            spec_labels[skey] = s

-            for d in s.dependencies(deptype=all):
+            for d in s.dependencies(deptype="all"):
                dkey = _spec_deps_key(d)
                if d.external:
                    tty.msg("Will not stage external dep: {0}".format(d))
@@ -1035,7 +1035,7 @@ def main_script_replacements(cmd):
            if enable_artifacts_buildcache:
                # Get dependencies transitively, so they're all
                # available in the artifacts buildcache.
-                dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
+                dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
            else:
                # In this case, "needs" is only used for scheduling
                # purposes, so we only get the direct dependencies.
```


```diff
@@ -12,7 +12,7 @@
 import spack.cmd
 import spack.config
-import spack.dependency as dep
+import spack.deptypes as dt
 import spack.environment as ev
 import spack.mirror
 import spack.modules
@@ -114,16 +114,13 @@ def __call__(self, parser, namespace, jobs, option_string):
 class DeptypeAction(argparse.Action):
-    """Creates a tuple of valid dependency types from a deptype argument."""
+    """Creates a flag of valid dependency types from a deptype argument."""

     def __call__(self, parser, namespace, values, option_string=None):
-        deptype = dep.all_deptypes
-        if values:
-            deptype = tuple(x.strip() for x in values.split(","))
-            if deptype == ("all",):
-                deptype = "all"
-            deptype = dep.canonical_deptype(deptype)
+        if not values or values == "all":
+            deptype = dt.ALL
+        else:
+            deptype = dt.canonicalize(values.split(","))

         setattr(namespace, self.dest, deptype)
@@ -285,9 +282,8 @@ def deptype():
     return Args(
         "--deptype",
         action=DeptypeAction,
-        default=dep.all_deptypes,
-        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
-        % ",".join(dep.all_deptypes),
+        default=dt.ALL,
+        help="comma-separated list of deptypes to traverse (default=%s)" % ",".join(dt.ALL_TYPES),
     )
```


```diff
@@ -10,6 +10,7 @@
 import spack.build_environment as build_environment
 import spack.cmd
 import spack.cmd.common.arguments as arguments
+import spack.deptypes as dt
 import spack.error
 import spack.paths
 import spack.spec
@@ -46,9 +47,9 @@ def __init__(self, context="build"):
            raise ValueError("context can only be build or test")
        if context == "build":
-            self.direct_deps = ("build", "link", "run")
+            self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
        else:
-            self.direct_deps = ("build", "test", "link", "run")
+            self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN

        self.has_uninstalled_deps = False
@@ -71,8 +72,8 @@ def accept(self, item):
    def neighbors(self, item):
        # Direct deps: follow build & test edges.
        # Transitive deps: follow link / run.
-        deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
-        return item.edge.spec.edges_to_dependencies(deptype=deptypes)
+        depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
+        return item.edge.spec.edges_to_dependencies(depflag=depflag)


 def emulate_env_utility(cmd_name, context, args):
```


```diff
@@ -74,7 +74,7 @@ def dependencies(parser, args):
            spec,
            transitive=args.transitive,
            expand_virtuals=args.expand_virtuals,
-            deptype=args.deptype,
+            depflag=args.deptype,
        )

        if spec.name in dependencies:
```


```diff
@@ -74,19 +74,19 @@ def graph(parser, args):
    if args.static:
        args.dot = True
-        static_graph_dot(specs, deptype=args.deptype)
+        static_graph_dot(specs, depflag=args.deptype)
        return

    if args.dot:
        builder = SimpleDAG()
        if args.color:
            builder = DAGWithDependencyTypes()
-        graph_dot(specs, builder=builder, deptype=args.deptype)
+        graph_dot(specs, builder=builder, depflag=args.deptype)
        return

    # ascii is default: user doesn't need to provide it explicitly
    debug = spack.config.get("config:debug")
-    graph_ascii(specs[0], debug=debug, deptype=args.deptype)
+    graph_ascii(specs[0], debug=debug, depflag=args.deptype)
    for spec in specs[1:]:
        print()  # extra line bt/w independent graphs
        graph_ascii(spec, debug=debug)
```


```diff
@@ -11,6 +11,7 @@
 from llnl.util.tty.colify import colify

 import spack.cmd.common.arguments as arguments
+import spack.deptypes as dt
 import spack.fetch_strategy as fs
 import spack.install_test
 import spack.repo
@@ -160,7 +161,7 @@ def print_dependencies(pkg):
    for deptype in ("build", "link", "run"):
        color.cprint("")
        color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
-        deps = sorted(pkg.dependencies_of_type(deptype))
+        deps = sorted(pkg.dependencies_of_type(dt.flag_from_string(deptype)))
        if deps:
            colify(deps, indent=4)
        else:
```


```diff
@@ -16,7 +16,7 @@
 from llnl.util.tty.colify import colify

 import spack.cmd.common.arguments as arguments
-import spack.dependency
+import spack.deptypes as dt
 import spack.repo
 from spack.version import VersionList
@@ -149,8 +149,8 @@ def rows_for_ncols(elts, ncols):
 def get_dependencies(pkg):
     all_deps = {}
-    for deptype in spack.dependency.all_deptypes:
-        deps = pkg.dependencies_of_type(deptype)
+    for deptype in dt.ALL_TYPES:
+        deps = pkg.dependencies_of_type(dt.flag_from_string(deptype))
         all_deps[deptype] = [d for d in deps]

     return all_deps
@@ -275,8 +275,8 @@ def head(n, span_id, title, anchor=None):
        out.write("\n")
        out.write("</dd>\n")

-        for deptype in spack.dependency.all_deptypes:
-            deps = pkg_cls.dependencies_of_type(deptype)
+        for deptype in dt.ALL_TYPES:
+            deps = pkg_cls.dependencies_of_type(dt.flag_from_string(deptype))
            if deps:
                out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
                out.write("<dd>\n")
```


```diff
@@ -11,6 +11,7 @@
 import llnl.util.tty as tty

 import spack.cmd
+import spack.deptypes as dt
 import spack.error
 import spack.hash_types as hash_types
 import spack.platforms
@@ -158,13 +159,13 @@ def entries_to_specs(entries):
            dependencies = entry["dependencies"]
            for name, properties in dependencies.items():
                dep_hash = properties["hash"]
-                deptypes = properties["type"]
+                depflag = dt.canonicalize(properties["type"])
                if dep_hash in spec_dict:
                    if entry["hash"] not in spec_dict:
                        continue
                    parent_spec = spec_dict[entry["hash"]]
                    dep_spec = spec_dict[dep_hash]
-                    parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
+                    parent_spec._add_dependency(dep_spec, depflag=depflag, virtuals=())

    for spec in spec_dict.values():
        spack.spec.reconstruct_virtuals_on_edges(spec)
```


```diff
@@ -27,6 +27,8 @@
 import time
 from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

+import spack.deptypes as dt

 try:
     import uuid
@@ -89,7 +91,7 @@
 #: Types of dependencies tracked by the database
 #: We store by DAG hash, so we track the dependencies that the DAG hash includes.
-_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
+_TRACKED_DEPENDENCIES = ht.dag_hash.depflag

 #: Default list of fields written for each install record
 DEFAULT_INSTALL_RECORD_FIELDS = (
@@ -795,7 +797,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
                tty.warn(msg)
                continue

-            spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
+            spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)

    def _read_from_file(self, filename):
        """Fill database from file, do not maintain old data.
@@ -1146,7 +1148,7 @@ def _add(
        # Retrieve optional arguments
        installation_time = installation_time or _now()

-        for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
+        for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
            if edge.spec.dag_hash() in self._data:
                continue
            # allow missing build-only deps. This prevents excessive
@@ -1154,7 +1156,7 @@ def _add(
            # is missing a build dep; there's no need to install the
            # build dep's build dep first, and there's no need to warn
            # about it missing.
-            dep_allow_missing = allow_missing or edge.deptypes == ("build",)
+            dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
            self._add(
                edge.spec,
                directory_layout,
@@ -1198,10 +1200,10 @@ def _add(
            self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)

            # Connect dependencies from the DB to the new copy.
-            for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
+            for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
                dkey = dep.spec.dag_hash()
                upstream, record = self.query_by_spec_hash(dkey)
-                new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
+                new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
                if not upstream:
                    record.ref_count += 1
@@ -1371,7 +1373,13 @@ def deprecate(self, spec, deprecator):
        return self._deprecate(spec, deprecator)

    @_autospec
-    def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
+    def installed_relatives(
+        self,
+        spec,
+        direction="children",
+        transitive=True,
+        deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
+    ):
        """Return installed specs related to this one."""
        if direction not in ("parents", "children"):
            raise ValueError("Invalid direction: %s" % direction)
```


```diff
@@ -3,64 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Data structures that represent Spack's dependency relationships."""
-from typing import Dict, List, Optional, Set, Tuple, Union
+from typing import Dict, List

+import spack.deptypes as dt
 import spack.spec

-#: The types of dependency relationships that Spack understands.
-all_deptypes = ("build", "link", "run", "test")
-
-#: Default dependency type if none is specified
-default_deptype = ("build", "link")
-
-#: Type hint for the arguments accepting a dependency type
-DependencyArgument = Union[str, List[str], Tuple[str, ...]]
-
-
-def deptype_chars(*type_tuples: str) -> str:
-    """Create a string representing deptypes for many dependencies.
-
-    The string will be some subset of 'blrt', like 'bl ', 'b t', or
-    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
-    'run', and 'test' (the dependency types).
-
-    For a single dependency, this just indicates that the dependency has
-    the indicated deptypes. For a list of dependnecies, this shows
-    whether ANY dpeendency in the list has the deptypes (so the deptypes
-    are merged).
-    """
-    types: Set[str] = set()
-    for t in type_tuples:
-        if t:
-            types.update(t)
-    return "".join(t[0] if t in types else " " for t in all_deptypes)
-
-
-def canonical_deptype(deptype: DependencyArgument) -> Tuple[str, ...]:
-    """Convert deptype to a canonical sorted tuple, or raise ValueError.
-
-    Args:
-        deptype: string representing dependency type, or a list/tuple of such strings.
-            Can also be the builtin function ``all`` or the string 'all', which result in
-            a tuple of all dependency types known to Spack.
-    """
-    if deptype in ("all", all):
-        return all_deptypes
-    elif isinstance(deptype, str):
-        if deptype not in all_deptypes:
-            raise ValueError("Invalid dependency type: %s" % deptype)
-        return (deptype,)
-    elif isinstance(deptype, (tuple, list, set)):
-        bad = [d for d in deptype if d not in all_deptypes]
-        if bad:
-            raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
-        return tuple(sorted(set(deptype)))
-    raise ValueError("Invalid dependency type: %s" % repr(deptype))


 class Dependency:
     """Class representing metadata for a dependency on a package.
@@ -93,7 +40,7 @@ def __init__(
         self,
         pkg: "spack.package_base.PackageBase",
         spec: "spack.spec.Spec",
-        type: Optional[Tuple[str, ...]] = default_deptype,
+        depflag: dt.DepFlag = dt.DEFAULT,
     ):
         """Create a new Dependency.
@@ -110,11 +57,7 @@ def __init__(
         # This dict maps condition specs to lists of Patch objects, just
         # as the patches dict on packages does.
         self.patches: Dict[spack.spec.Spec, "List[spack.patch.Patch]"] = {}
-
-        if type is None:
-            self.type = set(default_deptype)
-        else:
-            self.type = set(type)
+        self.depflag = depflag

     @property
     def name(self) -> str:
@@ -124,7 +67,7 @@ def name(self) -> str:
     def merge(self, other: "Dependency"):
         """Merge constraints, deptypes, and patches of other into self."""
         self.spec.constrain(other.spec)
-        self.type |= other.type
+        self.depflag |= other.depflag

         # concatenate patch lists, or just copy them in
         for cond, p in other.patches.items():
@@ -135,5 +78,5 @@ def merge(self, other: "Dependency"):
             self.patches[cond] = other.patches[cond]

     def __repr__(self) -> str:
-        types = deptype_chars(*self.type)
+        types = dt.flag_to_chars(self.depflag)
         return f"<Dependency: {self.pkg.name} -> {self.spec} [{types}]>"
```

lib/spack/spack/deptypes.py (new file, 123 lines):

```python
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's edge types."""

from typing import Iterable, List, Tuple, Union

#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int

#: Type hint for the high-level dependency input
DepTypes = Union[str, List[str], Tuple[str, ...]]

#: Individual dependency types
DepType = str  # Python 3.8: Literal["build", "link", "run", "test"]

# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,
# and we rely on default integer comparison to sort dependency types.
# New dependency types should be appended.
LINK = 0b0001
RUN = 0b0010
BUILD = 0b0100
TEST = 0b1000

#: The types of dependency relationships that Spack understands.
ALL_TYPES: Tuple[DepType, ...] = ("build", "link", "run", "test")

#: Default dependency type if none is specified
DEFAULT_TYPES: Tuple[DepType, ...] = ("build", "link")

#: A flag with all dependency types set
ALL: DepFlag = BUILD | LINK | RUN | TEST

#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK

#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)


def flag_from_string(s: str) -> DepFlag:
    if s == "build":
        return BUILD
    elif s == "link":
        return LINK
    elif s == "run":
        return RUN
    elif s == "test":
        return TEST
    else:
        raise ValueError(f"Invalid dependency type: {s}")


def flag_from_strings(deptype: Iterable[str]) -> DepFlag:
    """Transform an iterable of deptype strings into a flag."""
    flag = 0
    for deptype_str in deptype:
        flag |= flag_from_string(deptype_str)
    return flag


def canonicalize(deptype: DepTypes) -> DepFlag:
    """Convert deptype user input to a DepFlag, or raise ValueError.

    Args:
        deptype: string representing dependency type, or a list/tuple of such strings.
            Can also be the builtin function ``all`` or the string 'all', which result in
            a tuple of all dependency types known to Spack.
    """
    if deptype in ("all", all):
        return ALL

    if isinstance(deptype, str):
        return flag_from_string(deptype)

    if isinstance(deptype, (tuple, list, set)):
        return flag_from_strings(deptype)

    raise ValueError(f"Invalid dependency type: {deptype!r}")


def flag_to_tuple(x: DepFlag) -> Tuple[DepType, ...]:
    deptype: List[DepType] = []
    if x & BUILD:
        deptype.append("build")
    if x & LINK:
        deptype.append("link")
    if x & RUN:
        deptype.append("run")
    if x & TEST:
        deptype.append("test")
    return tuple(deptype)


def flag_to_string(x: DepFlag) -> DepType:
    if x == BUILD:
        return "build"
    elif x == LINK:
        return "link"
    elif x == RUN:
        return "run"
    elif x == TEST:
        return "test"
    else:
        raise ValueError(f"Invalid dependency type flag: {x}")


def flag_to_chars(depflag: DepFlag) -> str:
    """Create a string representing deptypes for many dependencies.

    The string will be some subset of 'blrt', like 'bl ', 'b t', or
    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
    'run', and 'test' (the dependency types).

    For a single dependency, this just indicates that the dependency has
    the indicated deptypes. For a list of dependencies, this shows
    whether ANY dependency in the list has the deptypes (so the deptypes
    are merged)."""
    return "".join(
        t_str[0] if t_flag & depflag else " " for t_str, t_flag in zip(ALL_TYPES, ALL_FLAGS)
    )
```
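A few illustrative uses of the new module (a sketch; the calls and constants are the ones defined above):

```python
import spack.deptypes as dt

# User input in string form canonicalizes to a single int flag.
assert dt.canonicalize("build") == dt.BUILD
assert dt.canonicalize(["build", "run"]) == dt.BUILD | dt.RUN
assert dt.canonicalize("all") == dt.ALL

# Membership tests become a bitwise AND instead of tuple containment.
depflag = dt.BUILD | dt.LINK
assert depflag & dt.LINK
assert not depflag & dt.TEST

# Round-trip back to the string forms used for serialization and display.
assert dt.flag_to_tuple(depflag) == ("build", "link")
print(repr(dt.flag_to_chars(depflag)))  # 'bl  ' -- one char per type in 'blrt' order
```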


```diff
@@ -38,13 +38,14 @@ class OpenMpi(Package):
 import llnl.util.lang
 import llnl.util.tty.color

+import spack.deptypes as dt
 import spack.error
 import spack.patch
 import spack.spec
 import spack.url
 import spack.util.crypto
 import spack.variant
-from spack.dependency import Dependency, canonical_deptype, default_deptype
+from spack.dependency import Dependency
 from spack.fetch_strategy import from_kwargs
 from spack.resource import Resource
 from spack.version import (
@@ -436,7 +437,7 @@ def _execute_version(pkg, ver, **kwargs):
     pkg.versions[version] = kwargs


-def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
+def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
     when_spec = make_when_spec(when)
     if not when_spec:
         return
@@ -447,7 +448,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
     if pkg.name == dep_spec.name:
         raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)

-    type = canonical_deptype(type)
+    depflag = dt.canonicalize(type)
     conditions = pkg.dependencies.setdefault(dep_spec.name, {})

     # call this patches here for clarity -- we want patch to be a list,
@@ -477,12 +478,12 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
     # this is where we actually add the dependency to this package
     if when_spec not in conditions:
-        dependency = Dependency(pkg, dep_spec, type=type)
+        dependency = Dependency(pkg, dep_spec, depflag=depflag)
         conditions[when_spec] = dependency
     else:
         dependency = conditions[when_spec]
         dependency.spec.constrain(dep_spec, deps=False)
-        dependency.type |= set(type)
+        dependency.depflag |= depflag

     # apply patches to the dependency
     for execute_patch in patches:
@@ -525,7 +526,7 @@ def _execute_conflicts(pkg):
 @directive(("dependencies"))
-def depends_on(spec, when=None, type=default_deptype, patches=None):
+def depends_on(spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
     """Creates a dict of deps with specs defining when they apply.

     Args:
```


```diff
@@ -12,6 +12,7 @@
 from enum import Enum
 from typing import List, Optional

+import spack.deptypes as dt
 import spack.environment.environment as ev
 import spack.spec
 import spack.traverse as traverse
@@ -36,7 +37,9 @@ def from_string(s: str) -> "UseBuildCache":
 def _deptypes(use_buildcache: UseBuildCache):
     """What edges should we follow for a given node? If it's a cache-only
     node, then we can drop build type deps."""
-    return ("link", "run") if use_buildcache == UseBuildCache.ONLY else ("build", "link", "run")
+    return (
+        dt.LINK | dt.RUN if use_buildcache == UseBuildCache.ONLY else dt.BUILD | dt.LINK | dt.RUN
+    )


 class DepfileNode:
@@ -69,13 +72,13 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
        self.adjacency_list: List[DepfileNode] = []
        self.pkg_buildcache = pkg_buildcache
        self.deps_buildcache = deps_buildcache
-        self.deptypes_root = _deptypes(pkg_buildcache)
-        self.deptypes_deps = _deptypes(deps_buildcache)
+        self.depflag_root = _deptypes(pkg_buildcache)
+        self.depflag_deps = _deptypes(deps_buildcache)

    def neighbors(self, node):
        """Produce a list of spec to follow from node"""
-        deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
-        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
+        depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
+        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))

    def accept(self, node):
        self.adjacency_list.append(
```


```diff
@@ -28,6 +28,7 @@
 import spack.compilers
 import spack.concretize
 import spack.config
+import spack.deptypes as dt
 import spack.error
 import spack.fetch_strategy
 import spack.hash_types as ht
@@ -1536,13 +1537,13 @@ def _concretize_separately(self, tests=False):
        for h in self.specs_by_hash:
            current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
            for node in computed_spec.traverse():
-                test_edges = node.edges_to_dependencies(deptype="test")
+                test_edges = node.edges_to_dependencies(depflag=dt.TEST)
                for current_edge in test_edges:
                    test_dependency = current_edge.spec
                    if test_dependency in current_spec[node.name]:
                        continue
                    current_spec[node.name].add_dependency_edge(
-                        test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
+                        test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
                    )

        results = [
@@ -2190,7 +2191,7 @@ def _read_lockfile_dict(self, d):
            name, data = reader.name_and_data(node_dict)
            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
                specs_by_hash[lockfile_key]._add_dependency(
-                    specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
+                    specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
                )

        # Traverse the root specs one at a time in the order they appear.
```


```diff
@@ -38,11 +38,12 @@
 """
 import enum
 import sys
-from typing import List, Optional, Set, TextIO, Tuple, Union
+from typing import List, Optional, Set, TextIO, Tuple

 import llnl.util.tty.color

-import spack.dependency
+import spack.deptypes as dt
+import spack.repo
 import spack.spec
 import spack.tengine
@@ -78,7 +79,7 @@ def __init__(self):
        self.node_character = "o"
        self.debug = False
        self.indent = 0
-        self.deptype = spack.dependency.all_deptypes
+        self.depflag = dt.ALL

        # These are colors in the order they'll be used for edges.
        # See llnl.util.tty.color for details on color characters.
@@ -326,7 +327,7 @@ def write(self, spec, color=None, out=None):
        nodes_in_topological_order = [
            edge.spec
            for edge in spack.traverse.traverse_edges_topo(
-                [spec], direction="children", deptype=self.deptype
+                [spec], direction="children", deptype=self.depflag
            )
        ]
        nodes_in_topological_order.reverse()
@@ -424,7 +425,7 @@ def write(self, spec, color=None, out=None):
                # Replace node with its dependencies
                self._frontier.pop(i)
-                edges = sorted(node.edges_to_dependencies(deptype=self.deptype), reverse=True)
+                edges = sorted(node.edges_to_dependencies(depflag=self.depflag), reverse=True)
                if edges:
                    deps = [e.spec.dag_hash() for e in edges]
                    self._connect_deps(i, deps, "new-deps")  # anywhere.
@@ -433,13 +434,14 @@ def write(self, spec, color=None, out=None):
                    self._collapse_line(i)


-def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, deptype="all"):
+def graph_ascii(
+    spec, node="o", out=None, debug=False, indent=0, color=None, depflag: dt.DepFlag = dt.ALL
+):
    graph = AsciiGraph()
    graph.debug = debug
    graph.indent = indent
    graph.node_character = node
-    if deptype:
-        graph.deptype = spack.dependency.canonical_deptype(deptype)
+    graph.depflag = depflag

    graph.write(spec, color=color, out=out)
@@ -513,7 +515,7 @@ def __init__(self):
    def visit(self, edge):
        if edge.parent is None:
-            for node in spack.traverse.traverse_nodes([edge.spec], deptype=("link", "run")):
+            for node in spack.traverse.traverse_nodes([edge.spec], deptype=dt.LINK | dt.RUN):
                self.main_unified_space.add(node.dag_hash())
        super().visit(edge)
@@ -533,36 +535,34 @@ def edge_entry(self, edge):
    )


-def _static_edges(specs, deptype):
+def _static_edges(specs, depflag):
    for spec in specs:
        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
-        possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
+        possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)

        for parent_name, dependencies in possible.items():
            for dependency_name in dependencies:
                yield spack.spec.DependencySpec(
                    spack.spec.Spec(parent_name),
                    spack.spec.Spec(dependency_name),
-                    deptypes=deptype,
+                    depflag=depflag,
                    virtuals=(),
                )


 def static_graph_dot(
-    specs: List[spack.spec.Spec],
-    deptype: Optional[Union[str, Tuple[str, ...]]] = "all",
-    out: Optional[TextIO] = None,
+    specs: List[spack.spec.Spec], depflag: dt.DepFlag = dt.ALL, out: Optional[TextIO] = None
 ):
    """Static DOT graph with edges to all possible dependencies.

    Args:
        specs: abstract specs to be represented
-        deptype: dependency types to consider
+        depflag: dependency types to consider
        out: optional output stream. If None sys.stdout is used
    """
    out = out or sys.stdout
    builder = StaticDag()
-    for edge in _static_edges(specs, deptype):
+    for edge in _static_edges(specs, depflag):
        builder.visit(edge)
    out.write(builder.render())
@@ -570,7 +570,7 @@ def static_graph_dot(
 def graph_dot(
     specs: List[spack.spec.Spec],
     builder: Optional[DotGraphBuilder] = None,
-    deptype: spack.dependency.DependencyArgument = "all",
+    depflag: dt.DepFlag = dt.ALL,
     out: Optional[TextIO] = None,
 ):
    """DOT graph of the concrete specs passed as input.
@@ -578,7 +578,7 @@ def graph_dot(
    Args:
        specs: specs to be represented
        builder: builder to use to render the graph
-        deptype: dependency types to consider
+        depflag: dependency types to consider
        out: optional output stream. If None sys.stdout is used
    """
    if not specs:
@@ -587,10 +587,9 @@ def graph_dot(
    if out is None:
        out = sys.stdout

-    deptype = spack.dependency.canonical_deptype(deptype)
    builder = builder or SimpleDAG()
    for edge in spack.traverse.traverse_edges(
-        specs, cover="edges", order="breadth", deptype=deptype
+        specs, cover="edges", order="breadth", deptype=depflag
    ):
        builder.visit(edge)
```


```diff
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Definitions that control how Spack creates Spec hashes."""

-import spack.dependency as dp
+import spack.deptypes as dt
 import spack.repo

 hashes = []
@@ -20,8 +20,8 @@ class SpecHashDescriptor:
     We currently use different hashes for different use cases."""

-    def __init__(self, deptype, package_hash, name, override=None):
-        self.deptype = dp.canonical_deptype(deptype)
+    def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
+        self.depflag = depflag
         self.package_hash = package_hash
         self.name = name
         hashes.append(self)
@@ -39,12 +39,12 @@ def __call__(self, spec):
 #: Spack's deployment hash. Includes all inputs that can affect how a package is built.
-dag_hash = SpecHashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
+dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")

 #: Hash descriptor used only to transfer a DAG, as is, across processes
 process_hash = SpecHashDescriptor(
-    deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
+    depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
 )
@@ -56,7 +56,7 @@ def _content_hash_override(spec):
 #: Package hash used as part of dag hash
 package_hash = SpecHashDescriptor(
-    deptype=(), package_hash=True, name="package_hash", override=_content_hash_override
+    depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
 )
@@ -64,10 +64,10 @@ def _content_hash_override(spec):
 # spec formats
 full_hash = SpecHashDescriptor(
-    deptype=("build", "link", "run"), package_hash=True, name="full_hash"
+    depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="full_hash"
 )

 build_hash = SpecHashDescriptor(
-    deptype=("build", "link", "run"), package_hash=False, name="build_hash"
+    depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=False, name="build_hash"
 )
```


```diff
@@ -50,6 +50,7 @@
 import spack.compilers
 import spack.config
 import spack.database
+import spack.deptypes as dt
 import spack.error
 import spack.hooks
 import spack.mirror
@@ -313,7 +314,7 @@ def _packages_needed_to_bootstrap_compiler(
    # mark compiler as depended-on by the packages that use it
    for pkg in pkgs:
        dep._dependents.add(
-            spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
+            spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
        )
    packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
@@ -788,10 +789,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
        # Save off dependency package ids for quick checks since traversals
        # are not able to return full dependents for all packages across
        # environment specs.
-        deptypes = self.get_deptypes(self.pkg)
        self.dependencies = set(
            package_id(d.package)
-            for d in self.pkg.spec.dependencies(deptype=deptypes)
+            for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
            if package_id(d.package) != self.pkg_id
        )
@@ -830,7 +830,7 @@ def _add_default_args(self) -> None:
        ]:
            _ = self.install_args.setdefault(arg, default)

-    def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]:
+    def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
        """Determine the required dependency types for the associated package.

        Args:
@@ -839,7 +839,7 @@ def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]
        Returns:
            tuple: required dependency type(s) for the package
        """
-        deptypes = ["link", "run"]
+        depflag = dt.LINK | dt.RUN
        include_build_deps = self.install_args.get("include_build_deps")

        if self.pkg_id == package_id(pkg):
@@ -851,10 +851,10 @@ def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]
        # is False, or if build depdencies are explicitly called for
        # by include_build_deps.
        if include_build_deps or not (cache_only or pkg.spec.installed):
-            deptypes.append("build")
+            depflag |= dt.BUILD
        if self.run_tests(pkg):
-            deptypes.append("test")
+            depflag |= dt.TEST
-        return tuple(sorted(deptypes))
+        return depflag

    def has_dependency(self, dep_id) -> bool:
        """Returns ``True`` if the package id represents a known dependency
@@ -887,9 +887,8 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
            spec = self.spec
        if visited is None:
            visited = set()
-        deptype = self.get_deptypes(spec.package)

-        for dep in spec.dependencies(deptype=deptype):
+        for dep in spec.dependencies(deptype=self.get_depflags(spec.package)):
            hash = dep.dag_hash()
            if hash in visited:
                continue
@@ -973,10 +972,9 @@ def __init__(
        # Be consistent wrt use of dependents and dependencies. That is,
        # if use traverse for transitive dependencies, then must remove
        # transitive dependents on failure.
-        deptypes = self.request.get_deptypes(self.pkg)
        self.dependencies = set(
            package_id(d.package)
-            for d in self.pkg.spec.dependencies(deptype=deptypes)
+            for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
            if package_id(d.package) != self.pkg_id
        )
```


```diff
@@ -716,7 +716,7 @@ def __call__(self, *argv, **kwargs):
        out = io.StringIO()
        try:
-            with log_output(out):
+            with log_output(out, echo=True):
                self.returncode = _invoke_command(self.command, self.parser, args, unknown)

        except SystemExit as e:
```


```diff
@@ -67,7 +67,7 @@
 from spack.build_systems.waf import WafPackage
 from spack.build_systems.xorg import XorgPackage
 from spack.builder import run_after, run_before
-from spack.dependency import all_deptypes
+from spack.deptypes import ALL_TYPES as all_deptypes
 from spack.directives import *
 from spack.install_test import (
     SkipTest,
```


```diff
@@ -34,7 +34,7 @@
 import spack.compilers
 import spack.config
-import spack.dependency
+import spack.deptypes as dt
 import spack.directives
 import spack.directory_layout
 import spack.environment
@@ -525,6 +525,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     # This allows analysis tools to correctly interpret the class attributes.
     versions: dict

+    # Same for dependencies
+    dependencies: dict
+
     #: By default, packages are not virtual
     #: Virtual packages override this attribute
     virtual = False
@@ -682,7 +685,7 @@ def possible_dependencies(
         cls,
         transitive=True,
         expand_virtuals=True,
-        deptype="all",
+        depflag: dt.DepFlag = dt.ALL,
         visited=None,
         missing=None,
         virtuals=None,
@@ -694,7 +697,7 @@ def possible_dependencies(
                True, only direct dependencies if False (default True)..
            expand_virtuals (bool or None): expand virtual dependencies into
                all possible implementations (default True)
-            deptype (str or tuple or None): dependency types to consider
+            depflag: dependency types to consider
            visited (dict or None): dict of names of dependencies visited so
                far, mapped to their immediate dependencies' names.
            missing (dict or None): dict to populate with packages and their
@@ -720,8 +723,6 @@ def possible_dependencies(
        Note: the returned dict *includes* the package itself.
        """
-        deptype = spack.dependency.canonical_deptype(deptype)
-
        visited = {} if visited is None else visited
        missing = {} if missing is None else missing
@@ -729,9 +730,10 @@ def possible_dependencies(
        for name, conditions in cls.dependencies.items():
            # check whether this dependency could be of the type asked for
-            deptypes = [dep.type for cond, dep in conditions.items()]
-            deptypes = set.union(*deptypes)
-            if not any(d in deptypes for d in deptype):
+            depflag_union = 0
+            for dep in conditions.values():
+                depflag_union |= dep.depflag
+            if not (depflag & depflag_union):
                continue

            # expand virtuals if enabled, otherwise just stop at virtuals
@@ -770,7 +772,7 @@ def possible_dependencies(
                    continue
                dep_cls.possible_dependencies(
-                    transitive, expand_virtuals, deptype, visited, missing, virtuals
+                    transitive, expand_virtuals, depflag, visited, missing, virtuals
                )

        return visited
@@ -1203,7 +1205,7 @@ def fetcher(self, f):
        self._fetcher.set_package(self)

    @classmethod
-    def dependencies_of_type(cls, *deptypes):
+    def dependencies_of_type(cls, deptypes: dt.DepFlag):
        """Get dependencies that can possibly have these deptypes.

        This analyzes the package and determines which dependencies *can*
@@ -1215,7 +1217,7 @@ def dependencies_of_type(cls, *deptypes):
        return dict(
            (name, conds)
            for name, conds in cls.dependencies.items()
-            if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
+            if any(deptypes & cls.dependencies[name][cond].depflag for cond in conds)
        )

    # TODO: allow more than one active extendee.
```


```diff
@@ -288,7 +288,7 @@ def next_spec(
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

-                root_spec._add_dependency(dependency, deptypes=(), virtuals=())
+                root_spec._add_dependency(dependency, depflag=0, virtuals=())

            else:
                break
```


```diff
@@ -17,6 +17,8 @@
 import archspec.cpu

+import spack.deptypes as dt
+
 try:
     import clingo  # type: ignore[import]
@@ -34,7 +36,6 @@
 import spack.cmd
 import spack.compilers
 import spack.config
-import spack.dependency
 import spack.directives
 import spack.environment as ev
 import spack.error
@@ -1462,18 +1463,18 @@ def package_dependencies_rules(self, pkg):
        """Translate 'depends_on' directives into ASP logic."""
        for _, conditions in sorted(pkg.dependencies.items()):
            for cond, dep in sorted(conditions.items()):
-                deptypes = dep.type.copy()
+                depflag = dep.depflag
                # Skip test dependencies if they're not requested
                if not self.tests:
-                    deptypes.discard("test")
+                    depflag &= ~dt.TEST

                # ... or if they are requested only for certain packages
-                if not isinstance(self.tests, bool) and pkg.name not in self.tests:
-                    deptypes.discard("test")
+                elif not isinstance(self.tests, bool) and pkg.name not in self.tests:
+                    depflag &= ~dt.TEST

                # if there are no dependency types to be considered
                # anymore, don't generate the dependency
-                if not deptypes:
+                if not depflag:
                    continue

                msg = "%s depends on %s" % (pkg.name, dep.spec.name)
@@ -1487,9 +1488,10 @@ def package_dependencies_rules(self, pkg):
                    fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
                )

-                for t in sorted(deptypes):
-                    # there is a declared dependency of type t
-                    self.gen.fact(fn.dependency_type(condition_id, t))
+                for t in dt.ALL_FLAGS:
+                    if t & depflag:
+                        # there is a declared dependency of type t
+                        self.gen.fact(fn.dependency_type(condition_id, dt.flag_to_string(t)))

                self.gen.newline()
@@ -1863,9 +1865,11 @@ class Body:
        if spec.concrete:
            # We know dependencies are real for concrete specs. For abstract
            # specs they just mean the dep is somehow in the DAG.
-            for dtype in dspec.deptypes:
+            for dtype in dt.ALL_FLAGS:
+                if not dspec.depflag & dtype:
+                    continue
                # skip build dependencies of already-installed specs
-                if concrete_build_deps or dtype != "build":
+                if concrete_build_deps or dtype != dt.BUILD:
                    clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))

            for virtual_name in dspec.virtuals:
                clauses.append(
@@ -1875,7 +1879,7 @@ class Body:
            # imposing hash constraints for all but pure build deps of
            # already-installed concrete specs.
-            if concrete_build_deps or dspec.deptypes != ("build",):
+            if concrete_build_deps or dspec.depflag != dt.BUILD:
                clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))

        # if the spec is abstract, descend into dependencies.
@@ -2658,13 +2662,14 @@ def depends_on(self, parent_node, dependency_node, type):
        dependency_spec = self._specs[dependency_node]
        edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
        edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
+        depflag = dt.flag_from_string(type)
        if not edges:
            self._specs[parent_node].add_dependency_edge(
-                self._specs[dependency_node], deptypes=(type,), virtuals=()
+                self._specs[dependency_node], depflag=depflag, virtuals=()
            )
        else:
-            edges[0].update_deptypes(deptypes=(type,))
+            edges[0].update_deptypes(depflag=depflag)

    def virtual_on_edge(self, parent_node, provider_node, virtual):
        dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
```


```diff
@@ -3,10 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections
-from typing import List, Set, Tuple
+from typing import List, Set

-import spack.dependency
+import spack.deptypes as dt
 import spack.package_base
+import spack.repo

 PossibleDependencies = Set[str]
@@ -23,11 +24,11 @@ class Counter:
     def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
         self.specs = specs

-        self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
-        self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
+        self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
+        self.all_types: dt.DepFlag = dt.ALL
         if not tests:
-            self.link_run_types = ("link", "run")
-            self.all_types = ("link", "run", "build")
+            self.link_run_types = dt.LINK | dt.RUN
+            self.all_types = dt.LINK | dt.RUN | dt.BUILD

         self._possible_dependencies: PossibleDependencies = set()
         self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
@@ -59,7 +60,7 @@ def _compute_cache_values(self):
 class NoDuplicatesCounter(Counter):
     def _compute_cache_values(self):
         result = spack.package_base.possible_dependencies(
-            *self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
+            *self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
         )
         self._possible_dependencies = set(result)
@@ -89,17 +90,17 @@ def __init__(self, specs, tests):
     def _compute_cache_values(self):
         self._link_run = set(
             spack.package_base.possible_dependencies(
-                *self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
+                *self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
             )
         )
         self._link_run_virtuals.update(self._possible_virtuals)
         for x in self._link_run:
-            current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
+            current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
             self._direct_build.update(current)
         self._total_build = set(
             spack.package_base.possible_dependencies(
-                *self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
+                *self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
             )
         )
         self._possible_dependencies = set(self._link_run) | set(self._total_build)
```


@ -67,6 +67,7 @@
import spack.compilers import spack.compilers
import spack.config import spack.config
import spack.dependency as dp import spack.dependency as dp
import spack.deptypes as dt
import spack.error import spack.error
import spack.hash_types as ht import spack.hash_types as ht
import spack.paths import spack.paths
@ -727,81 +728,54 @@ class DependencySpec:
Args: Args:
parent: starting node of the edge parent: starting node of the edge
spec: ending node of the edge. spec: ending node of the edge.
deptypes: list of strings, representing dependency relationships. depflag: represents dependency relationships.
virtuals: virtual packages provided from child to parent node. virtuals: virtual packages provided from child to parent node.
""" """
__slots__ = "parent", "spec", "parameters" __slots__ = "parent", "spec", "depflag", "virtuals"
def __init__( def __init__(
self, self, parent: "Spec", spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
parent: "Spec",
spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
): ):
self.parent = parent self.parent = parent
self.spec = spec self.spec = spec
self.parameters = { self.depflag = depflag
"deptypes": dp.canonical_deptype(deptypes), self.virtuals = virtuals
"virtuals": tuple(sorted(set(virtuals))),
}
@property def update_deptypes(self, depflag: dt.DepFlag) -> bool:
def deptypes(self) -> Tuple[str, ...]:
return self.parameters["deptypes"]
@property
def virtuals(self) -> Tuple[str, ...]:
return self.parameters["virtuals"]
def _update_edge_multivalued_property(
self, property_name: str, value: Tuple[str, ...]
) -> bool:
current = self.parameters[property_name]
update = set(current) | set(value)
update = tuple(sorted(update))
changed = current != update
if not changed:
return False
self.parameters[property_name] = update
return True
def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
"""Update the current dependency types""" """Update the current dependency types"""
return self._update_edge_multivalued_property("deptypes", deptypes) old = self.depflag
new = depflag | old
if new == old:
return False
self.depflag = new
return True
def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool: def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals""" """Update the list of provided virtuals"""
return self._update_edge_multivalued_property("virtuals", virtuals) old = self.virtuals
self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
return old != self.virtuals
def copy(self) -> "DependencySpec": def copy(self) -> "DependencySpec":
"""Return a copy of this edge""" """Return a copy of this edge"""
return DependencySpec( return DependencySpec(self.parent, self.spec, depflag=self.depflag, virtuals=self.virtuals)
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
)
def _cmp_iter(self): def _cmp_iter(self):
yield self.parent.name if self.parent else None yield self.parent.name if self.parent else None
yield self.spec.name if self.spec else None yield self.spec.name if self.spec else None
yield self.deptypes yield self.depflag
yield self.virtuals yield self.virtuals
def __str__(self) -> str: def __str__(self) -> str:
parent = self.parent.name if self.parent else None parent = self.parent.name if self.parent else None
child = self.spec.name if self.spec else None child = self.spec.name if self.spec else None
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}" return f"{parent} {self.depflag}[virtuals={','.join(self.virtuals)}] --> {child}"
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals
def flip(self) -> "DependencySpec": def flip(self) -> "DependencySpec":
"""Flip the dependency, and drop virtual information""" """Flip the dependency, and drop virtual information"""
return DependencySpec( return DependencySpec(
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=() parent=self.spec, spec=self.parent, depflag=self.depflag, virtuals=()
) )
@ -946,9 +920,8 @@ def __str__(self):
) )
def _sort_by_dep_types(dspec): def _sort_by_dep_types(dspec: DependencySpec):
# Use negation since False < True for sorting return dspec.depflag
return tuple(t not in dspec.deptypes for t in ("link", "run", "build", "test"))
#: Enum for edge directions #: Enum for edge directions
@ -1014,7 +987,7 @@ def copy(self):
return clone return clone
def select(self, parent=None, child=None, deptypes=dp.all_deptypes): def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
"""Select a list of edges and return them. """Select a list of edges and return them.
If an edge: If an edge:
@ -1022,18 +995,18 @@ def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
- Matches the parent and/or child name, if passed - Matches the parent and/or child name, if passed
then it is selected. then it is selected.
The deptypes argument needs to be canonical, since the method won't The depflag argument needs to be a flag, since the method won't
convert it for performance reasons. convert it for performance reasons.
Args: Args:
parent (str): name of the parent package parent (str): name of the parent package
child (str): name of the child package child (str): name of the child package
deptypes (tuple): allowed dependency types in canonical form depflag: allowed dependency types in flag form
Returns: Returns:
List of DependencySpec objects List of DependencySpec objects
""" """
if not deptypes: if not depflag:
return [] return []
# Start from all the edges we store # Start from all the edges we store
@ -1048,12 +1021,7 @@ def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
selected = (d for d in selected if d.spec.name == child) selected = (d for d in selected if d.spec.name == child)
# Filter by allowed dependency types # Filter by allowed dependency types
if deptypes: selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
selected = (
dep
for dep in selected
if not dep.deptypes or any(d in deptypes for d in dep.deptypes)
)
return list(selected) return list(selected)
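
The filter at the end of `select` keeps an edge when it carries no dependency type at all or when it shares at least one bit with the requested flag. A tiny sketch of that rule, with made-up flag values and edge names:

```
BUILD, LINK, RUN = 1, 2, 4  # illustrative values only

edges = [("cmake", BUILD), ("zlib", LINK | RUN), ("anonymous", 0)]
wanted = LINK

selected = [name for name, flag in edges if not flag or (wanted & flag)]
assert selected == ["zlib", "anonymous"]   # edges without a type always match
```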
@ -1473,47 +1441,49 @@ def _get_dependency(self, name):
raise spack.error.SpecError(err_msg.format(name, len(deps))) raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0] return deps[0]
def edges_from_dependents(self, name=None, deptype="all"): def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
"""Return a list of edges connecting this node in the DAG """Return a list of edges connecting this node in the DAG
to parents. to parents.
Args: Args:
name (str): filter dependents by package name name (str): filter dependents by package name
deptype (str or tuple): allowed dependency types depflag: allowed dependency types
""" """
deptype = dp.canonical_deptype(deptype) return [d for d in self._dependents.select(parent=name, depflag=depflag)]
return [d for d in self._dependents.select(parent=name, deptypes=deptype)]
def edges_to_dependencies(self, name=None, deptype="all"): def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
"""Return a list of edges connecting this node in the DAG """Return a list of edges connecting this node in the DAG
to children. to children.
Args: Args:
name (str): filter dependencies by package name name (str): filter dependencies by package name
deptype (str or tuple): allowed dependency types depflag: allowed dependency types
""" """
deptype = dp.canonical_deptype(deptype) return [d for d in self._dependencies.select(child=name, depflag=depflag)]
return [d for d in self._dependencies.select(child=name, deptypes=deptype)]
def dependencies(self, name=None, deptype="all"): def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependencies (nodes in the DAG). """Return a list of direct dependencies (nodes in the DAG).
Args: Args:
name (str): filter dependencies by package name name (str): filter dependencies by package name
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
""" """
return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)] if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
def dependents(self, name=None, deptype="all"): def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependents (nodes in the DAG). """Return a list of direct dependents (nodes in the DAG).
Args: Args:
name (str): filter dependents by package name name (str): filter dependents by package name
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
""" """
return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)] if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
return [d.parent for d in self.edges_from_dependents(name, depflag=deptype)]
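
Both `dependencies()` and `dependents()` accept either the legacy string/tuple form or a flag, converting only when needed so callers that already hold a flag pay nothing. The sketch below mirrors that convention with a hypothetical `to_depflag` helper and illustrative constants; the real conversion is `dt.canonicalize`.

```
BUILD, LINK, RUN, TEST = 1, 2, 4, 8  # illustrative values only
NAME_TO_FLAG = {"build": BUILD, "link": LINK, "run": RUN, "test": TEST}

def to_depflag(deptype) -> int:
    """Accept a flag, a single name, or an iterable of names."""
    if isinstance(deptype, int):
        return deptype                  # already canonical: zero conversion cost
    if isinstance(deptype, str):
        deptype = (deptype,)
    flag = 0
    for name in deptype:
        flag |= NAME_TO_FLAG[name]
    return flag

assert to_depflag(("build", "run")) == BUILD | RUN
assert to_depflag("link") == LINK
assert to_depflag(LINK | RUN) == LINK | RUN
```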
def _dependencies_dict(self, deptype="all"): def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
"""Return a dictionary, keyed by package name, of the direct """Return a dictionary, keyed by package name, of the direct
dependencies. dependencies.
@ -1522,10 +1492,9 @@ def _dependencies_dict(self, deptype="all"):
Args: Args:
deptype: allowed dependency types deptype: allowed dependency types
""" """
_sort_fn = lambda x: (x.spec.name,) + _sort_by_dep_types(x) _sort_fn = lambda x: (x.spec.name, _sort_by_dep_types(x))
_group_fn = lambda x: x.spec.name _group_fn = lambda x: x.spec.name
deptype = dp.canonical_deptype(deptype) selected_edges = self._dependencies.select(depflag=depflag)
selected_edges = self._dependencies.select(deptypes=deptype)
result = {} result = {}
for key, group in itertools.groupby(sorted(selected_edges, key=_sort_fn), key=_group_fn): for key, group in itertools.groupby(sorted(selected_edges, key=_sort_fn), key=_group_fn):
result[key] = list(group) result[key] = list(group)
@ -1621,19 +1590,17 @@ def _set_compiler(self, compiler):
) )
self.compiler = compiler self.compiler = compiler
def _add_dependency( def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]):
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
):
"""Called by the parser to add another spec as a dependency.""" """Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies or not spec.name: if spec.name not in self._dependencies or not spec.name:
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals) self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
return return
# Keep the intersection of constraints when a dependency is added # Keep the intersection of constraints when a dependency is added
# multiple times. Currently, we only allow identical edge types. # multiple times. Currently, we only allow identical edge types.
orig = self._dependencies[spec.name] orig = self._dependencies[spec.name]
try: try:
dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes) dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
except StopIteration: except StopIteration:
raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec) raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
@ -1645,11 +1612,7 @@ def _add_dependency(
) )
def add_dependency_edge( def add_dependency_edge(
self, self, dependency_spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
dependency_spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
): ):
"""Add a dependency edge to this spec. """Add a dependency edge to this spec.
@ -1658,19 +1621,17 @@ def add_dependency_edge(
deptypes: dependency types for this edge deptypes: dependency types for this edge
virtuals: virtuals provided by this edge virtuals: virtuals provided by this edge
""" """
deptypes = dp.canonical_deptype(deptypes)
# Check if we need to update edges that are already present # Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name) selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected: for edge in selected:
has_errors, details = False, [] has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}" msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
if any(d in edge.deptypes for d in deptypes): if edge.depflag & depflag:
has_errors = True has_errors = True
details.append( details.append(
( (
f"{edge.parent.name} already has an edge matching any" f"{edge.parent.name} already has an edge matching any"
f" of these types {str(deptypes)}" f" of these types {depflag}"
) )
) )
@ -1679,7 +1640,7 @@ def add_dependency_edge(
details.append( details.append(
( (
f"{edge.parent.name} already has an edge matching any" f"{edge.parent.name} already has an edge matching any"
f" of these virtuals {str(virtuals)}" f" of these virtuals {virtuals}"
) )
) )
@ -1691,11 +1652,11 @@ def add_dependency_edge(
# If we are here, it means the edge object was previously added to # If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll # both the parent and the child. When we update this object they'll
# both see the deptype modification. # both see the deptype modification.
edge.update_deptypes(deptypes=deptypes) edge.update_deptypes(depflag=depflag)
edge.update_virtuals(virtuals=virtuals) edge.update_virtuals(virtuals=virtuals)
return return
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals) edge = DependencySpec(self, dependency_spec, depflag=depflag, virtuals=virtuals)
self._dependencies.add(edge) self._dependencies.add(edge)
dependency_spec._dependents.add(edge) dependency_spec._dependents.add(edge)
@ -1962,12 +1923,12 @@ def lookup_hash(self):
# Get dependencies that need to be replaced # Get dependencies that need to be replaced
for node in self.traverse(root=False): for node in self.traverse(root=False):
if node.abstract_hash: if node.abstract_hash:
spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=()) spec._add_dependency(node._lookup_hash(), depflag=0, virtuals=())
# reattach nodes that were not otherwise satisfied by new dependencies # reattach nodes that were not otherwise satisfied by new dependencies
for node in self.traverse(root=False): for node in self.traverse(root=False):
if not any(n.satisfies(node) for n in spec.traverse()): if not any(n.satisfies(node) for n in spec.traverse()):
spec._add_dependency(node.copy(), deptypes=(), virtuals=()) spec._add_dependency(node.copy(), depflag=0, virtuals=())
return spec return spec
@ -2093,7 +2054,7 @@ def to_node_dict(self, hash=ht.dag_hash):
d["package_hash"] = package_hash d["package_hash"] = package_hash
# Note: Relies on sorting dict by keys later in algorithm. # Note: Relies on sorting dict by keys later in algorithm.
deps = self._dependencies_dict(deptype=hash.deptype) deps = self._dependencies_dict(depflag=hash.depflag)
if deps: if deps:
deps_list = [] deps_list = []
for name, edges_for_name in sorted(deps.items()): for name, edges_for_name in sorted(deps.items()):
@ -2103,7 +2064,10 @@ def to_node_dict(self, hash=ht.dag_hash):
parameters_tuple = ( parameters_tuple = (
"parameters", "parameters",
syaml.syaml_dict( syaml.syaml_dict(
(key, dspec.parameters[key]) for key in sorted(dspec.parameters) (
("deptypes", dt.flag_to_tuple(dspec.depflag)),
("virtuals", dspec.virtuals),
)
), ),
) )
ordered_entries = [name_tuple, hash_tuple, parameters_tuple] ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
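
Serialization keeps the human-readable tuple form by converting the flag back to strings. A rough sketch of that round trip, with illustrative bit values and local helpers standing in for `dt.flag_to_tuple` and its inverse:

```
BUILD, LINK, RUN, TEST = 1, 2, 4, 8  # illustrative values only
ORDER = (("build", BUILD), ("link", LINK), ("run", RUN), ("test", TEST))

def flag_to_tuple(flag: int) -> tuple:
    # names come out in the fixed build/link/run/test order
    return tuple(name for name, bit in ORDER if flag & bit)

def tuple_to_flag(names) -> int:
    bits = dict(ORDER)
    flag = 0
    for name in names:
        flag |= bits[name]
    return flag

assert flag_to_tuple(RUN | BUILD) == ("build", "run")
assert tuple_to_flag(("build", "run")) == BUILD | RUN
```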
@ -2201,7 +2165,7 @@ def to_dict(self, hash=ht.dag_hash):
""" """
node_list = [] # Using a list to preserve preorder traversal for hash. node_list = [] # Using a list to preserve preorder traversal for hash.
hash_set = set() hash_set = set()
for s in self.traverse(order="pre", deptype=hash.deptype): for s in self.traverse(order="pre", deptype=hash.depflag):
spec_hash = s._cached_hash(hash) spec_hash = s._cached_hash(hash)
if spec_hash not in hash_set: if spec_hash not in hash_set:
@ -2385,13 +2349,12 @@ def spec_builder(d):
if dep_like is None: if dep_like is None:
return spec return spec
def name_and_dependency_types(s): def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
"""Given a key in the dictionary containing the literal, """Given a key in the dictionary containing the literal,
extracts the name of the spec and its dependency types. extracts the name of the spec and its dependency types.
Args: Args:
s (str): key in the dictionary containing the literal s: key in the dictionary containing the literal
""" """
t = s.split(":") t = s.split(":")
@ -2399,39 +2362,37 @@ def name_and_dependency_types(s):
msg = 'more than one ":" separator in key "{0}"' msg = 'more than one ":" separator in key "{0}"'
raise KeyError(msg.format(s)) raise KeyError(msg.format(s))
n = t[0] name = t[0]
if len(t) == 2: if len(t) == 2:
dtypes = tuple(dt.strip() for dt in t[1].split(",")) depflag = dt.flag_from_strings(dep_str.strip() for dep_str in t[1].split(","))
else: else:
dtypes = () depflag = 0
return name, depflag
return n, dtypes def spec_and_dependency_types(
s: Union[Spec, Tuple[Spec, str]]
def spec_and_dependency_types(s): ) -> Tuple[Spec, dt.DepFlag]:
"""Given a non-string key in the literal, extracts the spec """Given a non-string key in the literal, extracts the spec
and its dependency types. and its dependency types.
Args: Args:
s (spec or tuple): either a Spec object or a tuple s: either a Spec object, or a tuple of Spec and string of dependency types
composed of a Spec object and a string with the
dependency types
""" """
if isinstance(s, Spec): if isinstance(s, Spec):
return s, () return s, 0
spec_obj, dtypes = s spec_obj, dtypes = s
return spec_obj, tuple(dt.strip() for dt in dtypes.split(",")) return spec_obj, dt.flag_from_strings(dt.strip() for dt in dtypes.split(","))
# Recurse on dependencies # Recurse on dependencies
for s, s_dependencies in dep_like.items(): for s, s_dependencies in dep_like.items():
if isinstance(s, str): if isinstance(s, str):
dag_node, dependency_types = name_and_dependency_types(s) dag_node, dep_flag = name_and_dependency_types(s)
else: else:
dag_node, dependency_types = spec_and_dependency_types(s) dag_node, dep_flag = spec_and_dependency_types(s)
dependency_spec = spec_builder({dag_node: s_dependencies}) dependency_spec = spec_builder({dag_node: s_dependencies})
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=()) spec._add_dependency(dependency_spec, depflag=dep_flag, virtuals=())
return spec return spec
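
A literal key such as `"e:build,link"` is split into a package name and a flag. The sketch below mirrors that parsing with a hypothetical `name_and_depflag` helper and made-up constants; the real code delegates the string-to-flag step to `dt.flag_from_strings`.

```
BUILD, LINK, RUN, TEST = 1, 2, 4, 8  # illustrative values only
NAME_TO_FLAG = {"build": BUILD, "link": LINK, "run": RUN, "test": TEST}

def name_and_depflag(key: str):
    parts = key.split(":")
    if len(parts) > 2:
        raise KeyError(f'more than one ":" separator in key "{key}"')
    flag = 0
    if len(parts) == 2:
        for token in parts[1].split(","):
            flag |= NAME_TO_FLAG[token.strip()]
    return parts[0], flag

assert name_and_depflag("e:build,link") == ("e", BUILD | LINK)
assert name_and_depflag("f:run") == ("f", RUN)
assert name_and_depflag("d") == ("d", 0)
```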
@ -2604,7 +2565,7 @@ def _replace_with(self, concrete):
virtuals = (self.name,) virtuals = (self.name,)
for dep_spec in itertools.chain.from_iterable(self._dependents.values()): for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
dependent = dep_spec.parent dependent = dep_spec.parent
deptypes = dep_spec.deptypes depflag = dep_spec.depflag
# remove self from all dependents, unless it is already removed # remove self from all dependents, unless it is already removed
if self.name in dependent._dependencies: if self.name in dependent._dependencies:
@ -2612,7 +2573,7 @@ def _replace_with(self, concrete):
# add the replacement, unless it is already a dep of dependent. # add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies: if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals) dependent._add_dependency(concrete, depflag=depflag, virtuals=virtuals)
else: else:
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals( dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
virtuals=virtuals virtuals=virtuals
@ -3174,7 +3135,7 @@ def _evaluate_dependency_conditions(self, name):
for when_spec, dependency in conditions.items(): for when_spec, dependency in conditions.items():
if self.satisfies(when_spec): if self.satisfies(when_spec):
if dep is None: if dep is None:
dep = dp.Dependency(self.name, Spec(name), type=()) dep = dp.Dependency(self.name, Spec(name), depflag=0)
try: try:
dep.merge(dependency) dep.merge(dependency)
except spack.error.UnsatisfiableSpecError as e: except spack.error.UnsatisfiableSpecError as e:
@ -3318,7 +3279,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
# Add merged spec to my deps and recurse # Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name] spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies: if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals) self._add_dependency(spec_dependency, depflag=dependency.depflag, virtuals=virtuals)
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests) changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed return changed
@ -3359,7 +3320,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index, tests):
or (tests and self.name in tests) or (tests and self.name in tests)
or or
# this is not a test-only dependency # this is not a test-only dependency
dep.type - set(["test"]) (dep.depflag & ~dt.TEST)
) )
if merge: if merge:
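
The `dep.depflag & ~dt.TEST` check above asks whether anything other than the test bit is set. In plain bitwise terms (illustrative values only):

```
BUILD, LINK, RUN, TEST = 1, 2, 4, 8  # illustrative values only

assert (TEST & ~TEST) == 0              # a pure test edge has nothing left
assert (BUILD | TEST) & ~TEST == BUILD  # mixed edges keep their non-test bits
```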
@ -3653,9 +3614,7 @@ def _constrain_dependencies(self, other):
# WARNING: using index 0 i.e. we assume that we have only # WARNING: using index 0 i.e. we assume that we have only
# WARNING: one edge from package "name" # WARNING: one edge from package "name"
edges_from_name = self._dependencies[name] edges_from_name = self._dependencies[name]
changed |= edges_from_name[0].update_deptypes( changed |= edges_from_name[0].update_deptypes(other._dependencies[name][0].depflag)
other._dependencies[name][0].deptypes
)
changed |= edges_from_name[0].update_virtuals( changed |= edges_from_name[0].update_virtuals(
other._dependencies[name][0].virtuals other._dependencies[name][0].virtuals
) )
@ -3667,7 +3626,7 @@ def _constrain_dependencies(self, other):
dep_spec_copy = other._get_dependency(name) dep_spec_copy = other._get_dependency(name)
self._add_dependency( self._add_dependency(
dep_spec_copy.spec.copy(), dep_spec_copy.spec.copy(),
deptypes=dep_spec_copy.deptypes, depflag=dep_spec_copy.depflag,
virtuals=dep_spec_copy.virtuals, virtuals=dep_spec_copy.virtuals,
) )
changed = True changed = True
@ -3942,7 +3901,7 @@ def patches(self):
return self._patches return self._patches
def _dup(self, other, deps=True, cleardeps=True): def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, cleardeps=True):
"""Copy the spec other into self. This is an overwriting """Copy the spec other into self. This is an overwriting
copy. It does not copy any dependents (parents), but by default copy. It does not copy any dependents (parents), but by default
copies dependencies. copies dependencies.
@ -3951,9 +3910,8 @@ def _dup(self, other, deps=True, cleardeps=True):
Args: Args:
other (Spec): spec to be copied onto ``self`` other (Spec): spec to be copied onto ``self``
deps (bool or Sequence): if True, copies all the dependencies. If deps: if True, copies all the dependencies. If
False, copies none. If a sequence of dependency types, copies False, copies none. If a deptype or depflag is given, copies only matching types.
only those types.
cleardeps (bool): if True clears the dependencies of ``self``, cleardeps (bool): if True clears the dependencies of ``self``,
before possibly copying the dependencies of ``other`` onto before possibly copying the dependencies of ``other`` onto
``self`` ``self``
@ -4013,10 +3971,10 @@ def _dup(self, other, deps=True, cleardeps=True):
if deps: if deps:
# If caller restricted deptypes to be copied, adjust that here. # If caller restricted deptypes to be copied, adjust that here.
# By default, just copy all deptypes # By default, just copy all deptypes
deptypes = dp.all_deptypes depflag = dt.ALL
if isinstance(deps, (tuple, list)): if isinstance(deps, (tuple, list, str)):
deptypes = deps depflag = dt.canonicalize(deps)
self._dup_deps(other, deptypes) self._dup_deps(other, depflag)
self._concrete = other._concrete self._concrete = other._concrete
@ -4037,13 +3995,13 @@ def _dup(self, other, deps=True, cleardeps=True):
return changed return changed
def _dup_deps(self, other, deptypes): def _dup_deps(self, other, depflag: dt.DepFlag):
def spid(spec): def spid(spec):
return id(spec) return id(spec)
new_specs = {spid(other): self} new_specs = {spid(other): self}
for edge in other.traverse_edges(cover="edges", root=False): for edge in other.traverse_edges(cover="edges", root=False):
if edge.deptypes and not any(d in deptypes for d in edge.deptypes): if edge.depflag and not depflag & edge.depflag:
continue continue
if spid(edge.parent) not in new_specs: if spid(edge.parent) not in new_specs:
@ -4053,17 +4011,16 @@ def spid(spec):
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False) new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
new_specs[spid(edge.parent)].add_dependency_edge( new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals new_specs[spid(edge.spec)], depflag=edge.depflag, virtuals=edge.virtuals
) )
def copy(self, deps=True, **kwargs): def copy(self, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, **kwargs):
"""Make a copy of this spec. """Make a copy of this spec.
Args: Args:
deps (bool or tuple): Defaults to True. If boolean, controls deps: Defaults to True. If boolean, controls
whether dependencies are copied (copied if True). If a whether dependencies are copied (copied if True). If a
tuple is provided, *only* dependencies of types matching DepTypes or DepFlag is provided, *only* matching dependencies are copied.
those in the tuple are copied.
kwargs: additional arguments for internal use (passed to ``_dup``). kwargs: additional arguments for internal use (passed to ``_dup``).
Returns: Returns:
@ -4123,7 +4080,7 @@ def __getitem__(self, name):
# only when we don't find the package do we consider the full DAG. # only when we don't find the package do we consider the full DAG.
order = lambda: itertools.chain( order = lambda: itertools.chain(
self.traverse(deptype="link"), self.traverse(deptype="link"),
self.dependencies(deptype=("build", "run", "test")), self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
self.traverse(), # fall back to a full search self.traverse(), # fall back to a full search
) )
@ -4181,7 +4138,7 @@ def eq_dag(self, other, deptypes=True, vs=None, vo=None):
for s_dspec, o_dspec in zip( for s_dspec, o_dspec in zip(
itertools.chain.from_iterable(ssorted), itertools.chain.from_iterable(osorted) itertools.chain.from_iterable(ssorted), itertools.chain.from_iterable(osorted)
): ):
if deptypes and s_dspec.deptypes != o_dspec.deptypes: if deptypes and s_dspec.depflag != o_dspec.depflag:
return False return False
s, o = s_dspec.spec, o_dspec.spec s, o = s_dspec.spec, o_dspec.spec
@ -4239,7 +4196,7 @@ def _cmp_iter(self):
def deps(): def deps():
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())): for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
yield dep.spec.name yield dep.spec.name
yield tuple(sorted(dep.deptypes)) yield dep.depflag
yield hash(dep.spec) yield hash(dep.spec)
yield deps yield deps
@ -4585,13 +4542,15 @@ def tree(
if cover == "nodes": if cover == "nodes":
# when only covering nodes, we merge dependency types # when only covering nodes, we merge dependency types
# from all dependents before showing them. # from all dependents before showing them.
types = [ds.deptypes for ds in node.edges_from_dependents()] depflag = 0
for ds in node.edges_from_dependents():
depflag |= ds.depflag
else: else:
# when covering edges or paths, we show dependency # when covering edges or paths, we show dependency
# types only for the edge through which we visited # types only for the edge through which we visited
types = [dep_spec.deptypes] depflag = dep_spec.depflag
type_chars = dp.deptype_chars(*types) type_chars = dt.flag_to_chars(depflag)
out += "[%s] " % type_chars out += "[%s] " % type_chars
out += " " * d out += " " * d
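
In the tree view, flags from all in-edges are OR-ed together before being rendered as compact characters. A sketch of that merge plus an illustrative renderer; the actual characters are chosen by `dt.flag_to_chars` and may differ.

```
BUILD, LINK, RUN, TEST = 1, 2, 4, 8  # illustrative values only
CHARS = ((BUILD, "b"), (LINK, "l"), (RUN, "r"), (TEST, "t"))

incoming = [BUILD | LINK, RUN]   # flags on two in-edges of the same node
merged = 0
for flag in incoming:
    merged |= flag

rendered = "".join(char if merged & bit else " " for bit, char in CHARS)
assert rendered == "blr "
```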
@ -4753,14 +4712,14 @@ def from_self(name, transitive):
for edge in self[name].edges_to_dependencies(): for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name dep_name = deps_to_replace.get(edge.spec, edge.spec).name
nodes[name].add_dependency_edge( nodes[name].add_dependency_edge(
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals nodes[dep_name], depflag=edge.depflag, virtuals=edge.virtuals
) )
if any(dep not in self_nodes for dep in self[name]._dependencies): if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec nodes[name].build_spec = self[name].build_spec
else: else:
for edge in other[name].edges_to_dependencies(): for edge in other[name].edges_to_dependencies():
nodes[name].add_dependency_edge( nodes[name].add_dependency_edge(
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals nodes[edge.spec.name], depflag=edge.depflag, virtuals=edge.virtuals
) )
if any(dep not in other_nodes for dep in other[name]._dependencies): if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec nodes[name].build_spec = other[name].build_spec
@ -4851,8 +4810,9 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
# Update with additional constraints from other spec # Update with additional constraints from other spec
for name in current_spec_constraint.direct_dep_difference(merged_spec): for name in current_spec_constraint.direct_dep_difference(merged_spec):
edge = next(iter(current_spec_constraint.edges_to_dependencies(name))) edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
merged_spec._add_dependency( merged_spec._add_dependency(
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals edge.spec.copy(), depflag=edge.depflag, virtuals=edge.virtuals
) )
return merged_spec return merged_spec
@ -4999,9 +4959,11 @@ def _load(cls, data):
# Pass 2: Finish construction of all DAG edges (including build specs) # Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items(): for node_hash, node in hash_dict.items():
node_spec = node["node_spec"] node_spec = node["node_spec"]
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node): for _, dhash, dtype, _, virtuals in cls.dependencies_from_node_dict(node):
node_spec._add_dependency( node_spec._add_dependency(
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals hash_dict[dhash]["node_spec"],
depflag=dt.canonicalize(dtype),
virtuals=virtuals,
) )
if "build_spec" in node.keys(): if "build_spec" in node.keys():
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type) _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
@ -5037,7 +4999,9 @@ def load(cls, data):
# get dependency dict from the node. # get dependency dict from the node.
name, data = cls.name_and_data(node) name, data = cls.name_and_data(node)
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data): for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals) deps[name]._add_dependency(
deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals
)
reconstruct_virtuals_on_edges(result) reconstruct_virtuals_on_edges(result)
return result return result


@ -5,6 +5,7 @@
import pytest import pytest
import spack.deptypes as dt
import spack.installer as inst import spack.installer as inst
import spack.repo import spack.repo
import spack.spec import spack.spec
@ -59,10 +60,10 @@ def test_build_request_strings(install_mockery):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes", "package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes",
[ [
(False, False, ["build", "link", "run"], ["build", "link", "run"]), (False, False, dt.BUILD | dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
(True, False, ["link", "run"], ["build", "link", "run"]), (True, False, dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
(False, True, ["build", "link", "run"], ["link", "run"]), (False, True, dt.BUILD | dt.LINK | dt.RUN, dt.LINK | dt.RUN),
(True, True, ["link", "run"], ["link", "run"]), (True, True, dt.LINK | dt.RUN, dt.LINK | dt.RUN),
], ],
) )
def test_build_request_deptypes( def test_build_request_deptypes(
@ -82,8 +83,8 @@ def test_build_request_deptypes(
}, },
) )
actual_package_deptypes = build_request.get_deptypes(s.package) actual_package_deptypes = build_request.get_depflags(s.package)
actual_dependency_deptypes = build_request.get_deptypes(s["dependency-install"].package) actual_dependency_deptypes = build_request.get_depflags(s["dependency-install"].package)
assert sorted(actual_package_deptypes) == package_deptypes assert actual_package_deptypes == package_deptypes
assert sorted(actual_dependency_deptypes) == dependencies_deptypes assert actual_dependency_deptypes == dependencies_deptypes


@ -16,6 +16,7 @@
import spack.compilers import spack.compilers
import spack.concretize import spack.concretize
import spack.config import spack.config
import spack.deptypes as dt
import spack.detection import spack.detection
import spack.error import spack.error
import spack.hash_types as ht import spack.hash_types as ht
@ -235,13 +236,13 @@ def test_concretize_mention_build_dep(self):
# Check parent's perspective of child # Check parent's perspective of child
to_dependencies = spec.edges_to_dependencies(name="cmake") to_dependencies = spec.edges_to_dependencies(name="cmake")
assert len(to_dependencies) == 1 assert len(to_dependencies) == 1
assert set(to_dependencies[0].deptypes) == set(["build"]) assert to_dependencies[0].depflag == dt.BUILD
# Check child's perspective of parent # Check child's perspective of parent
cmake = spec["cmake"] cmake = spec["cmake"]
from_dependents = cmake.edges_from_dependents(name="cmake-client") from_dependents = cmake.edges_from_dependents(name="cmake-client")
assert len(from_dependents) == 1 assert len(from_dependents) == 1
assert set(from_dependents[0].deptypes) == set(["build"]) assert from_dependents[0].depflag == dt.BUILD
def test_concretize_preferred_version(self): def test_concretize_preferred_version(self):
spec = check_concretize("python") spec = check_concretize("python")


@ -17,6 +17,7 @@
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import spack.deptypes as dt
import spack.install_test import spack.install_test
import spack.package_base import spack.package_base
import spack.repo import spack.repo
@ -92,16 +93,16 @@ def test_possible_dependencies_with_deptypes(mock_packages):
"dtbuild1": {"dtrun2", "dtlink2"}, "dtbuild1": {"dtrun2", "dtlink2"},
"dtlink2": set(), "dtlink2": set(),
"dtrun2": set(), "dtrun2": set(),
} == dtbuild1.possible_dependencies(deptype=("link", "run")) } == dtbuild1.possible_dependencies(depflag=dt.LINK | dt.RUN)
assert { assert {
"dtbuild1": {"dtbuild2", "dtlink2"}, "dtbuild1": {"dtbuild2", "dtlink2"},
"dtbuild2": set(), "dtbuild2": set(),
"dtlink2": set(), "dtlink2": set(),
} == dtbuild1.possible_dependencies(deptype=("build")) } == dtbuild1.possible_dependencies(depflag=dt.BUILD)
assert {"dtbuild1": {"dtlink2"}, "dtlink2": set()} == dtbuild1.possible_dependencies( assert {"dtbuild1": {"dtlink2"}, "dtlink2": set()} == dtbuild1.possible_dependencies(
deptype=("link") depflag=dt.LINK
) )


@ -7,12 +7,13 @@
""" """
import pytest import pytest
import spack.deptypes as dt
import spack.error import spack.error
import spack.package_base import spack.package_base
import spack.parser import spack.parser
import spack.repo import spack.repo
import spack.util.hash as hashutil import spack.util.hash as hashutil
from spack.dependency import Dependency, all_deptypes, canonical_deptype from spack.dependency import Dependency
from spack.spec import Spec from spack.spec import Spec
@ -37,7 +38,7 @@ def set_dependency(saved_deps, monkeypatch):
for a package in the ``saved_deps`` fixture. for a package in the ``saved_deps`` fixture.
""" """
def _mock(pkg_name, spec, deptypes=all_deptypes): def _mock(pkg_name, spec):
"""Alters dependence information for a package. """Alters dependence information for a package.
Adds a dependency on <spec> to pkg. Use this to mock up constraints. Adds a dependency on <spec> to pkg. Use this to mock up constraints.
@ -49,7 +50,7 @@ def _mock(pkg_name, spec, deptypes=all_deptypes):
saved_deps[pkg_name] = (pkg_cls, pkg_cls.dependencies.copy()) saved_deps[pkg_name] = (pkg_cls, pkg_cls.dependencies.copy())
cond = Spec(pkg_cls.name) cond = Spec(pkg_cls.name)
dependency = Dependency(pkg_cls, spec, type=deptypes) dependency = Dependency(pkg_cls, spec)
monkeypatch.setitem(pkg_cls.dependencies, spec.name, {cond: dependency}) monkeypatch.setitem(pkg_cls.dependencies, spec.name, {cond: dependency})
return _mock return _mock
@ -123,7 +124,7 @@ def _mock_installed(self):
# use the installed C. It should *not* force A to use the installed D # use the installed C. It should *not* force A to use the installed D
# *if* we're doing a fresh installation. # *if* we're doing a fresh installation.
a_spec = Spec(a) a_spec = Spec(a)
a_spec._add_dependency(c_spec, deptypes=("build", "link"), virtuals=()) a_spec._add_dependency(c_spec, depflag=dt.BUILD | dt.LINK, virtuals=())
a_spec.concretize() a_spec.concretize()
assert spack.version.Version("2") == a_spec[c][d].version assert spack.version.Version("2") == a_spec[c][d].version
assert spack.version.Version("2") == a_spec[e].version assert spack.version.Version("2") == a_spec[e].version
@ -146,7 +147,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a")) monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))
a_spec = Spec("a") a_spec = Spec("a")
a_spec._add_dependency(b_spec, deptypes=("build", "link"), virtuals=()) a_spec._add_dependency(b_spec, depflag=dt.BUILD | dt.LINK, virtuals=())
a_spec.concretize() a_spec.concretize()
assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"]) assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@ -788,13 +789,13 @@ def test_construct_spec_with_deptypes(self):
{"a": {"b": {"c:build": None}, "d": {"e:build,link": {"f:run": None}}}} {"a": {"b": {"c:build": None}, "d": {"e:build,link": {"f:run": None}}}}
) )
assert s["b"].edges_to_dependencies(name="c")[0].deptypes == ("build",) assert s["b"].edges_to_dependencies(name="c")[0].depflag == dt.BUILD
assert s["d"].edges_to_dependencies(name="e")[0].deptypes == ("build", "link") assert s["d"].edges_to_dependencies(name="e")[0].depflag == dt.BUILD | dt.LINK
assert s["e"].edges_to_dependencies(name="f")[0].deptypes == ("run",) assert s["e"].edges_to_dependencies(name="f")[0].depflag == dt.RUN
assert s["c"].edges_from_dependents(name="b")[0].deptypes == ("build",) assert s["c"].edges_from_dependents(name="b")[0].depflag == dt.BUILD
assert s["e"].edges_from_dependents(name="d")[0].deptypes == ("build", "link") assert s["e"].edges_from_dependents(name="d")[0].depflag == dt.BUILD | dt.LINK
assert s["f"].edges_from_dependents(name="e")[0].deptypes == ("run",) assert s["f"].edges_from_dependents(name="e")[0].depflag == dt.RUN
def check_diamond_deptypes(self, spec): def check_diamond_deptypes(self, spec):
"""Validate deptypes in dt-diamond spec. """Validate deptypes in dt-diamond spec.
@ -803,23 +804,22 @@ def check_diamond_deptypes(self, spec):
depend on the same dependency in different ways. depend on the same dependency in different ways.
""" """
assert spec["dt-diamond"].edges_to_dependencies(name="dt-diamond-left")[0].deptypes == ( assert (
"build", spec["dt-diamond"].edges_to_dependencies(name="dt-diamond-left")[0].depflag
"link", == dt.BUILD | dt.LINK
) )
assert (
assert spec["dt-diamond"].edges_to_dependencies(name="dt-diamond-right")[0].deptypes == ( spec["dt-diamond"].edges_to_dependencies(name="dt-diamond-right")[0].depflag
"build", == dt.BUILD | dt.LINK
"link", )
assert (
spec["dt-diamond-left"].edges_to_dependencies(name="dt-diamond-bottom")[0].depflag
== dt.BUILD
)
assert (
spec["dt-diamond-right"].edges_to_dependencies(name="dt-diamond-bottom")[0].depflag
== dt.BUILD | dt.LINK | dt.RUN
) )
assert spec["dt-diamond-left"].edges_to_dependencies(name="dt-diamond-bottom")[
0
].deptypes == ("build",)
assert spec["dt-diamond-right"].edges_to_dependencies(name="dt-diamond-bottom")[
0
].deptypes == ("build", "link", "run")
def check_diamond_normalized_dag(self, spec): def check_diamond_normalized_dag(self, spec):
dag = Spec.from_literal( dag = Spec.from_literal(
@ -912,48 +912,52 @@ def test_getitem_exceptional_paths(self):
def test_canonical_deptype(self): def test_canonical_deptype(self):
# special values # special values
assert canonical_deptype(all) == all_deptypes assert dt.canonicalize(all) == dt.ALL
assert canonical_deptype("all") == all_deptypes assert dt.canonicalize("all") == dt.ALL
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype(None) dt.canonicalize(None)
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype([None]) dt.canonicalize([None])
# everything in all_deptypes is canonical # everything in ALL_TYPES is canonical
for v in all_deptypes: for v in dt.ALL_TYPES:
assert canonical_deptype(v) == (v,) assert dt.canonicalize(v) == dt.flag_from_string(v)
# tuples # tuples
assert canonical_deptype(("build",)) == ("build",) assert dt.canonicalize(("build",)) == dt.BUILD
assert canonical_deptype(("build", "link", "run")) == ("build", "link", "run") assert dt.canonicalize(("build", "link", "run")) == dt.BUILD | dt.LINK | dt.RUN
assert canonical_deptype(("build", "link")) == ("build", "link") assert dt.canonicalize(("build", "link")) == dt.BUILD | dt.LINK
assert canonical_deptype(("build", "run")) == ("build", "run") assert dt.canonicalize(("build", "run")) == dt.BUILD | dt.RUN
# lists # lists
assert canonical_deptype(["build", "link", "run"]) == ("build", "link", "run") assert dt.canonicalize(["build", "link", "run"]) == dt.BUILD | dt.LINK | dt.RUN
assert canonical_deptype(["build", "link"]) == ("build", "link") assert dt.canonicalize(["build", "link"]) == dt.BUILD | dt.LINK
assert canonical_deptype(["build", "run"]) == ("build", "run") assert dt.canonicalize(["build", "run"]) == dt.BUILD | dt.RUN
# sorting # sorting
assert canonical_deptype(("run", "build", "link")) == ("build", "link", "run") assert dt.canonicalize(("run", "build", "link")) == dt.BUILD | dt.LINK | dt.RUN
assert canonical_deptype(("run", "link", "build")) == ("build", "link", "run") assert dt.canonicalize(("run", "link", "build")) == dt.BUILD | dt.LINK | dt.RUN
assert canonical_deptype(("run", "link")) == ("link", "run") assert dt.canonicalize(("run", "link")) == dt.LINK | dt.RUN
assert canonical_deptype(("link", "build")) == ("build", "link") assert dt.canonicalize(("link", "build")) == dt.BUILD | dt.LINK
# deduplication
assert dt.canonicalize(("run", "run", "link")) == dt.RUN | dt.LINK
assert dt.canonicalize(("run", "link", "link")) == dt.RUN | dt.LINK
# can't put 'all' in tuple or list # can't put 'all' in tuple or list
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype(["all"]) dt.canonicalize(["all"])
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype(("all",)) dt.canonicalize(("all",))
# invalid values # invalid values
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype("foo") dt.canonicalize("foo")
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype(("foo", "bar")) dt.canonicalize(("foo", "bar"))
with pytest.raises(ValueError): with pytest.raises(ValueError):
canonical_deptype(("foo",)) dt.canonicalize(("foo",))
def test_invalid_literal_spec(self): def test_invalid_literal_spec(self):
# Can't give type 'build' to a top-level spec # Can't give type 'build' to a top-level spec
@ -987,16 +991,16 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
link_run_spec = Spec("c@=1.0").concretized() link_run_spec = Spec("c@=1.0").concretized()
build_spec = Spec("c@=2.0").concretized() build_spec = Spec("c@=2.0").concretized()
root.add_dependency_edge(link_run_spec, deptypes="link", virtuals=()) root.add_dependency_edge(link_run_spec, depflag=dt.LINK, virtuals=())
root.add_dependency_edge(link_run_spec, deptypes="run", virtuals=()) root.add_dependency_edge(link_run_spec, depflag=dt.RUN, virtuals=())
root.add_dependency_edge(build_spec, deptypes="build", virtuals=()) root.add_dependency_edge(build_spec, depflag=dt.BUILD, virtuals=())
# Check dependencies from the perspective of root # Check dependencies from the perspective of root
assert len(root.dependencies()) == 2 assert len(root.dependencies()) == 2
assert all(x.name == "c" for x in root.dependencies()) assert all(x.name == "c" for x in root.dependencies())
assert "@2.0" in root.dependencies(name="c", deptype="build")[0] assert "@2.0" in root.dependencies(name="c", deptype=dt.BUILD)[0]
assert "@1.0" in root.dependencies(name="c", deptype=("link", "run"))[0] assert "@1.0" in root.dependencies(name="c", deptype=dt.LINK | dt.RUN)[0]
# Check parent from the perspective of the dependencies # Check parent from the perspective of the dependencies
assert len(build_spec.dependents()) == 1 assert len(build_spec.dependents()) == 1
@ -1015,7 +1019,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
root = Spec("b@=2.0").concretized() root = Spec("b@=2.0").concretized()
bootstrap = Spec("b@=1.0").concretized() bootstrap = Spec("b@=1.0").concretized()
root.add_dependency_edge(bootstrap, deptypes="build", virtuals=()) root.add_dependency_edge(bootstrap, depflag=dt.BUILD, virtuals=())
assert len(root.dependencies()) == 1 assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == "b" assert root.dependencies()[0].name == "b"
@ -1033,37 +1037,38 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
root = Spec("b@=2.0").concretized() root = Spec("b@=2.0").concretized()
bootstrap = Spec("b@=1.0").concretized() bootstrap = Spec("b@=1.0").concretized()
for current_deptype in ("build", "link", "run"): for current_depflag in (dt.BUILD, dt.LINK, dt.RUN):
root.add_dependency_edge(bootstrap, deptypes=current_deptype, virtuals=()) root.add_dependency_edge(bootstrap, depflag=current_depflag, virtuals=())
# Check edges in dependencies # Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1 assert len(root.edges_to_dependencies()) == 1
forward_edge = root.edges_to_dependencies(deptype=current_deptype)[0] forward_edge = root.edges_to_dependencies(depflag=current_depflag)[0]
assert current_deptype in forward_edge.deptypes assert current_depflag & forward_edge.depflag
assert id(forward_edge.parent) == id(root) assert id(forward_edge.parent) == id(root)
assert id(forward_edge.spec) == id(bootstrap) assert id(forward_edge.spec) == id(bootstrap)
# Check edges from dependents # Check edges from dependents
assert len(bootstrap.edges_from_dependents()) == 1 assert len(bootstrap.edges_from_dependents()) == 1
backward_edge = bootstrap.edges_from_dependents(deptype=current_deptype)[0] backward_edge = bootstrap.edges_from_dependents(depflag=current_depflag)[0]
assert current_deptype in backward_edge.deptypes assert current_depflag & backward_edge.depflag
assert id(backward_edge.parent) == id(root) assert id(backward_edge.parent) == id(root)
assert id(backward_edge.spec) == id(bootstrap) assert id(backward_edge.spec) == id(bootstrap)
@pytest.mark.parametrize( @pytest.mark.parametrize(
"c1_deptypes,c2_deptypes", [("link", ("build", "link")), (("link", "run"), ("build", "link"))] "c1_depflag,c2_depflag",
[(dt.LINK, dt.BUILD | dt.LINK), (dt.LINK | dt.RUN, dt.BUILD | dt.LINK)],
) )
def test_adding_same_deptype_with_the_same_name_raises( def test_adding_same_deptype_with_the_same_name_raises(
mock_packages, config, c1_deptypes, c2_deptypes mock_packages, config, c1_depflag, c2_depflag
): ):
p = Spec("b@=2.0").concretized() p = Spec("b@=2.0").concretized()
c1 = Spec("b@=1.0").concretized() c1 = Spec("b@=1.0").concretized()
c2 = Spec("b@=2.0").concretized() c2 = Spec("b@=2.0").concretized()
p.add_dependency_edge(c1, deptypes=c1_deptypes, virtuals=()) p.add_dependency_edge(c1, depflag=c1_depflag, virtuals=())
with pytest.raises(spack.error.SpackError): with pytest.raises(spack.error.SpackError):
p.add_dependency_edge(c2, deptypes=c2_deptypes, virtuals=()) p.add_dependency_edge(c2, depflag=c2_depflag, virtuals=())
@pytest.mark.regression("33499") @pytest.mark.regression("33499")
@ -1082,16 +1087,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1") z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1") z3_flavor_2 = Spec("z3 +through_z1")
root.add_dependency_edge(a1, deptypes=("build", "run", "test"), virtuals=()) root.add_dependency_edge(a1, depflag=dt.BUILD | dt.RUN | dt.TEST, virtuals=())
# unique package as a dep of a build/run/test type dep. # unique package as a dep of a build/run/test type dep.
a1.add_dependency_edge(a2, deptypes="all", virtuals=()) a1.add_dependency_edge(a2, depflag=dt.ALL, virtuals=())
a1.add_dependency_edge(z3_flavor_1, deptypes="all", virtuals=()) a1.add_dependency_edge(z3_flavor_1, depflag=dt.ALL, virtuals=())
# chain of link type deps root -> z1 -> z2 -> z3 # chain of link type deps root -> z1 -> z2 -> z3
root.add_dependency_edge(z1, deptypes="link", virtuals=()) root.add_dependency_edge(z1, depflag=dt.LINK, virtuals=())
z1.add_dependency_edge(z2, deptypes="link", virtuals=()) z1.add_dependency_edge(z2, depflag=dt.LINK, virtuals=())
z2.add_dependency_edge(z3_flavor_2, deptypes="link", virtuals=()) z2.add_dependency_edge(z3_flavor_2, depflag=dt.LINK, virtuals=())
# Indexing should prefer the link-type dep. # Indexing should prefer the link-type dep.
assert "through_z1" in root["z3"].variants assert "through_z1" in root["z3"].variants


@ -971,7 +971,7 @@ def test_error_message_unknown_variant(self):
def test_satisfies_dependencies_ordered(self): def test_satisfies_dependencies_ordered(self):
d = Spec("zmpi ^fake") d = Spec("zmpi ^fake")
s = Spec("mpileaks") s = Spec("mpileaks")
s._add_dependency(d, deptypes=(), virtuals=()) s._add_dependency(d, depflag=0, virtuals=())
assert s.satisfies("mpileaks ^zmpi ^fake") assert s.satisfies("mpileaks ^zmpi ^fake")
@pytest.mark.parametrize("transitive", [True, False]) @pytest.mark.parametrize("transitive", [True, False])
@ -1120,7 +1120,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
# add it to an abstract spec as a dependency # add it to an abstract spec as a dependency
top = Spec("dt-diamond") top = Spec("dt-diamond")
top.add_dependency_edge(bottom, deptypes=(), virtuals=()) top.add_dependency_edge(bottom, depflag=0, virtuals=())
# concretize with the already-concrete dependency # concretize with the already-concrete dependency
top.concretize() top.concretize()


@ -198,7 +198,7 @@ def test_ordered_read_not_required_for_consistent_dag_hash(config, mock_packages
round_trip_reversed_json_spec = Spec.from_yaml(reversed_json_string) round_trip_reversed_json_spec = Spec.from_yaml(reversed_json_string)
# Strip spec if we stripped the yaml # Strip spec if we stripped the yaml
spec = spec.copy(deps=ht.dag_hash.deptype) spec = spec.copy(deps=ht.dag_hash.depflag)
# specs are equal to the original # specs are equal to the original
assert spec == round_trip_yaml_spec assert spec == round_trip_yaml_spec


@ -5,6 +5,7 @@
import pytest import pytest
import spack.deptypes as dt
import spack.traverse as traverse import spack.traverse as traverse
from spack.spec import Spec from spack.spec import Spec
@ -19,7 +20,9 @@ def create_dag(nodes, edges):
""" """
specs = {name: Spec(name) for name in nodes} specs = {name: Spec(name) for name in nodes}
for parent, child, deptypes in edges: for parent, child, deptypes in edges:
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes, virtuals=()) specs[parent].add_dependency_edge(
specs[child], depflag=dt.canonicalize(deptypes), virtuals=()
)
return specs return specs


@ -4,7 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
from collections import defaultdict, namedtuple from collections import defaultdict, namedtuple
from typing import Union
import spack.deptypes as dt
import spack.spec import spack.spec
# Export only the high-level API. # Export only the high-level API.
@ -26,8 +28,8 @@ class BaseVisitor:
"""A simple visitor that accepts all edges unconditionally and follows all """A simple visitor that accepts all edges unconditionally and follows all
edges to dependencies of a given ``deptype``.""" edges to dependencies of a given ``deptype``."""
def __init__(self, deptype="all"): def __init__(self, depflag: dt.DepFlag = dt.ALL):
self.deptype = deptype self.depflag = depflag
def accept(self, item): def accept(self, item):
""" """
@ -43,15 +45,15 @@ def accept(self, item):
return True return True
def neighbors(self, item): def neighbors(self, item):
return sort_edges(item.edge.spec.edges_to_dependencies(deptype=self.deptype)) return sort_edges(item.edge.spec.edges_to_dependencies(depflag=self.depflag))
class ReverseVisitor: class ReverseVisitor:
"""A visitor that reverses the arrows in the DAG, following dependents.""" """A visitor that reverses the arrows in the DAG, following dependents."""
def __init__(self, visitor, deptype="all"): def __init__(self, visitor, depflag: dt.DepFlag = dt.ALL):
self.visitor = visitor self.visitor = visitor
self.deptype = deptype self.depflag = depflag
def accept(self, item): def accept(self, item):
return self.visitor.accept(item) return self.visitor.accept(item)
@ -61,7 +63,7 @@ def neighbors(self, item):
generic programming""" generic programming"""
spec = item.edge.spec spec = item.edge.spec
return sort_edges( return sort_edges(
[edge.flip() for edge in spec.edges_from_dependents(deptype=self.deptype)] [edge.flip() for edge in spec.edges_from_dependents(depflag=self.depflag)]
) )
@ -174,7 +176,9 @@ def edges(self):
return list(reversed(self.reverse_order)) return list(reversed(self.reverse_order))
def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visitor=None): def get_visitor_from_args(
cover, direction, depflag: Union[dt.DepFlag, dt.DepTypes], key=id, visited=None, visitor=None
):
""" """
Create a visitor object from common keyword arguments. Create a visitor object from common keyword arguments.
@ -190,7 +194,7 @@ def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visit
direction (str): ``children`` or ``parents``. If ``children``, does a traversal direction (str): ``children`` or ``parents``. If ``children``, does a traversal
of this spec's children. If ``parents``, traverses upwards in the DAG of this spec's children. If ``parents``, traverses upwards in the DAG
towards the root. towards the root.
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
key: function that takes a spec and outputs a key for uniqueness test. key: function that takes a spec and outputs a key for uniqueness test.
visited (set or None): a set of nodes not to follow (when using cover=nodes/edges) visited (set or None): a set of nodes not to follow (when using cover=nodes/edges)
visitor: An initial visitor that is used for composition. visitor: An initial visitor that is used for composition.
@ -198,13 +202,15 @@ def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visit
Returns: Returns:
A visitor A visitor
""" """
visitor = visitor or BaseVisitor(deptype) if not isinstance(depflag, dt.DepFlag):
depflag = dt.canonicalize(depflag)
visitor = visitor or BaseVisitor(depflag)
if cover == "nodes": if cover == "nodes":
visitor = CoverNodesVisitor(visitor, key, visited) visitor = CoverNodesVisitor(visitor, key, visited)
elif cover == "edges": elif cover == "edges":
visitor = CoverEdgesVisitor(visitor, key, visited) visitor = CoverEdgesVisitor(visitor, key, visited)
if direction == "parents": if direction == "parents":
visitor = ReverseVisitor(visitor, deptype) visitor = ReverseVisitor(visitor, depflag)
return visitor return visitor
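
The visitors above all reduce to the same idea: follow an edge only if its flag intersects the requested one. A self-contained sketch of such a filtered walk over a toy graph; the node names, flag values, and `reachable` helper are made up for illustration.

```
BUILD, LINK, RUN = 1, 2, 4  # illustrative values only

graph = {  # node -> list of (child, depflag) edges
    "root": [("cmake", BUILD), ("zlib", LINK)],
    "cmake": [("ninja", BUILD)],
    "zlib": [],
    "ninja": [],
}

def reachable(start, mask):
    """Depth-first walk that only follows edges intersecting ``mask``."""
    seen, stack = {start}, [start]
    while stack:
        node = stack.pop()
        yield node
        for child, flag in graph[node]:
            if flag & mask and child not in seen:
                seen.add(child)
                stack.append(child)

assert set(reachable("root", LINK | RUN)) == {"root", "zlib"}
assert set(reachable("root", BUILD)) == {"root", "cmake", "ninja"}
```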
@ -212,7 +218,7 @@ def with_artificial_edges(specs):
"""Initialize a list of edges from an imaginary root node to the root specs.""" """Initialize a list of edges from an imaginary root node to the root specs."""
return [ return [
EdgeAndDepth( EdgeAndDepth(
edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=(), virtuals=()), depth=0 edge=spack.spec.DependencySpec(parent=None, spec=s, depflag=0, virtuals=()), depth=0
) )
for s in specs for s in specs
] ]
@ -374,7 +380,12 @@ def traverse_breadth_first_tree_nodes(parent_id, edges, key=id, depth=0):
# Topologic order # Topologic order
def traverse_edges_topo( def traverse_edges_topo(
specs, direction="children", deptype="all", key=id, root=True, all_edges=False specs,
direction="children",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
key=id,
root=True,
all_edges=False,
): ):
""" """
Returns a list of edges in topological order, in the sense that all in-edges of a Returns a list of edges in topological order, in the sense that all in-edges of a
@ -386,13 +397,15 @@ def traverse_edges_topo(
specs (list): List of root specs (considered to be depth 0) specs (list): List of root specs (considered to be depth 0)
direction (str): ``children`` (edges are directed from dependent to dependency) direction (str): ``children`` (edges are directed from dependent to dependency)
or ``parents`` (edges are flipped / directed from dependency to dependent) or ``parents`` (edges are flipped / directed from dependency to dependent)
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
key: function that takes a spec and outputs a key for uniqueness test. key: function that takes a spec and outputs a key for uniqueness test.
root (bool): Yield the root nodes themselves root (bool): Yield the root nodes themselves
all_edges (bool): When ``False`` only one in-edge per node is returned, when all_edges (bool): When ``False`` only one in-edge per node is returned, when
``True`` all reachable edges are returned. ``True`` all reachable edges are returned.
""" """
visitor = BaseVisitor(deptype) if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
visitor: Union[BaseVisitor, ReverseVisitor, TopoVisitor] = BaseVisitor(deptype)
if direction == "parents": if direction == "parents":
visitor = ReverseVisitor(visitor, deptype) visitor = ReverseVisitor(visitor, deptype)
visitor = TopoVisitor(visitor, key=key, root=root, all_edges=all_edges) visitor = TopoVisitor(visitor, key=key, root=root, all_edges=all_edges)
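A short usage sketch for the topological helper, assuming it is importable as `spack.traverse` and that a concretized spec is at hand (the package name is only an example):

```python
import spack.spec
import spack.traverse as traverse

root = spack.spec.Spec("libpng").concretized()   # example; any concretized spec works

# Every node appears before its own dependencies, so reversing the result
# gives a "dependencies first" order.
edges = traverse.traverse_edges_topo([root], deptype=("build", "link"))
names = [edge.spec.name for edge in edges]
print(names)                  # root first
print(list(reversed(names)))  # leaves (dependencies) first
```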
@ -409,7 +422,7 @@ def traverse_edges(
order="pre", order="pre",
cover="nodes", cover="nodes",
direction="children", direction="children",
deptype="all", deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
depth=False, depth=False,
key=id, key=id,
visited=None, visited=None,
@ -435,7 +448,7 @@ def traverse_edges(
direction (str): ``children`` or ``parents``. If ``children``, does a traversal direction (str): ``children`` or ``parents``. If ``children``, does a traversal
of this spec's children. If ``parents``, traverses upwards in the DAG of this spec's children. If ``parents``, traverses upwards in the DAG
towards the root. towards the root.
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
depth (bool): When ``False``, yield just edges. When ``True`` yield depth (bool): When ``False``, yield just edges. When ``True`` yield
the tuple (depth, edge), where depth corresponds to the depth the tuple (depth, edge), where depth corresponds to the depth
at which edge.spec was discovered. at which edge.spec was discovered.
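A sketch of iterating edges with the widened `deptype` argument. The `dt.BUILD`/`dt.LINK` constants and the `depflag` attribute on edges are assumed from the new flag representation; they are not spelled out in this hunk.

```python
import spack.deptypes as dt
import spack.spec
import spack.traverse as traverse

root = spack.spec.Spec("libpng").concretized()   # example package (assumed)
for edge in traverse.traverse_edges([root], cover="edges", deptype=dt.BUILD | dt.LINK):
    parent = edge.parent.name if edge.parent else "<root>"
    is_link = bool(edge.depflag & dt.LINK)       # bitwise test on the edge's flag
    print(f"{parent} -> {edge.spec.name} (link={is_link})")
```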
@ -478,7 +491,7 @@ def traverse_nodes(
order="pre", order="pre",
cover="nodes", cover="nodes",
direction="children", direction="children",
deptype="all", deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
depth=False, depth=False,
key=id, key=id,
visited=None, visited=None,
@ -502,7 +515,7 @@ def traverse_nodes(
direction (str): ``children`` or ``parents``. If ``children``, does a traversal direction (str): ``children`` or ``parents``. If ``children``, does a traversal
of this spec's children. If ``parents``, traverses upwards in the DAG of this spec's children. If ``parents``, traverses upwards in the DAG
towards the root. towards the root.
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
depth (bool): When ``False``, yield just edges. When ``True`` yield depth (bool): When ``False``, yield just edges. When ``True`` yield
the tuple ``(depth, edge)``, where depth corresponds to the depth the tuple ``(depth, edge)``, where depth corresponds to the depth
at which ``edge.spec`` was discovered. at which ``edge.spec`` was discovered.
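With `depth=True` the node generator yields `(depth, spec)` pairs (see the yield in the next hunk), which is enough for a simple indented listing. A minimal sketch, again treating the `spack.traverse` module path and the package name as assumptions:

```python
import spack.spec
import spack.traverse as traverse

root = spack.spec.Spec("libpng").concretized()   # example package (assumed)
for depth, node in traverse.traverse_nodes([root], depth=True, deptype=("link", "run")):
    print("  " * depth + node.name)
```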
@ -517,7 +530,9 @@ def traverse_nodes(
yield (item[0], item[1].spec) if depth else item.spec yield (item[0], item[1].spec) if depth else item.spec
def traverse_tree(specs, cover="nodes", deptype="all", key=id, depth_first=True): def traverse_tree(
specs, cover="nodes", deptype: Union[dt.DepFlag, dt.DepTypes] = "all", key=id, depth_first=True
):
""" """
Generator that yields ``(depth, DependencySpec)`` tuples in the depth-first Generator that yields ``(depth, DependencySpec)`` tuples in the depth-first
pre-order, so that a tree can be printed from it. pre-order, so that a tree can be printed from it.
@ -533,7 +548,7 @@ def traverse_tree(specs, cover="nodes", deptype="all", key=id, depth_first=True)
``paths`` -- Explore every unique path reachable from the root. ``paths`` -- Explore every unique path reachable from the root.
This descends into visited subtrees and will accept nodes multiple This descends into visited subtrees and will accept nodes multiple
times if they're reachable by multiple paths. times if they're reachable by multiple paths.
deptype (str or tuple): allowed dependency types deptype: allowed dependency types
key: function that takes a spec and outputs a key for uniqueness test. key: function that takes a spec and outputs a key for uniqueness test.
depth_first (bool): Explore the tree in depth-first or breadth-first order. depth_first (bool): Explore the tree in depth-first or breadth-first order.
When setting ``depth_first=True`` and ``cover=nodes``, each spec only When setting ``depth_first=True`` and ``cover=nodes``, each spec only
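And a matching sketch for `traverse_tree`, which yields `(depth, DependencySpec)` tuples in pre-order; module path and package name are assumptions as above:

```python
import spack.spec
import spack.traverse as traverse

root = spack.spec.Spec("libpng").concretized()   # example package (assumed)
for depth, edge in traverse.traverse_tree([root], cover="nodes", deptype=("build", "link")):
    print("    " * depth + edge.spec.name)
```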
View File
@ -1273,7 +1273,7 @@ complete -c spack -n '__fish_spack_using_command dependencies' -s i -l installed
complete -c spack -n '__fish_spack_using_command dependencies' -s t -l transitive -f -a transitive complete -c spack -n '__fish_spack_using_command dependencies' -s t -l transitive -f -a transitive
complete -c spack -n '__fish_spack_using_command dependencies' -s t -l transitive -d 'show all transitive dependencies' complete -c spack -n '__fish_spack_using_command dependencies' -s t -l transitive -d 'show all transitive dependencies'
complete -c spack -n '__fish_spack_using_command dependencies' -l deptype -r -f -a deptype complete -c spack -n '__fish_spack_using_command dependencies' -l deptype -r -f -a deptype
complete -c spack -n '__fish_spack_using_command dependencies' -l deptype -r -d 'comma-separated list of deptypes to traverse' complete -c spack -n '__fish_spack_using_command dependencies' -l deptype -r -d 'comma-separated list of deptypes to traverse (default=build,link,run,test)'
complete -c spack -n '__fish_spack_using_command dependencies' -s V -l no-expand-virtuals -f -a expand_virtuals complete -c spack -n '__fish_spack_using_command dependencies' -s V -l no-expand-virtuals -f -a expand_virtuals
complete -c spack -n '__fish_spack_using_command dependencies' -s V -l no-expand-virtuals -d 'do not expand virtual dependencies' complete -c spack -n '__fish_spack_using_command dependencies' -s V -l no-expand-virtuals -d 'do not expand virtual dependencies'
@ -1815,7 +1815,7 @@ complete -c spack -n '__fish_spack_using_command graph' -s c -l color -d 'use di
complete -c spack -n '__fish_spack_using_command graph' -s i -l installed -f -a installed complete -c spack -n '__fish_spack_using_command graph' -s i -l installed -f -a installed
complete -c spack -n '__fish_spack_using_command graph' -s i -l installed -d 'graph installed specs, or specs in the active env (implies --dot)' complete -c spack -n '__fish_spack_using_command graph' -s i -l installed -d 'graph installed specs, or specs in the active env (implies --dot)'
complete -c spack -n '__fish_spack_using_command graph' -l deptype -r -f -a deptype complete -c spack -n '__fish_spack_using_command graph' -l deptype -r -f -a deptype
complete -c spack -n '__fish_spack_using_command graph' -l deptype -r -d 'comma-separated list of deptypes to traverse' complete -c spack -n '__fish_spack_using_command graph' -l deptype -r -d 'comma-separated list of deptypes to traverse (default=build,link,run,test)'
# spack help # spack help
set -g __fish_spack_optspecs_spack_help h/help a/all spec set -g __fish_spack_optspecs_spack_help h/help a/all spec