Allow buildcache specs to be referenced by hash (#35042)
Currently, specs on buildcache mirrors must be referenced by their full description. This PR allows them to be referenced by hash instead.

### How it works

Hash resolution has been moved from `SpecParser` into `Spec`, and now includes the ability to execute a `BinaryCacheQuery` after checking the local store, but before concluding that the hash doesn't exist.

### Side-effects of Proposed Changes

Failures will take longer when nonexistent hashes are parsed, since mirrors will now be scanned as well.

### Other Changes

- `BinaryCacheIndex.update` has been modified to fail appropriately only when mirrors have been configured.
- Tests of hash failures have been updated to use `mutable_empty_config` so they don't needlessly search mirrors.
- Documentation has been clarified for `BinaryCacheQuery`, and more documentation has been added to the hash resolution functions added to `Spec`.
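As an illustration (not part of the change itself), here is a minimal sketch of the new flow from a Spack Python session (e.g. `spack python`). The hash prefix `abc1234` is hypothetical; the lookup order mirrors the `Spec._lookup_hash` implementation in the diff below: active environment, then local store, then configured binary caches.

```python
import spack.spec

# The parser no longer resolves hashes eagerly; it only records the prefix
# on the node ("abc1234" here is a made-up hash prefix).
spec = spack.spec.Spec("/abc1234")
assert spec.abstract_hash == "abc1234"

# replace_hash() resolves the abstract hash in place, checking the active
# environment, then the local store, then binary cache mirrors, and raises
# InvalidHashError if no compatible spec is found.
spec.replace_hash()
print(spec)
```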
@@ -426,7 +426,7 @@ def update(self, with_cooldown=False):

        self._write_local_index_cache()

        if all_methods_failed:
        if configured_mirror_urls and all_methods_failed:
            raise FetchCacheError(fetch_errors)
        if fetch_errors:
            tty.warn(
@@ -2415,6 +2415,10 @@ def __init__(self, all_architectures):
        self.possible_specs = specs

    def __call__(self, spec, **kwargs):
        """
        Args:
            spec (str): The spec being searched for in its string representation or hash.
        """
        matches = []
        if spec.startswith("/"):
            # Matching a DAG hash

@@ -1119,9 +1119,9 @@ def add(self, user_spec, list_name=user_speclist_name):
            raise SpackEnvironmentError(f"No list {list_name} exists in environment {self.name}")

        if list_name == user_speclist_name:
            if not spec.name:
            if spec.anonymous:
                raise SpackEnvironmentError("cannot add anonymous specs to an environment")
            elif not spack.repo.path.exists(spec.name):
            elif not spack.repo.path.exists(spec.name) and not spec.abstract_hash:
                virtuals = spack.repo.path.provider_index.providers.keys()
                if spec.name not in virtuals:
                    msg = "no such package: %s" % spec.name

@@ -241,6 +241,9 @@ def accept(self, kind: TokenType):
            return True
        return False

    def expect(self, *kinds: TokenType):
        return self.next_token and self.next_token.kind in kinds


class SpecParser:
    """Parse text into specs"""
@@ -257,7 +260,9 @@ def tokens(self) -> List[Token]:
        """
        return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))

    def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spec.Spec:
    def next_spec(
        self, initial_spec: Optional[spack.spec.Spec] = None
    ) -> Optional[spack.spec.Spec]:
        """Return the next spec parsed from text.

        Args:
@@ -267,13 +272,16 @@ def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spe
        Return
            The spec that was parsed
        """
        if not self.ctx.next_token:
            return initial_spec

        initial_spec = initial_spec or spack.spec.Spec()
        root_spec = SpecNodeParser(self.ctx).parse(initial_spec)
        while True:
            if self.ctx.accept(TokenType.DEPENDENCY):
                dependency = SpecNodeParser(self.ctx).parse(spack.spec.Spec())
                dependency = SpecNodeParser(self.ctx).parse()

                if dependency == spack.spec.Spec():
                if dependency is None:
                    msg = (
                        "this dependency sigil needs to be followed by a package name "
                        "or a node attribute (version, variant, etc.)"
@@ -292,7 +300,7 @@ def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spe

    def all_specs(self) -> List[spack.spec.Spec]:
        """Return all the specs that remain to be parsed"""
        return list(iter(self.next_spec, spack.spec.Spec()))
        return list(iter(self.next_spec, None))


class SpecNodeParser:
@@ -306,7 +314,7 @@ def __init__(self, ctx):
        self.has_version = False
        self.has_hash = False

    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
    def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
        """Parse a single spec node from a stream of tokens

        Args:
@@ -315,7 +323,10 @@ def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        Return
            The object passed as argument
        """
        import spack.environment  # Needed to retrieve by hash
        if not self.ctx.next_token or self.ctx.expect(TokenType.DEPENDENCY):
            return initial_spec

        initial_spec = initial_spec or spack.spec.Spec()

        # If we start with a package name we have a named spec, we cannot
        # accept another package name afterwards in a node
@@ -390,27 +401,11 @@ def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
                name = name.strip("'\" ")
                value = value.strip("'\" ")
                initial_spec._add_flag(name, value, propagate=True)
            elif not self.has_hash and self.ctx.accept(TokenType.DAG_HASH):
                dag_hash = self.ctx.current_token.value[1:]
                matches = []
                active_env = spack.environment.active_environment()
                if active_env:
                    matches = active_env.get_by_hash(dag_hash)
                if not matches:
                    matches = spack.store.db.get_by_hash(dag_hash)
                if not matches:
                    raise spack.spec.NoSuchHashError(dag_hash)

                if len(matches) != 1:
                    raise spack.spec.AmbiguousHashError(
                        f"Multiple packages specify hash beginning '{dag_hash}'.", *matches
                    )
                spec_by_hash = matches[0]
                if not spec_by_hash.satisfies(initial_spec):
                    raise spack.spec.InvalidHashError(initial_spec, spec_by_hash.dag_hash())
                initial_spec._dup(spec_by_hash)

                self.has_hash = True
            elif self.ctx.expect(TokenType.DAG_HASH):
                if initial_spec.abstract_hash:
                    break
                self.ctx.accept(TokenType.DAG_HASH)
                initial_spec.abstract_hash = self.ctx.current_token.value[1:]
            else:
                break

@@ -488,6 +483,11 @@ def parse_one_or_raise(
            message += color.colorize(f"@*r{{{underline}}}")
        raise ValueError(message)

    if result is None:
        message = "a single spec was requested, but none was parsed:"
        message += f"\n{text}"
        raise ValueError(message)

    return result

@@ -110,7 +110,6 @@
    "UnsatisfiableDependencySpecError",
    "AmbiguousHashError",
    "InvalidHashError",
    "NoSuchHashError",
    "RedundantSpecError",
    "SpecDeprecatedError",
]
@@ -147,7 +146,7 @@

default_format = "{name}{@versions}"
default_format += "{%compiler.name}{@compiler.versions}{compiler_flags}"
default_format += "{variants}{arch=architecture}"
default_format += "{variants}{arch=architecture}{/abstract_hash}"

#: Regular expression to pull spec contents out of clearsigned signature
#: file.
@@ -1249,6 +1248,7 @@ def copy(self, *args, **kwargs):
class Spec(object):
    #: Cache for spec's prefix, computed lazily in the corresponding property
    _prefix = None
    abstract_hash = None

    @staticmethod
    def default_arch():
@@ -1556,7 +1556,7 @@ def _set_compiler(self, compiler):

    def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
        """Called by the parser to add another spec as a dependency."""
        if spec.name not in self._dependencies:
        if spec.name not in self._dependencies or not spec.name:
            self.add_dependency_edge(spec, deptypes=deptypes)
            return

@@ -1617,6 +1617,10 @@ def fullname(self):
            else (self.name if self.name else "")
        )

    @property
    def anonymous(self):
        return not self.name and not self.abstract_hash

    @property
    def root(self):
        """Follow dependent links and find the root of this spec's DAG.
@@ -1825,6 +1829,73 @@ def process_hash_bit_prefix(self, bits):
        """Get the first <bits> bits of the DAG hash as an integer type."""
        return spack.util.hash.base32_prefix_bits(self.process_hash(), bits)

    def _lookup_hash(self):
        """Lookup just one spec with an abstract hash, returning a spec from the the environment,
        store, or finally, binary caches."""
        import spack.environment

        matches = []
        active_env = spack.environment.active_environment()

        if active_env:
            env_matches = active_env.get_by_hash(self.abstract_hash) or []
            matches = [m for m in env_matches if m._satisfies(self)]
        if not matches:
            db_matches = spack.store.db.get_by_hash(self.abstract_hash) or []
            matches = [m for m in db_matches if m._satisfies(self)]
        if not matches:
            query = spack.binary_distribution.BinaryCacheQuery(True)
            remote_matches = query("/" + self.abstract_hash) or []
            matches = [m for m in remote_matches if m._satisfies(self)]
        if not matches:
            raise InvalidHashError(self, self.abstract_hash)

        if len(matches) != 1:
            raise spack.spec.AmbiguousHashError(
                f"Multiple packages specify hash beginning '{self.abstract_hash}'.", *matches
            )

        return matches[0]

    def lookup_hash(self):
        """Given a spec with an abstract hash, return a copy of the spec with all properties and
        dependencies by looking up the hash in the environment, store, or finally, binary caches.
        This is non-destructive."""
        if self.concrete or not any(node.abstract_hash for node in self.traverse()):
            return self

        spec = self.copy(deps=False)
        # root spec is replaced
        if spec.abstract_hash:
            new = self._lookup_hash()
            spec._dup(new)
            return spec

        # Get dependencies that need to be replaced
        for node in self.traverse(root=False):
            if node.abstract_hash:
                new = node._lookup_hash()
                spec._add_dependency(new, deptypes=())

        # reattach nodes that were not otherwise satisfied by new dependencies
        for node in self.traverse(root=False):
            if not any(n._satisfies(node) for n in spec.traverse()):
                spec._add_dependency(node.copy(), deptypes=())

        return spec

    def replace_hash(self):
        """Given a spec with an abstract hash, attempt to populate all properties and dependencies
        by looking up the hash in the environment, store, or finally, binary caches.
        This is destructive."""

        if not any(node for node in self.traverse(order="post") if node.abstract_hash):
            return

        spec_by_hash = self.lookup_hash()

        self._dup(spec_by_hash)

    def to_node_dict(self, hash=ht.dag_hash):
        """Create a dictionary representing the state of this Spec.

@@ -2583,6 +2654,8 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
            )
            warnings.warn(msg)

        self.replace_hash()

        if not self.name:
            raise spack.error.SpecError("Attempting to concretize anonymous spec")

@@ -2781,8 +2854,13 @@ def ensure_no_deprecated(root):
    def _new_concretize(self, tests=False):
        import spack.solver.asp

        if not self.name:
            raise spack.error.SpecError("Spec has no name; cannot concretize an anonymous spec")
        self.replace_hash()

        for node in self.traverse():
            if not node.name:
                raise spack.error.SpecError(
                    f"Spec {node} has no name; cannot concretize an anonymous spec"
                )

        if self._concrete:
            return
@@ -3365,6 +3443,11 @@ def constrain(self, other, deps=True):
                raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")

        other = self._autospec(other)
        if other.abstract_hash:
            if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash):
                self.abstract_hash = other.abstract_hash
            elif not self.abstract_hash.startswith(other.abstract_hash):
                raise InvalidHashError(self, other.abstract_hash)

        if not (self.name == other.name or (not self.name) or (not other.name)):
            raise UnsatisfiableSpecNameError(self.name, other.name)
@@ -3523,6 +3606,12 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
        """
        other = self._autospec(other)

        lhs = self.lookup_hash() or self
        rhs = other.lookup_hash() or other

        return lhs._intersects(rhs, deps)

    def _intersects(self, other: "Spec", deps: bool = True) -> bool:
        if other.concrete and self.concrete:
            return self.dag_hash() == other.dag_hash()

@@ -3588,9 +3677,18 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
        else:
            return True

    def _intersects_dependencies(self, other):
    def satisfies(self, other, deps=True):
        """
        This checks constraints on common dependencies against each other.
        """
        other = self._autospec(other)

        lhs = self.lookup_hash() or self
        rhs = other.lookup_hash() or other

        return lhs._satisfies(rhs, deps=deps)

    def _intersects_dependencies(self, other):
        if not other._dependencies or not self._dependencies:
            # one spec *could* eventually satisfy the other
            return True
@@ -3625,7 +3723,7 @@ def _intersects_dependencies(self, other):

        return True

    def satisfies(self, other: "Spec", deps: bool = True) -> bool:
    def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
        """Return True if all concrete specs matching self also match other, otherwise False.

        Args:
@@ -3770,6 +3868,7 @@ def _dup(self, other, deps=True, cleardeps=True):
                and self.external_path != other.external_path
                and self.external_modules != other.external_modules
                and self.compiler_flags != other.compiler_flags
                and self.abstract_hash != other.abstract_hash
            )

        self._package = None
@@ -3812,6 +3911,8 @@ def _dup(self, other, deps=True, cleardeps=True):

        self._concrete = other._concrete

        self.abstract_hash = other.abstract_hash

        if self._concrete:
            self._dunder_hash = other._dunder_hash
            self._normal = other._normal
@@ -4001,6 +4102,7 @@ def _cmp_node(self):
        yield self.compiler
        yield self.compiler_flags
        yield self.architecture
        yield self.abstract_hash

        # this is not present on older specs
        yield getattr(self, "_package_hash", None)
@@ -4011,7 +4113,10 @@ def eq_node(self, other):

    def _cmp_iter(self):
        """Lazily yield components of self for comparison."""
        for item in self._cmp_node():

        cmp_spec = self.lookup_hash() or self

        for item in cmp_spec._cmp_node():
            yield item

        # This needs to be in _cmp_iter so that no specs with different process hashes
@@ -4022,10 +4127,10 @@ def _cmp_iter(self):
        # TODO: they exist for speed.  We should benchmark whether it's really worth
        # TODO: having two types of hashing now that we use `json` instead of `yaml` for
        # TODO: spec hashing.
        yield self.process_hash() if self.concrete else None
        yield cmp_spec.process_hash() if cmp_spec.concrete else None

        def deps():
            for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
            for dep in sorted(itertools.chain.from_iterable(cmp_spec._dependencies.values())):
                yield dep.spec.name
                yield tuple(sorted(dep.deptypes))
                yield hash(dep.spec)
@@ -4146,7 +4251,7 @@ def write_attribute(spec, attribute, color):
                raise SpecFormatSigilError(sig, "versions", attribute)
            elif sig == "%" and attribute not in ("compiler", "compiler.name"):
                raise SpecFormatSigilError(sig, "compilers", attribute)
            elif sig == "/" and not re.match(r"hash(:\d+)?$", attribute):
            elif sig == "/" and not re.match(r"(abstract_)?hash(:\d+)?$", attribute):
                raise SpecFormatSigilError(sig, "DAG hashes", attribute)
            elif sig == " arch=" and attribute not in ("architecture", "arch"):
                raise SpecFormatSigilError(sig, "the architecture", attribute)
@@ -4266,7 +4371,9 @@ def cformat(self, *args, **kwargs):
        return self.format(*args, **kwargs)

    def __str__(self):
        sorted_nodes = [self] + sorted(self.traverse(root=False), key=lambda x: x.name)
        sorted_nodes = [self] + sorted(
            self.traverse(root=False), key=lambda x: x.name or x.abstract_hash
        )
        spec_str = " ^".join(d.format() for d in sorted_nodes)
        return spec_str.strip()

@@ -5066,14 +5173,9 @@ def __init__(self, msg, *specs):

class InvalidHashError(spack.error.SpecError):
    def __init__(self, spec, hash):
        super(InvalidHashError, self).__init__(
            "The spec specified by %s does not match provided spec %s" % (hash, spec)
        )


class NoSuchHashError(spack.error.SpecError):
    def __init__(self, hash):
        super(NoSuchHashError, self).__init__("No installed spec matches the hash: '%s'" % hash)
        msg = f"No spec with hash {hash} could be found to match {spec}."
        msg += " Either the hash does not exist, or it does not match other spec constraints."
        super(InvalidHashError, self).__init__(msg)


class SpecFilenameError(spack.error.SpecError):

@@ -86,10 +86,9 @@ def test_spec_parse_unquoted_flags_report():
        # cflags, we just explain how to fix it for the immediate next arg.
        spec("gcc cflags=-Os -pipe -other-arg-that-gets-ignored cflags=-I /usr/include")
    # Verify that the generated error message is nicely formatted.
    assert str(cm.value) == dedent(
        '''\
    No installed spec matches the hash: 'usr'

    expected_message = dedent(
        '''\
    Some compiler or linker flags were provided without quoting their arguments,
    which now causes spack to try to parse the *next* argument as a spec component
    such as a variant instead of an additional compiler or linker flag. If the
@@ -100,6 +99,8 @@ def test_spec_parse_unquoted_flags_report():
    (2) cflags=-I /usr/include => cflags="-I /usr/include"'''
    )

    assert expected_message in str(cm.value)

    # Verify that the same unquoted cflags report is generated in the error message even
    # if it fails during concretization, not just during parsing.
    with pytest.raises(spack.error.SpackError) as cm:

@@ -239,6 +239,22 @@ def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected):
        assert c1 == c2
        assert c1 == expected

    def test_constrain_specs_by_hash(self, default_mock_concretization, database):
        """Test that Specs specified only by their hashes can constrain eachother."""
        mpich_dag_hash = "/" + database.query_one("mpich").dag_hash()
        spec = Spec(mpich_dag_hash[:7])
        assert spec.constrain(Spec(mpich_dag_hash)) is False
        assert spec.abstract_hash == mpich_dag_hash[1:]

    def test_mismatched_constrain_spec_by_hash(self, default_mock_concretization, database):
        """Test that Specs specified only by their incompatible hashes fail appropriately."""
        lhs = "/" + database.query_one("callpath ^mpich").dag_hash()
        rhs = "/" + database.query_one("callpath ^mpich2").dag_hash()
        with pytest.raises(spack.spec.InvalidHashError):
            Spec(lhs).constrain(Spec(rhs))
        with pytest.raises(spack.spec.InvalidHashError):
            Spec(lhs[:7]).constrain(Spec(rhs))

    @pytest.mark.parametrize(
        "lhs,rhs", [("libelf", Spec()), ("libelf", "@0:1"), ("libelf", "@0:1 %gcc")]
    )

@@ -23,7 +23,7 @@

FAIL_ON_WINDOWS = pytest.mark.xfail(
    sys.platform == "win32",
    raises=(SpecTokenizationError, spack.spec.NoSuchHashError),
    raises=(SpecTokenizationError, spack.spec.InvalidHashError),
    reason="Unix style path on Windows",
)

@@ -631,21 +631,34 @@ def test_spec_by_hash_tokens(text, tokens):


@pytest.mark.db
def test_spec_by_hash(database):
def test_spec_by_hash(database, monkeypatch, config):
    mpileaks = database.query_one("mpileaks ^zmpi")
    b = spack.spec.Spec("b").concretized()
    monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", lambda: [b])

    hash_str = f"/{mpileaks.dag_hash()}"
    assert str(SpecParser(hash_str).next_spec()) == str(mpileaks)
    parsed_spec = SpecParser(hash_str).next_spec()
    parsed_spec.replace_hash()
    assert parsed_spec == mpileaks

    short_hash_str = f"/{mpileaks.dag_hash()[:5]}"
    assert str(SpecParser(short_hash_str).next_spec()) == str(mpileaks)
    parsed_spec = SpecParser(short_hash_str).next_spec()
    parsed_spec.replace_hash()
    assert parsed_spec == mpileaks

    name_version_and_hash = f"{mpileaks.name}@{mpileaks.version} /{mpileaks.dag_hash()[:5]}"
    assert str(SpecParser(name_version_and_hash).next_spec()) == str(mpileaks)
    parsed_spec = SpecParser(name_version_and_hash).next_spec()
    parsed_spec.replace_hash()
    assert parsed_spec == mpileaks

    b_hash = f"/{b.dag_hash()}"
    parsed_spec = SpecParser(b_hash).next_spec()
    parsed_spec.replace_hash()
    assert parsed_spec == b


@pytest.mark.db
def test_dep_spec_by_hash(database):
def test_dep_spec_by_hash(database, config):
    mpileaks_zmpi = database.query_one("mpileaks ^zmpi")
    zmpi = database.query_one("zmpi")
    fake = database.query_one("fake")
@@ -653,26 +666,25 @@ def test_dep_spec_by_hash(database):
    assert "fake" in mpileaks_zmpi
    assert "zmpi" in mpileaks_zmpi

    mpileaks_hash_fake = SpecParser(f"mpileaks ^/{fake.dag_hash()}").next_spec()
    mpileaks_hash_fake = SpecParser(f"mpileaks ^/{fake.dag_hash()} ^zmpi").next_spec()
    mpileaks_hash_fake.replace_hash()
    assert "fake" in mpileaks_hash_fake
    assert mpileaks_hash_fake["fake"] == fake
    assert "zmpi" in mpileaks_hash_fake
    assert mpileaks_hash_fake["zmpi"] == spack.spec.Spec("zmpi")

    mpileaks_hash_zmpi = SpecParser(
        f"mpileaks %{mpileaks_zmpi.compiler} ^ /{zmpi.dag_hash()}"
    ).next_spec()
    mpileaks_hash_zmpi.replace_hash()
    assert "zmpi" in mpileaks_hash_zmpi
    assert mpileaks_hash_zmpi["zmpi"] == zmpi

    # notice: the round-trip str -> Spec loses specificity when
    # since %gcc@=x gets printed as %gcc@x. So stick to satisfies
    # here, unless/until we want to differentiate between ranges
    # and specific versions in the future.
    # assert mpileaks_hash_zmpi.compiler == mpileaks_zmpi.compiler
    assert mpileaks_zmpi.compiler.satisfies(mpileaks_hash_zmpi.compiler)

    mpileaks_hash_fake_and_zmpi = SpecParser(
        f"mpileaks ^/{fake.dag_hash()[:4]} ^ /{zmpi.dag_hash()[:5]}"
    ).next_spec()
    mpileaks_hash_fake_and_zmpi.replace_hash()
    assert "zmpi" in mpileaks_hash_fake_and_zmpi
    assert mpileaks_hash_fake_and_zmpi["zmpi"] == zmpi

@@ -681,7 +693,7 @@ def test_dep_spec_by_hash(database):


@pytest.mark.db
def test_multiple_specs_with_hash(database):
def test_multiple_specs_with_hash(database, config):
    mpileaks_zmpi = database.query_one("mpileaks ^zmpi")
    callpath_mpich2 = database.query_one("callpath ^mpich2")

@@ -713,7 +725,7 @@ def test_multiple_specs_with_hash(database):


@pytest.mark.db
def test_ambiguous_hash(mutable_database, default_mock_concretization):
def test_ambiguous_hash(mutable_database, default_mock_concretization, config):
    x1 = default_mock_concretization("a")
    x2 = x1.copy()
    x1._hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"
@@ -723,31 +735,43 @@ def test_ambiguous_hash(mutable_database, default_mock_concretization):

    # ambiguity in first hash character
    with pytest.raises(spack.spec.AmbiguousHashError):
        SpecParser("/x").next_spec()
        parsed_spec = SpecParser("/x").next_spec()
        parsed_spec.replace_hash()

    # ambiguity in first hash character AND spec name
    with pytest.raises(spack.spec.AmbiguousHashError):
        SpecParser("a/x").next_spec()
        parsed_spec = SpecParser("a/x").next_spec()
        parsed_spec.replace_hash()


@pytest.mark.db
def test_invalid_hash(database):
def test_invalid_hash(database, config):
    zmpi = database.query_one("zmpi")
    mpich = database.query_one("mpich")

    # name + incompatible hash
    with pytest.raises(spack.spec.InvalidHashError):
        SpecParser(f"zmpi /{mpich.dag_hash()}").next_spec()
        parsed_spec = SpecParser(f"zmpi /{mpich.dag_hash()}").next_spec()
        parsed_spec.replace_hash()
    with pytest.raises(spack.spec.InvalidHashError):
        SpecParser(f"mpich /{zmpi.dag_hash()}").next_spec()
        parsed_spec = SpecParser(f"mpich /{zmpi.dag_hash()}").next_spec()
        parsed_spec.replace_hash()

    # name + dep + incompatible hash
    with pytest.raises(spack.spec.InvalidHashError):
        SpecParser(f"mpileaks ^zmpi /{mpich.dag_hash()}").next_spec()
        parsed_spec = SpecParser(f"mpileaks ^zmpi /{mpich.dag_hash()}").next_spec()
        parsed_spec.replace_hash()


def test_invalid_hash_dep(database, config):
    mpich = database.query_one("mpich")
    hash = mpich.dag_hash()
    with pytest.raises(spack.spec.InvalidHashError):
        spack.spec.Spec(f"callpath ^zlib/{hash}").replace_hash()


@pytest.mark.db
def test_nonexistent_hash(database):
def test_nonexistent_hash(database, config):
    """Ensure we get errors for non existent hashes."""
    specs = database.query()

@@ -756,26 +780,40 @@ def test_nonexistent_hash(database):
    hashes = [s._hash for s in specs]
    assert no_such_hash not in [h[: len(no_such_hash)] for h in hashes]

    with pytest.raises(spack.spec.NoSuchHashError):
        SpecParser(f"/{no_such_hash}").next_spec()
    with pytest.raises(spack.spec.InvalidHashError):
        parsed_spec = SpecParser(f"/{no_such_hash}").next_spec()
        parsed_spec.replace_hash()


@pytest.mark.db
@pytest.mark.parametrize(
    "query_str,text_fmt",
    "spec1,spec2,constraint",
    [
        ("mpileaks ^zmpi", r"/{hash}%{0.compiler}"),
        ("callpath ^zmpi", r"callpath /{hash} ^libelf"),
        ("dyninst", r'/{hash} cflags="-O3 -fPIC"'),
        ("mpileaks ^mpich2", r"mpileaks/{hash} @{0.version}"),
        ("zlib", "hdf5", None),
        ("zlib+shared", "zlib~shared", "+shared"),
        ("hdf5+mpi^zmpi", "hdf5~mpi", "^zmpi"),
        ("hdf5+mpi^mpich+debug", "hdf5+mpi^mpich~debug", "^mpich+debug"),
    ],
)
def test_redundant_spec(query_str, text_fmt, database):
    """Check that redundant spec constraints raise errors."""
    spec = database.query_one(query_str)
    text = text_fmt.format(spec, hash=spec.dag_hash())
    with pytest.raises(spack.spec.RedundantSpecError):
        SpecParser(text).next_spec()
def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monkeypatch, config):
    spec1_concrete = spack.spec.Spec(spec1).concretized()
    spec2_concrete = spack.spec.Spec(spec2).concretized()

    spec1_concrete._hash = "spec1"
    spec2_concrete._hash = "spec2"

    monkeypatch.setattr(
        spack.binary_distribution,
        "update_cache_and_get_specs",
        lambda: [spec1_concrete, spec2_concrete],
    )

    # Ordering is tricky -- for constraints we want after, for names we want before
    if not constraint:
        spec = spack.spec.Spec(spec1 + "/spec")
    else:
        spec = spack.spec.Spec("/spec" + constraint)

    assert spec.lookup_hash() == spec1_concrete


@pytest.mark.parametrize(
@@ -858,11 +896,13 @@ def test_error_conditions(text, exc_cls):
            "libfoo ^./libdwarf.yaml", spack.spec.NoSuchSpecFileError, marks=FAIL_ON_WINDOWS
        ),
        pytest.param(
            "/bogus/path/libdwarf.yamlfoobar", spack.spec.SpecFilenameError, marks=FAIL_ON_WINDOWS
            "/bogus/path/libdwarf.yamlfoobar",
            spack.spec.NoSuchSpecFileError,
            marks=FAIL_ON_WINDOWS,
        ),
        pytest.param(
            "libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml",
            spack.spec.SpecFilenameError,
            spack.spec.NoSuchSpecFileError,
            marks=FAIL_ON_WINDOWS,
        ),
        pytest.param(
@@ -895,7 +935,7 @@ def test_error_conditions(text, exc_cls):
)
def test_specfile_error_conditions_windows(text, exc_cls):
    with pytest.raises(exc_cls):
        SpecParser(text).next_spec()
        SpecParser(text).all_specs()


@pytest.mark.parametrize(

@@ -18,7 +18,7 @@


def sort_edges(edges):
    edges.sort(key=lambda edge: edge.spec.name)
    edges.sort(key=lambda edge: (edge.spec.name or "", edge.spec.abstract_hash or ""))
    return edges