Unify url and oci buildcache push code paths (#45776)

Harmen Stoppels 2024-08-20 13:17:49 +02:00 committed by GitHub
parent 9d0b9f086f
commit 2ae5596e92
GPG Key ID: B5690EEEBB952194
10 changed files with 213 additions and 149 deletions

View File

@@ -6,7 +6,6 @@
 import codecs
 import collections
 import concurrent.futures
-import contextlib
 import copy
 import hashlib
 import io
@ -25,7 +24,7 @@
import urllib.request import urllib.request
import warnings import warnings
from contextlib import closing from contextlib import closing
from typing import Dict, Generator, Iterable, List, NamedTuple, Optional, Set, Tuple, Union from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.filesystem as fsys import llnl.util.filesystem as fsys
import llnl.util.lang import llnl.util.lang
@@ -958,7 +957,7 @@ def _spec_files_from_cache(url: str):
     raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url))
 
 
-def generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
+def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
     """Create or replace the build cache index on the given mirror. The
     buildcache index contains an entry for each binary package under the
     cache_prefix.
@@ -1119,7 +1118,7 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil
     return ExistsInBuildcache(signed, unsigned, tarball)
 
 
-def _upload_tarball_and_specfile(
+def _url_upload_tarball_and_specfile(
     spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
 ):
     files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1154,49 +1153,146 @@ def _upload_tarball_and_specfile(
     )
 
 
+class Uploader:
+    def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
+        self.mirror = mirror
+        self.force = force
+        self.update_index = update_index
+
+        self.tmpdir: str
+        self.executor: concurrent.futures.Executor
+
+    def __enter__(self):
+        self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
+        self._executor = spack.util.parallel.make_concurrent_executor()
+        self.tmpdir = self._tmpdir.__enter__()
+        self.executor = self._executor.__enter__()
+        return self
+
+    def __exit__(self, *args):
+        self._executor.__exit__(*args)
+        self._tmpdir.__exit__(*args)
+
+    def push_or_raise(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
+        skipped, errors = self.push(specs)
+        if errors:
+            raise PushToBuildCacheError(
+                f"Failed to push {len(errors)} specs to {self.mirror.push_url}:\n"
+                + "\n".join(
+                    f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors
+                )
+            )
+        return skipped
+
+    def push(
+        self, specs: List[spack.spec.Spec]
+    ) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
+        raise NotImplementedError
+
+    def tag(self, tag: str, roots: List[spack.spec.Spec]):
+        """Make a list of selected specs together available under the given tag"""
+        pass
+
+
+class OCIUploader(Uploader):
+    def __init__(
+        self,
+        mirror: spack.mirror.Mirror,
+        force: bool,
+        update_index: bool,
+        base_image: Optional[str],
+    ) -> None:
+        super().__init__(mirror, force, update_index)
+        self.target_image = spack.oci.oci.image_from_mirror(mirror)
+        self.base_image = ImageReference.from_string(base_image) if base_image else None
+
+    def push(
+        self, specs: List[spack.spec.Spec]
+    ) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
+        skipped, base_images, checksums, upload_errors = _oci_push(
+            target_image=self.target_image,
+            base_image=self.base_image,
+            installed_specs_with_deps=specs,
+            force=self.force,
+            tmpdir=self.tmpdir,
+            executor=self.executor,
+        )
+        self._base_images = base_images
+        self._checksums = checksums
+        # only update index if any binaries were uploaded
+        if self.update_index and len(skipped) + len(upload_errors) < len(specs):
+            _oci_update_index(self.target_image, self.tmpdir, self.executor)
+        return skipped, upload_errors
+
+    def tag(self, tag: str, roots: List[spack.spec.Spec]):
+        tagged_image = self.target_image.with_tag(tag)
+        # _oci_push may not populate self._base_images if binaries were already in the registry
+        for spec in roots:
+            _oci_update_base_images(
+                base_image=self.base_image,
+                target_image=self.target_image,
+                spec=spec,
+                base_image_cache=self._base_images,
+            )
+        _oci_put_manifest(
+            self._base_images, self._checksums, tagged_image, self.tmpdir, None, None, *roots
+        )
+
+
+class URLUploader(Uploader):
+    def __init__(
+        self,
+        mirror: spack.mirror.Mirror,
+        force: bool,
+        update_index: bool,
+        signing_key: Optional[str],
+    ) -> None:
+        super().__init__(mirror, force, update_index)
+        self.url = mirror.push_url
+        self.signing_key = signing_key
+
+    def push(
+        self, specs: List[spack.spec.Spec]
+    ) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
+        return _url_push(
+            specs,
+            out_url=self.url,
+            force=self.force,
+            update_index=self.update_index,
+            signing_key=self.signing_key,
+            tmpdir=self.tmpdir,
+            executor=self.executor,
+        )
+
+
+def make_uploader(
+    mirror: spack.mirror.Mirror,
+    force: bool = False,
+    update_index: bool = False,
+    signing_key: Optional[str] = None,
+    base_image: Optional[str] = None,
+) -> Uploader:
+    """Builder for the appropriate uploader based on the mirror type"""
+    if mirror.push_url.startswith("oci://"):
+        return OCIUploader(
+            mirror=mirror, force=force, update_index=update_index, base_image=base_image
+        )
+    else:
+        return URLUploader(
+            mirror=mirror, force=force, update_index=update_index, signing_key=signing_key
+        )
 def _format_spec(spec: Spec) -> str:
     return spec.cformat("{name}{@version}{/hash:7}")
 
 
-@contextlib.contextmanager
-def default_push_context() -> Generator[Tuple[str, concurrent.futures.Executor], None, None]:
-    with tempfile.TemporaryDirectory(
-        dir=spack.stage.get_stage_root()
-    ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
-        yield tmpdir, executor
-
-
-def push_or_raise(
-    specs: List[Spec],
-    out_url: str,
-    signing_key: Optional[str],
-    force: bool = False,
-    update_index: bool = False,
-) -> List[Spec]:
-    """Same as push, but raises an exception on error. Returns a list of skipped specs already
-    present in the build cache when force=False."""
-    skipped, errors = push(specs, out_url, signing_key, force, update_index)
-    if errors:
-        raise PushToBuildCacheError(
-            f"Failed to push {len(errors)} specs to {out_url}:\n"
-            + "\n".join(f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors)
-        )
-    return skipped
-
-
-def push(
-    specs: List[Spec],
-    out_url: str,
-    signing_key: Optional[str],
-    force: bool = False,
-    update_index: bool = False,
-) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
-    """Pushes to the provided build cache, and returns a list of skipped specs that were already
-    present (when force=False). Does not raise on error."""
-    with default_push_context() as (tmpdir, executor):
-        return _push(specs, out_url, signing_key, force, update_index, tmpdir, executor)
-
-
 class FancyProgress:
     def __init__(self, total: int):
         self.n = 0
@@ -1234,7 +1330,7 @@ def fail(self) -> None:
         tty.info(f"{self.pre}Failed to push {self.pretty_spec}")
 
 
-def _push(
+def _url_push(
     specs: List[Spec],
     out_url: str,
     signing_key: Optional[str],
@@ -1279,7 +1375,7 @@ def _push(
     upload_futures = [
         executor.submit(
-            _upload_tarball_and_specfile,
+            _url_upload_tarball_and_specfile,
             spec,
             tmpdir,
             out_url,
@@ -1309,12 +1405,12 @@ def _push(
     if signing_key:
         keys_tmpdir = os.path.join(tmpdir, "keys")
         os.mkdir(keys_tmpdir)
-        push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
+        _url_push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
 
     if update_index:
         index_tmpdir = os.path.join(tmpdir, "index")
         os.mkdir(index_tmpdir)
-        generate_package_index(out_url, index_tmpdir)
+        _url_generate_package_index(out_url, index_tmpdir)
 
     return skipped, errors
@@ -1517,7 +1613,7 @@ def _oci_update_base_images(
     )
 
 
-def _push_oci(
+def _oci_push(
     *,
     target_image: ImageReference,
     base_image: Optional[ImageReference],
@@ -2643,7 +2739,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
     )
 
 
-def push_keys(
+def _url_push_keys(
     *mirrors: Union[spack.mirror.Mirror, str],
     keys: List[str],
     tmpdir: str,
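
Taken together, make_uploader is now the single entry point for pushing to either
mirror flavor. A minimal usage sketch, not part of the commit — the mirror path is
hypothetical, and the empty specs list stands in for installed concrete specs:

    import spack.binary_distribution as bindist
    import spack.mirror

    # A file:// mirror selects URLUploader; an oci:// push URL selects OCIUploader.
    mirror = spack.mirror.Mirror.from_local_path("/tmp/my-buildcache")

    specs = []  # assumed: list of installed, concrete Spec objects to push

    with bindist.make_uploader(mirror, force=False, update_index=True) as uploader:
        # push() returns (skipped, errors) and does not raise;
        # push_or_raise() instead raises PushToBuildCacheError on any failure.
        skipped, errors = uploader.push(specs=specs)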

View File

@@ -1382,8 +1382,10 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
     """
     tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
     signing_key = bindist.select_signing_key() if sign_binaries else None
+    mirror = spack.mirror.Mirror.from_url(mirror_url)
     try:
-        bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
+        with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
+            uploader.push_or_raise([spec])
         return True
     except bindist.PushToBuildCacheError as e:
         tty.error(f"Problem writing to {mirror_url}: {e}")

View File

@@ -37,7 +37,6 @@
 from spack import traverse
 from spack.cmd import display_specs
 from spack.cmd.common import arguments
-from spack.oci.image import ImageReference
 from spack.spec import Spec, save_dependency_specfiles
 
 description = "create, download and install binary packages"
@@ -392,13 +391,8 @@ def push_fn(args):
     else:
         roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
 
-    mirror: spack.mirror.Mirror = args.mirror
-
-    # Check if this is an OCI image.
-    try:
-        target_image = spack.oci.oci.image_from_mirror(mirror)
-    except ValueError:
-        target_image = None
+    mirror = args.mirror
+    assert isinstance(mirror, spack.mirror.Mirror)
 
     push_url = mirror.push_url
@@ -409,12 +403,11 @@ def push_fn(args):
     unsigned = not (args.key or args.signed)
 
     # For OCI images, we require dependencies to be pushed for now.
-    if target_image:
-        if not unsigned:
-            tty.warn(
-                "Code signing is currently not supported for OCI images. "
-                "Use --unsigned to silence this warning."
-            )
-        unsigned = True
+    if mirror.push_url.startswith("oci://") and not unsigned:
+        tty.warn(
+            "Code signing is currently not supported for OCI images. "
+            "Use --unsigned to silence this warning."
+        )
+        unsigned = True
 
     # Select a signing key, or None if unsigned.
@@ -445,49 +438,17 @@ def push_fn(args):
         (s, PackageNotInstalledError("package not installed")) for s in not_installed
     )
 
-    with bindist.default_push_context() as (tmpdir, executor):
-        if target_image:
-            base_image = ImageReference.from_string(args.base_image) if args.base_image else None
-            skipped, base_images, checksums, upload_errors = bindist._push_oci(
-                target_image=target_image,
-                base_image=base_image,
-                installed_specs_with_deps=specs,
-                force=args.force,
-                tmpdir=tmpdir,
-                executor=executor,
-            )
-
-            if upload_errors:
-                failed.extend(upload_errors)
-
-            # Apart from creating manifests for each individual spec, we allow users to create a
-            # separate image tag for all root specs and their runtime dependencies.
-            elif args.tag:
-                tagged_image = target_image.with_tag(args.tag)
-                # _push_oci may not populate base_images if binaries were already in the registry
-                for spec in roots:
-                    bindist._oci_update_base_images(
-                        base_image=base_image,
-                        target_image=target_image,
-                        spec=spec,
-                        base_image_cache=base_images,
-                    )
-                bindist._oci_put_manifest(
-                    base_images, checksums, tagged_image, tmpdir, None, None, *roots
-                )
-                tty.info(f"Tagged {tagged_image}")
-
-        else:
-            skipped, upload_errors = bindist._push(
-                specs,
-                out_url=push_url,
-                force=args.force,
-                update_index=args.update_index,
-                signing_key=signing_key,
-                tmpdir=tmpdir,
-                executor=executor,
-            )
-            failed.extend(upload_errors)
+    with bindist.make_uploader(
+        mirror=mirror,
+        force=args.force,
+        update_index=args.update_index,
+        signing_key=signing_key,
+        base_image=args.base_image,
+    ) as uploader:
+        skipped, upload_errors = uploader.push(specs=specs)
+        failed.extend(upload_errors)
+        if not upload_errors and args.tag:
+            uploader.tag(args.tag, roots)
 
     if skipped:
         if len(specs) == 1:
@@ -520,13 +481,6 @@ def push_fn(args):
         ),
     )
 
-    # Update the OCI index if requested
-    if target_image and len(skipped) < len(specs) and args.update_index:
-        with tempfile.TemporaryDirectory(
-            dir=spack.stage.get_stage_root()
-        ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
-            bindist._oci_update_index(target_image, tmpdir, executor)
-
 
 def install_fn(args):
     """install from a binary package"""
@@ -814,7 +768,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
     url = mirror.push_url
 
     with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
-        bindist.generate_package_index(url, tmpdir)
+        bindist._url_generate_package_index(url, tmpdir)
 
     if update_keys:
         keys_url = url_util.join(
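
For reference, the unified code path above corresponds to an invocation along the
lines of the following — the mirror and spec names are made up, and the flags are
the ones wired to args.force, args.update_index and args.tag in push_fn:

    spack buildcache push --unsigned --update-index --tag nightly my-mirror hdf5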

View File

@@ -224,7 +224,7 @@ def gpg_publish(args):
         mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
 
     with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
-        spack.binary_distribution.push_keys(
+        spack.binary_distribution._url_push_keys(
             mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
         )

View File

@@ -24,5 +24,6 @@ def post_install(spec, explicit):
     # Push the package to all autopush mirrors
     for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
         signing_key = bindist.select_signing_key() if mirror.signed else None
-        bindist.push_or_raise([spec], out_url=mirror.push_url, signing_key=signing_key, force=True)
+        with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
+            uploader.push_or_raise([spec])
         tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")
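
Since push_or_raise propagates PushToBuildCacheError, a failed autopush aborts the
hook. A caller that should only warn could wrap the same pattern used by
push_to_build_cache in ci.py above — a sketch, with mirror and spec assumed to be
in scope as in the hook:

    import llnl.util.tty as tty

    import spack.binary_distribution as bindist

    try:
        with bindist.make_uploader(mirror=mirror, force=True) as uploader:
            uploader.push_or_raise([spec])
    except bindist.PushToBuildCacheError as e:
        tty.error(f"Problem writing to {mirror.push_url}: {e}")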

View File

@@ -357,7 +357,7 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
     assert len(keys) == 1
     fpr = keys[0]
-    bindist.push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
+    bindist._url_push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
 
     # dir 2: import the key from the mirror, and confirm that its fingerprint
     # matches the one created above
@@ -492,7 +492,7 @@ def mock_list_url(url, recursive=False):
     test_url = "file:///fake/keys/dir"
 
     with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
-        bindist.generate_package_index(test_url, str(tmp_path))
+        bindist._url_generate_package_index(test_url, str(tmp_path))
 
     assert (
         "Warning: Encountered problem listing packages at "
@@ -513,7 +513,7 @@ def mock_list_url(url, recursive=False):
         bindist.generate_key_index(url, str(tmp_path))
 
     with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
-        bindist.generate_package_index(url, str(tmp_path))
+        bindist._url_generate_package_index(url, str(tmp_path))
 
     assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err

View File

@@ -10,6 +10,7 @@
 import spack.binary_distribution as bd
 import spack.main
+import spack.mirror
 import spack.spec
 import spack.util.url
@@ -22,17 +23,21 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
     specs = [spec]
 
-    # Runs fine the first time, second time it's a no-op
-    out_url = spack.util.url.path_to_file_url(str(tmp_path))
-    skipped = bd.push_or_raise(specs, out_url, signing_key=None)
-    assert not skipped
+    # populate cache, everything is new
+    mirror = spack.mirror.Mirror.from_local_path(str(tmp_path))
+    with bd.make_uploader(mirror) as uploader:
+        skipped = uploader.push_or_raise(specs)
+        assert not skipped
 
-    skipped = bd.push_or_raise(specs, out_url, signing_key=None)
-    assert skipped == specs
+    # should skip all
+    with bd.make_uploader(mirror) as uploader:
+        skipped = uploader.push_or_raise(specs)
+        assert skipped == specs
 
-    # Should work fine with force=True
-    skipped = bd.push_or_raise(specs, out_url, signing_key=None, force=True)
-    assert not skipped
+    # with force=True none should be skipped
+    with bd.make_uploader(mirror, force=True) as uploader:
+        skipped = uploader.push_or_raise(specs)
+        assert not skipped
 
     # Remove the tarball, which should cause push to push.
     os.remove(
@@ -42,5 +47,6 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
         / bd.tarball_name(spec, ".spack")
     )
 
-    skipped = bd.push_or_raise(specs, out_url, signing_key=None)
-    assert not skipped
+    with bd.make_uploader(mirror) as uploader:
+        skipped = uploader.push_or_raise(specs)
+        assert not skipped

View File

@@ -7,6 +7,7 @@
 import json
 import os
 import shutil
+from typing import List
 
 import pytest
@@ -16,6 +17,7 @@
 import spack.environment as ev
 import spack.error
 import spack.main
+import spack.mirror
 import spack.spec
 import spack.util.url
 from spack.spec import Spec
@@ -380,18 +382,22 @@ def test_correct_specs_are_pushed(
     # Concretize dttop and add it to the temporary database (without prefixes)
     spec = default_mock_concretization("dttop")
     temporary_store.db.add(spec, directory_layout=None)
-    slash_hash = "/{0}".format(spec.dag_hash())
+    slash_hash = f"/{spec.dag_hash()}"
 
-    packages_to_push = []
+    class DontUpload(spack.binary_distribution.Uploader):
+        def __init__(self):
+            super().__init__(spack.mirror.Mirror.from_local_path(str(tmpdir)), False, False)
+            self.pushed = []
 
-    def fake_push(specs, *args, **kwargs):
-        assert all(isinstance(s, Spec) for s in specs)
-        packages_to_push.extend(s.name for s in specs)
-        skipped = []
-        errors = []
-        return skipped, errors
+        def push(self, specs: List[spack.spec.Spec]):
+            self.pushed.extend(s.name for s in specs)
+            return [], []  # nothing skipped, nothing errored
 
-    monkeypatch.setattr(spack.binary_distribution, "_push", fake_push)
+    uploader = DontUpload()
+
+    monkeypatch.setattr(
+        spack.binary_distribution, "make_uploader", lambda *args, **kwargs: uploader
+    )
 
     buildcache_create_args = ["create", "--unsigned"]
@@ -403,10 +409,10 @@ def fake_push(specs, *args, **kwargs):
     buildcache(*buildcache_create_args)
 
     # Order is not guaranteed, so we can't just compare lists
-    assert set(packages_to_push) == set(expected)
+    assert set(uploader.pushed) == set(expected)
 
     # Ensure no duplicates
-    assert len(set(packages_to_push)) == len(packages_to_push)
+    assert len(set(uploader.pushed)) == len(uploader.pushed)
 
 
 @pytest.mark.parametrize("signed", [True, False])

View File

@@ -687,7 +687,7 @@ def mock_push_or_raise(*args, **kwargs):
             "Encountered problem pushing binary <url>: <expection>"
         )
 
-    monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise)
+    monkeypatch.setattr(spack.binary_distribution.Uploader, "push_or_raise", mock_push_or_raise)
 
     with working_dir(rebuild_env.env_dir):
         activate_rebuild_env(tmp_path, pkg_name, rebuild_env)
@@ -1022,7 +1022,7 @@ def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
     def push_or_raise(*args, **kwargs):
         raise spack.binary_distribution.PushToBuildCacheError("Error: Access Denied")
 
-    monkeypatch.setattr(spack.binary_distribution, "push_or_raise", push_or_raise)
+    monkeypatch.setattr(spack.binary_distribution.Uploader, "push_or_raise", push_or_raise)
 
     # Input doesn't matter, as we are faking exceptional output
     url = tmp_path.as_uri()

View File

@@ -610,10 +610,9 @@ def test_install_from_binary_with_missing_patch_succeeds(
     temporary_store.db.add(s, directory_layout=temporary_store.layout, explicit=True)
 
     # Push it to a binary cache
-    build_cache = tmp_path / "my_build_cache"
-    binary_distribution.push_or_raise(
-        [s], out_url=build_cache.as_uri(), signing_key=None, force=False
-    )
+    mirror = spack.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
+    with binary_distribution.make_uploader(mirror=mirror) as uploader:
+        uploader.push_or_raise([s])
 
     # Now re-install it.
     s.package.do_uninstall()
@@ -624,7 +623,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
     s.package.do_install()
 
     # Binary install: succeeds, we don't need the patch.
-    spack.mirror.add(spack.mirror.Mirror.from_local_path(str(build_cache)))
+    spack.mirror.add(mirror)
     s.package.do_install(package_cache_only=True, dependencies_cache_only=True, unsigned=True)
     assert temporary_store.db.query_local_by_spec_hash(s.dag_hash())