spack.mirror: split into submodules (#47936)

Author: Harmen Stoppels
Date: 2024-12-05 18:09:08 +01:00 (committed by GitHub)
Parent: 1f2a68f2b6
Commit: 4693b323ac
32 changed files with 578 additions and 532 deletions
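
The monolithic ``spack.mirror`` module is split into ``spack.mirrors.mirror`` (the ``Mirror`` and ``MirrorCollection`` classes), ``spack.mirrors.layout`` (``MirrorLayout`` and its subclasses), and ``spack.mirrors.utils`` (mirror creation and config helpers); ``MirrorError`` moves to ``spack.error``. A minimal sketch of the import migration implied by the diff (URL and mirror name are illustrative):

# Before (single module):
import spack.mirror
m = spack.mirror.Mirror("https://example.com", name="example")
spack.mirror.add(m)

# After (split into submodules):
import spack.mirrors.mirror   # Mirror, MirrorCollection
import spack.mirrors.layout   # MirrorLayout, DefaultLayout, OCILayout
import spack.mirrors.utils    # create(), add(), remove(), MirrorStats, ...

m = spack.mirrors.mirror.Mirror("https://example.com", name="example")
spack.mirrors.utils.add(m)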

View File

@ -222,6 +222,9 @@ def setup(sphinx):
("py:class", "spack.traverse.EdgeAndDepth"),
("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
("py:class", "spack.compiler.CompilerCache"),
("py:class", "spack.mirrors.mirror.Mirror"),
("py:class", "spack.mirrors.layout.MirrorLayout"),
("py:class", "spack.mirrors.utils.MirrorStats"),
# TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"),
]
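
(For context: ``nitpick_ignore`` is Sphinx's list of cross-references to skip in nitpicky mode, given as (role, target) pairs; the three new entries above cover the relocated classes. A sketch of the surrounding setting in the docs ``conf.py``:

nitpick_ignore = [
    ("py:class", "spack.mirrors.mirror.Mirror"),
    ("py:class", "spack.mirrors.layout.MirrorLayout"),
    ("py:class", "spack.mirrors.utils.MirrorStats"),
]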

View File

@ -40,7 +40,7 @@
import spack.hash_types as ht
import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.mirrors.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
@ -369,7 +369,7 @@ def update(self, with_cooldown=False):
on disk under ``_index_cache_root``)."""
self._init_local_index_cache()
configured_mirror_urls = [
m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
]
items_to_remove = []
spec_cache_clear_needed = False
@ -1176,7 +1176,7 @@ def _url_upload_tarball_and_specfile(
class Uploader:
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
self.mirror = mirror
self.force = force
self.update_index = update_index
@ -1224,7 +1224,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class OCIUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
mirror: spack.mirrors.mirror.Mirror,
force: bool,
update_index: bool,
base_image: Optional[str],
@ -1273,7 +1273,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class URLUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
mirror: spack.mirrors.mirror.Mirror,
force: bool,
update_index: bool,
signing_key: Optional[str],
@ -1297,7 +1297,7 @@ def push(
def make_uploader(
mirror: spack.mirror.Mirror,
mirror: spack.mirrors.mirror.Mirror,
force: bool = False,
update_index: bool = False,
signing_key: Optional[str] = None,
@ -1953,9 +1953,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
"signature_verified": "true-if-binary-pkg-was-already-verified"
}
"""
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
binary=True
).values()
configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
spack.mirrors.mirror.MirrorCollection(binary=True).values()
)
if not configured_mirrors:
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
@ -1980,7 +1980,7 @@ def fetch_url_to_mirror(url):
for mirror in configured_mirrors:
if mirror.fetch_url == url:
return mirror
return spack.mirror.Mirror(url)
return spack.mirrors.mirror.Mirror(url)
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
@ -2650,7 +2650,7 @@ def try_direct_fetch(spec, mirrors=None):
specfile_is_signed = False
found_specs = []
binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
for mirror in binary_mirrors:
buildcache_fetch_url_json = url_util.join(
@ -2711,7 +2711,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec is None:
return []
if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
tty.debug("No Spack mirrors are currently configured")
return {}
@ -2750,7 +2750,7 @@ def clear_spec_cache():
def get_keys(install=False, trust=False, force=False, mirrors=None):
"""Get pgp public keys available on mirror with suffix .pub"""
mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
if not mirror_collection:
tty.die("Please add a spack mirror to allow " + "download of build caches.")
@ -2805,7 +2805,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
def _url_push_keys(
*mirrors: Union[spack.mirror.Mirror, str],
*mirrors: Union[spack.mirrors.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
update_index: bool = False,
@ -2872,7 +2872,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
"""
rebuilds = {}
for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))
rebuild_list = []
@ -2916,7 +2916,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
def download_buildcache_entry(file_descriptions, mirror_url=None):
if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
tty.die(
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
)
@ -2925,7 +2925,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
return _download_buildcache_entry(mirror_root, file_descriptions)
for mirror in spack.mirror.MirrorCollection(binary=True).values():
for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
if _download_buildcache_entry(mirror_root, file_descriptions):

View File

@ -37,7 +37,7 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.mirror
import spack.mirrors.mirror
import spack.platforms
import spack.spec
import spack.store
@ -91,7 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
# Promote (relative) paths to file urls
self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
@property
def mirror_scope(self) -> spack.config.InternalConfigScope:

View File

@ -37,7 +37,8 @@
import spack.config as cfg
import spack.error
import spack.main
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.paths
import spack.repo
import spack.spec
@ -204,7 +205,7 @@ def _print_staging_summary(spec_labels, stages, rebuild_decisions):
if not stages:
return
mirrors = spack.mirror.MirrorCollection(binary=True)
mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
tty.msg("Checked the following mirrors for binaries:")
for m in mirrors.values():
tty.msg(f" {m.fetch_url}")
@ -797,7 +798,7 @@ def ensure_expected_target_path(path):
path = path.replace("\\", "/")
return path
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors:
raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")
@ -1323,7 +1324,7 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirror.Mirror.from_url(mirror_url)
mirror = spack.mirrors.mirror.Mirror.from_url(mirror_url)
try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
@ -1343,7 +1344,7 @@ def remove_other_mirrors(mirrors_to_keep, scope=None):
mirrors_to_remove.append(name)
for mirror_name in mirrors_to_remove:
spack.mirror.remove(mirror_name, scope)
spack.mirrors.utils.remove(mirror_name, scope)
def copy_files_to_artifacts(src, artifacts_dir):

View File

@ -16,7 +16,7 @@
import spack.bootstrap.config
import spack.bootstrap.core
import spack.config
import spack.mirror
import spack.mirrors.utils
import spack.spec
import spack.stage
import spack.util.path
@ -400,7 +400,7 @@ def _mirror(args):
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
spack.mirrors.utils.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)
if args.binary_packages:

View File

@ -21,7 +21,7 @@
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.mirror
import spack.mirrors.mirror
import spack.oci.oci
import spack.spec
import spack.stage
@ -392,7 +392,7 @@ def push_fn(args):
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirror.Mirror)
assert isinstance(mirror, spack.mirrors.mirror.Mirror)
push_url = mirror.push_url
@ -750,7 +750,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
copy_buildcache_file(copy_file["src"], dest)
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
# Special case OCI images for now.
try:
image_ref = spack.oci.oci.image_from_mirror(mirror)

View File

@ -20,7 +20,7 @@
import spack.config as cfg
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.mirrors.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
@ -240,7 +240,7 @@ def ci_reindex(args):
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
mirror = spack.mirror.Mirror(remote_mirror_url)
mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
buildcache.update_index(mirror, update_keys=True)
@ -328,7 +328,7 @@ def ci_rebuild(args):
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors:
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")

View File

@ -14,7 +14,8 @@
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.reporters
import spack.spec
import spack.store
@ -689,31 +690,31 @@ def mirror_name_or_url(m):
# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m or m in (".", ".."):
return spack.mirror.Mirror(m)
return spack.mirrors.mirror.Mirror(m)
# Otherwise, the named mirror is required to exist.
try:
return spack.mirror.require_mirror_name(m)
return spack.mirrors.utils.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e
def mirror_url(url):
try:
return spack.mirror.Mirror.from_url(url)
return spack.mirrors.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
def mirror_directory(path):
try:
return spack.mirror.Mirror.from_local_path(path)
return spack.mirrors.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
def mirror_name(name):
try:
return spack.mirror.require_mirror_name(name)
return spack.mirrors.utils.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
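
These helpers are argparse ``type=`` callables: a ``ValueError`` is converted to ``ArgumentTypeError`` so bad input surfaces as a normal usage error. A hedged sketch of the wiring (the parser and flags are illustrative, not Spack's actual CLI definitions):

import argparse

parser = argparse.ArgumentParser(prog="spack mirror")
group = parser.add_mutually_exclusive_group()
group.add_argument("--mirror-name", type=mirror_name)  # must name a configured mirror
group.add_argument("--mirror-url", type=mirror_url)    # must parse as a mirror URL

# e.g. "--mirror-name nope" fails with: argument --mirror-name: no mirror named "nope"
args = parser.parse_args(["--mirror-url", "https://example.com"])
print(args.mirror_url.fetch_url)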

View File

@ -8,7 +8,7 @@
import tempfile
import spack.binary_distribution
import spack.mirror
import spack.mirrors.mirror
import spack.paths
import spack.stage
import spack.util.gpg
@ -217,11 +217,11 @@ def gpg_publish(args):
mirror = None
if args.directory:
url = spack.util.url.path_to_file_url(args.directory)
mirror = spack.mirror.Mirror(url, url)
mirror = spack.mirrors.mirror.Mirror(url, url)
elif args.mirror_name:
mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(

View File

@ -14,7 +14,8 @@
import spack.concretize
import spack.config
import spack.environment as ev
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.repo
import spack.spec
import spack.util.web as web_util
@ -365,15 +366,15 @@ def mirror_add(args):
connection["autopush"] = args.autopush
if args.signed is not None:
connection["signed"] = args.signed
mirror = spack.mirror.Mirror(connection, name=args.name)
mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
else:
mirror = spack.mirror.Mirror(args.url, name=args.name)
spack.mirror.add(mirror, args.scope)
mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
spack.mirrors.utils.add(mirror, args.scope)
def mirror_remove(args):
"""remove a mirror by name"""
spack.mirror.remove(args.name, args.scope)
spack.mirrors.utils.remove(args.name, args.scope)
def _configure_mirror(args):
@ -382,7 +383,7 @@ def _configure_mirror(args):
if args.name not in mirrors:
tty.die(f"No mirror found with name {args.name}.")
entry = spack.mirror.Mirror(mirrors[args.name], args.name)
entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
direction = "fetch" if args.fetch else "push" if args.push else None
changes = {}
if args.url:
@ -449,7 +450,7 @@ def mirror_set_url(args):
def mirror_list(args):
"""print out available mirrors to the console"""
mirrors = spack.mirror.MirrorCollection(scope=args.scope)
mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
if not mirrors:
tty.msg("No mirrors configured.")
return
@ -489,9 +490,9 @@ def concrete_specs_from_user(args):
def extend_with_additional_versions(specs, num_versions):
if num_versions == "all":
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
else:
mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
mirror_specs = [x.concretized() for x in mirror_specs]
return mirror_specs
@ -570,7 +571,7 @@ def concrete_specs_from_environment():
def all_specs_with_all_versions():
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
mirror_specs.sort(key=lambda s: (s.name, s.version))
return mirror_specs
@ -659,19 +660,21 @@ def _specs_and_action(args):
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
)
for candidate in mirror_specs:
pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
pkg_obj = pkg_cls(spack.spec.Spec(candidate))
mirror_stats.next_spec(pkg_obj.spec)
spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
process_mirror_stats(*mirror_stats.stats())
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
present, mirrored, error = spack.mirrors.utils.create(
path, mirror_specs, skip_unstable_versions
)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
@ -681,7 +684,7 @@ def mirror_destroy(args):
mirror_url = None
if args.mirror_name:
result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
mirror_url = result.push_url
elif args.mirror_url:
mirror_url = args.mirror_url

View File

@ -192,3 +192,10 @@ def __reduce__(self):
def _make_stop_phase(msg, long_msg):
return StopPhase(msg, long_msg)
class MirrorError(SpackError):
"""Superclass of all mirror-creation related errors."""
def __init__(self, msg, long_msg=None):
super().__init__(msg, long_msg)
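
Hosting ``MirrorError`` in ``spack.error`` lets the new ``spack.mirrors.*`` submodules raise it without importing one another. A minimal usage sketch (the helper is hypothetical; the pattern matches ``mirror_cache_and_stats`` below):

import os

from llnl.util.filesystem import mkdirp
from spack.error import MirrorError

def ensure_mirror_root(path: str) -> None:
    """Hypothetical helper: create a mirror root, or fail with MirrorError."""
    if not os.path.isdir(path):
        try:
            mkdirp(path)
        except OSError as e:
            raise MirrorError(f"Cannot create directory '{path}':", str(e))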

View File

@ -6,7 +6,7 @@
import llnl.util.tty as tty
import spack.binary_distribution as bindist
import spack.mirror
import spack.mirrors.mirror
def post_install(spec, explicit):
@ -22,7 +22,7 @@ def post_install(spec, explicit):
return
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
for mirror in spack.mirrors.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])

View File

@ -56,7 +56,7 @@
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.mirror
import spack.mirrors.mirror
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
@ -491,7 +491,7 @@ def _try_install_from_binary_cache(
timer: timer to keep track of binary install phases.
"""
# Early exit if no binary mirrors are configured.
if not spack.mirror.MirrorCollection(binary=True):
if not spack.mirrors.mirror.MirrorCollection(binary=True):
return False
tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")

View File

@ -0,0 +1,146 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
from typing import Optional
import llnl.url
import llnl.util.symlink
from llnl.util.filesystem import mkdirp
import spack.fetch_strategy
import spack.oci.image
import spack.repo
import spack.spec
from spack.error import MirrorError
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
def __init__(self, path: str) -> None:
self.path = path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, we use the
# human-readable alias. Mirrors of a VCS checkout currently have no digest, which is why
# an alias is required and a digest is optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
def _determine_extension(fetcher):
if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
if ext:
# Remove any leading dots
ext = ext.lstrip(".")
else:
msg = """\
Unable to parse extension from {0}.
If this URL is for a tarball but does not include the file extension
in the name, you can explicitly declare it with the following syntax:
version('1.2.3', 'hash', extension='tar.gz')
If this URL is for a download like a .jar or .whl that does not need
to be expanded, or an uncompressed installation script, you can tell
Spack not to expand it with the following syntax:
version('1.2.3', 'hash', expand=False)
"""
raise MirrorError(msg.format(fetcher.url))
else:
# If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
ext = "tar.gz"
return ext
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
if spec:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
versions = pkg_cls.versions.get(spec.version, {})
ext = versions.get("extension", None)
# If the spec does not explicitly specify an extension (the default case),
# then try to determine it automatically. An extension can only be
# specified for the primary source of the package (e.g. the source code
# identified in the 'version' declaration). Resources/patches don't have
# an option to specify an extension, so it must be inferred for those.
ext = ext or _determine_extension(fetcher)
if ext:
per_package_ref += ".%s" % ext
global_ref = fetcher.mirror_id()
if global_ref:
global_ref = os.path.join("_source-cache", global_ref)
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)
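
A hedged sketch of how these layouts behave, with paths in the style of the mirror tests further down (values illustrative):

import os
import tempfile

# Digest-addressed storage plus a human-readable alias:
layout = DefaultLayout(
    alias_path=os.path.join("zlib", "zlib-1.2.11.tar.gz"),
    digest_path=os.path.join("_source-cache", "archive", "c3", "c3e5.tar.gz"),
)
assert list(layout) == [layout.digest_path, layout.alias]
layout.make_alias(tempfile.mkdtemp())  # symlinks the alias to the digest path

# OCI blobs are always content-addressed: OCILayout(digest).path is
# "blobs/<algorithm>/<digest>".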

View File

@ -2,42 +2,20 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This file contains code for creating spack mirror directories. A
mirror is an organized hierarchy containing specially named archive
files. This enables spack to know where to find files in a mirror if
the main server for a particular package is down, or, if the computer
where spack is run is not connected to the internet, to download
packages directly from a mirror (e.g., on an intranet).
"""
import collections
import collections.abc
import operator
import os
import os.path
import sys
import traceback
import urllib.parse
from typing import Any, Dict, Optional, Tuple, Union
import llnl.url
import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack.caches
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.version
from spack.error import MirrorError
#: What schemes do we support
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci")
@ -490,380 +468,3 @@ def __iter__(self):
def __len__(self):
return len(self._mirrors)
def _determine_extension(fetcher):
if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
if ext:
# Remove any leading dots
ext = ext.lstrip(".")
else:
msg = """\
Unable to parse extension from {0}.
If this URL is for a tarball but does not include the file extension
in the name, you can explicitly declare it with the following syntax:
version('1.2.3', 'hash', extension='tar.gz')
If this URL is for a download like a .jar or .whl that does not need
to be expanded, or an uncompressed installation script, you can tell
Spack not to expand it with the following syntax:
version('1.2.3', 'hash', expand=False)
"""
raise MirrorError(msg.format(fetcher.url))
else:
# If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
ext = "tar.gz"
return ext
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
def __init__(self, path: str) -> None:
self.path = path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, we use the
# human-readable alias. Mirrors of a VCS checkout currently have no digest, which is why
# an alias is required and a digest is optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
if spec:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
versions = pkg_cls.versions.get(spec.version, {})
ext = versions.get("extension", None)
# If the spec does not explicitly specify an extension (the default case),
# then try to determine it automatically. An extension can only be
# specified for the primary source of the package (e.g. the source code
# identified in the 'version' declaration). Resources/patches don't have
# an option to specify an extension, so it must be inferred for those.
ext = ext or _determine_extension(fetcher)
if ext:
per_package_ref += ".%s" % ext
global_ref = fetcher.mirror_id()
if global_ref:
global_ref = os.path.join("_source-cache", global_ref)
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)
def get_all_versions(specs):
"""Given a set of initial specs, return a new set of specs that includes
each version of each package in the original set.
Note that if any spec in the original set specifies properties other than
version, this information will be omitted in the new set; for example, the
new set of specs will not include variant settings.
"""
version_specs = []
for spec in specs:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
# Skip any package that has no known versions.
if not pkg_cls.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
continue
for version in pkg_cls.versions:
version_spec = spack.spec.Spec(pkg_cls.name)
version_spec.versions = spack.version.VersionList([version])
version_specs.append(version_spec)
return version_specs
def get_matching_versions(specs, num_versions=1):
"""Get a spec for EACH known version matching any spec in the list.
For concrete specs, this retrieves the concrete version and, if more
than one version per spec is requested, retrieves the latest versions
of the package.
"""
matching = []
for spec in specs:
pkg = spec.package
# Skip any package that has no known versions.
if not pkg.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
continue
pkg_versions = num_versions
version_order = list(reversed(sorted(pkg.versions)))
matching_spec = []
if spec.concrete:
matching_spec.append(spec)
pkg_versions -= 1
if spec.version in version_order:
version_order.remove(spec.version)
for v in version_order:
# Generate no more than num_versions versions for each spec.
if pkg_versions < 1:
break
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
s = spack.spec.Spec(pkg.name)
s.versions = spack.version.VersionList([v])
s.variants = spec.variants.copy()
# This is needed to avoid hanging references during the
# concretization phase
s.variants.spec = s
matching_spec.append(s)
pkg_versions -= 1
if not matching_spec:
tty.warn("No known version matches spec: %s" % spec)
matching.extend(matching_spec)
return matching
def create(path, specs, skip_unstable_versions=False):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
Arguments:
path: Path to create a mirror directory hierarchy in.
specs: Any package versions matching these specs will be added \
to the mirror.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
Return Value:
Returns a tuple of lists: (present, mirrored, error)
* present: Package specs that were already present.
* mirrored: Package specs that were successfully mirrored.
* error: Package specs that failed to mirror due to some error.
"""
# automatically spec-ify anything in the specs array.
specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]
mirror_cache, mirror_stats = mirror_cache_and_stats(path, skip_unstable_versions)
for spec in specs:
mirror_stats.next_spec(spec)
create_mirror_from_package_object(spec.package, mirror_cache, mirror_stats)
return mirror_stats.stats()
def mirror_cache_and_stats(path, skip_unstable_versions=False):
"""Return both a mirror cache and a mirror stats, starting from the path
where a mirror ought to be created.
Args:
path (str): path to create a mirror directory hierarchy in.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
"""
# Get the absolute path of the root before we start jumping around.
if not os.path.isdir(path):
try:
mkdirp(path)
except OSError as e:
raise MirrorError("Cannot create directory '%s':" % path, str(e))
mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
mirror_stats = MirrorStats()
return mirror_cache, mirror_stats
def add(mirror: Mirror, scope=None):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
mirrors = syaml.syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)
def remove(name, scope):
"""Remove the named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if name not in mirrors:
tty.die("No mirror with name %s" % name)
mirrors.pop(name)
spack.config.set("mirrors", mirrors, scope=scope)
tty.msg("Removed mirror %s." % name)
class MirrorStats:
def __init__(self):
self.present = {}
self.new = {}
self.errors = set()
self.current_spec = None
self.added_resources = set()
self.existing_resources = set()
def next_spec(self, spec):
self._tally_current_spec()
self.current_spec = spec
def _tally_current_spec(self):
if self.current_spec:
if self.added_resources:
self.new[self.current_spec] = len(self.added_resources)
if self.existing_resources:
self.present[self.current_spec] = len(self.existing_resources)
self.added_resources = set()
self.existing_resources = set()
self.current_spec = None
def stats(self):
self._tally_current_spec()
return list(self.present), list(self.new), list(self.errors)
def already_existed(self, resource):
# If an error occurred after caching a subset of a spec's
# resources, a secondary attempt may consider them already added
if resource not in self.added_resources:
self.existing_resources.add(resource)
def added(self, resource):
self.added_resources.add(resource)
def error(self):
self.errors.add(self.current_spec)
def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
"""Add a single package object to a mirror.
The package object is only required to have an associated spec
with a concrete version.
Args:
pkg_obj (spack.package_base.PackageBase): package object to be added.
mirror_cache (spack.caches.MirrorCache): mirror where to add the spec.
mirror_stats (spack.mirror.MirrorStats): statistics on the current mirror
Return:
True if the spec was added successfully, False otherwise
"""
tty.msg("Adding package {} to mirror".format(pkg_obj.spec.format("{name}{@version}")))
num_retries = 3
while num_retries > 0:
try:
# Includes patches and resources
with pkg_obj.stage as pkg_stage:
pkg_stage.cache_mirror(mirror_cache, mirror_stats)
exception = None
break
except Exception as e:
exc_tuple = sys.exc_info()
exception = e
num_retries -= 1
if exception:
if spack.config.get("config:debug"):
traceback.print_exception(file=sys.stderr, *exc_tuple)
else:
tty.warn(
"Error while fetching %s" % pkg_obj.spec.cformat("{name}{@version}"),
getattr(exception, "message", exception),
)
mirror_stats.error()
return False
return True
def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = MirrorCollection().get(mirror_name)
if not mirror:
raise ValueError(f'no mirror named "{mirror_name}"')
return mirror
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""
def __init__(self, msg, long_msg=None):
super().__init__(msg, long_msg)

View File

@ -0,0 +1,262 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import sys
import traceback
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack.caches
import spack.config
import spack.error
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
import spack.version
from spack.error import MirrorError
from spack.mirrors.mirror import Mirror, MirrorCollection
def get_all_versions(specs):
"""Given a set of initial specs, return a new set of specs that includes
each version of each package in the original set.
Note that if any spec in the original set specifies properties other than
version, this information will be omitted in the new set; for example, the
new set of specs will not include variant settings.
"""
version_specs = []
for spec in specs:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
# Skip any package that has no known versions.
if not pkg_cls.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
continue
for version in pkg_cls.versions:
version_spec = spack.spec.Spec(pkg_cls.name)
version_spec.versions = spack.version.VersionList([version])
version_specs.append(version_spec)
return version_specs
def get_matching_versions(specs, num_versions=1):
"""Get a spec for EACH known version matching any spec in the list.
For concrete specs, this retrieves the concrete version and, if more
than one version per spec is requested, retrieves the latest versions
of the package.
"""
matching = []
for spec in specs:
pkg = spec.package
# Skip any package that has no known versions.
if not pkg.versions:
tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
continue
pkg_versions = num_versions
version_order = list(reversed(sorted(pkg.versions)))
matching_spec = []
if spec.concrete:
matching_spec.append(spec)
pkg_versions -= 1
if spec.version in version_order:
version_order.remove(spec.version)
for v in version_order:
# Generate no more than num_versions versions for each spec.
if pkg_versions < 1:
break
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
s = spack.spec.Spec(pkg.name)
s.versions = spack.version.VersionList([v])
s.variants = spec.variants.copy()
# This is needed to avoid hanging references during the
# concretization phase
s.variants.spec = s
matching_spec.append(s)
pkg_versions -= 1
if not matching_spec:
tty.warn("No known version matches spec: %s" % spec)
matching.extend(matching_spec)
return matching
def create(path, specs, skip_unstable_versions=False):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
Arguments:
path: Path to create a mirror directory hierarchy in.
specs: Any package versions matching these specs will be added \
to the mirror.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
Return Value:
Returns a tuple of lists: (present, mirrored, error)
* present: Package specs that were already present.
* mirrored: Package specs that were successfully mirrored.
* error: Package specs that failed to mirror due to some error.
"""
# automatically spec-ify anything in the specs array.
specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]
mirror_cache, mirror_stats = mirror_cache_and_stats(path, skip_unstable_versions)
for spec in specs:
mirror_stats.next_spec(spec)
create_mirror_from_package_object(spec.package, mirror_cache, mirror_stats)
return mirror_stats.stats()
def mirror_cache_and_stats(path, skip_unstable_versions=False):
"""Return both a mirror cache and a mirror stats, starting from the path
where a mirror ought to be created.
Args:
path (str): path to create a mirror directory hierarchy in.
skip_unstable_versions: if true, this skips adding resources when
they do not have a stable archive checksum (as determined by
``fetch_strategy.stable_target``)
"""
# Get the absolute path of the root before we start jumping around.
if not os.path.isdir(path):
try:
mkdirp(path)
except OSError as e:
raise MirrorError("Cannot create directory '%s':" % path, str(e))
mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
mirror_stats = MirrorStats()
return mirror_cache, mirror_stats
def add(mirror: Mirror, scope=None):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
mirrors = syaml.syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)
def remove(name, scope):
"""Remove the named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml.syaml_dict()
if name not in mirrors:
tty.die("No mirror with name %s" % name)
mirrors.pop(name)
spack.config.set("mirrors", mirrors, scope=scope)
tty.msg("Removed mirror %s." % name)
class MirrorStats:
def __init__(self):
self.present = {}
self.new = {}
self.errors = set()
self.current_spec = None
self.added_resources = set()
self.existing_resources = set()
def next_spec(self, spec):
self._tally_current_spec()
self.current_spec = spec
def _tally_current_spec(self):
if self.current_spec:
if self.added_resources:
self.new[self.current_spec] = len(self.added_resources)
if self.existing_resources:
self.present[self.current_spec] = len(self.existing_resources)
self.added_resources = set()
self.existing_resources = set()
self.current_spec = None
def stats(self):
self._tally_current_spec()
return list(self.present), list(self.new), list(self.errors)
def already_existed(self, resource):
# If an error occurred after caching a subset of a spec's
# resources, a secondary attempt may consider them already added
if resource not in self.added_resources:
self.existing_resources.add(resource)
def added(self, resource):
self.added_resources.add(resource)
def error(self):
self.errors.add(self.current_spec)
def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
"""Add a single package object to a mirror.
The package object is only required to have an associated spec
with a concrete version.
Args:
pkg_obj (spack.package_base.PackageBase): package object to be added.
mirror_cache (spack.caches.MirrorCache): mirror where to add the spec.
mirror_stats (spack.mirrors.utils.MirrorStats): statistics on the current mirror
Return:
True if the spec was added successfully, False otherwise
"""
tty.msg("Adding package {} to mirror".format(pkg_obj.spec.format("{name}{@version}")))
num_retries = 3
while num_retries > 0:
try:
# Includes patches and resources
with pkg_obj.stage as pkg_stage:
pkg_stage.cache_mirror(mirror_cache, mirror_stats)
exception = None
break
except Exception as e:
exc_tuple = sys.exc_info()
exception = e
num_retries -= 1
if exception:
if spack.config.get("config:debug"):
traceback.print_exception(file=sys.stderr, *exc_tuple)
else:
tty.warn(
"Error while fetching %s" % pkg_obj.spec.cformat("{name}{@version}"),
getattr(exception, "message", exception),
)
mirror_stats.error()
return False
return True
def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = MirrorCollection().get(mirror_name)
if not mirror:
raise ValueError(f'no mirror named "{mirror_name}"')
return mirror
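
A hedged sketch of the typical flow through these helpers, as driven by ``spack mirror create`` (package name and path are illustrative):

import spack.spec
import spack.mirrors.utils

# Expand each input spec to one spec per known (checksummed) version ...
specs = spack.mirrors.utils.get_all_versions([spack.spec.Spec("zlib")])

# ... then build a directory mirror and report what happened.
present, mirrored, error = spack.mirrors.utils.create(
    "/tmp/my-mirror", specs, skip_unstable_versions=True
)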

View File

@ -16,7 +16,8 @@
import llnl.util.tty as tty
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.oci.opener
import spack.stage
import spack.util.url
@ -213,7 +214,7 @@ def upload_manifest(
return digest, size
def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference:
def image_from_mirror(mirror: spack.mirrors.mirror.Mirror) -> ImageReference:
"""Given an OCI based mirror, extract the URL and image name from it"""
url = mirror.push_url
if not url.startswith("oci://"):
@ -385,5 +386,8 @@ def make_stage(
# is the `oci-layout` and `index.json` files, which are
# required by the spec.
return spack.stage.Stage(
fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
fetch_strategy,
mirror_paths=spack.mirrors.layout.OCILayout(digest),
name=digest.digest,
keep=keep,
)
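
Blobs staged this way are content-addressed through ``OCILayout``; a small sketch (assuming ``spack.oci.image.Digest.from_string``; the digest value is illustrative):

import spack.mirrors.layout
from spack.oci.image import Digest

digest = Digest.from_string("sha256:" + 64 * "0")
layout = spack.mirrors.layout.OCILayout(digest)
print(layout.path)  # blobs/sha256/000...0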

View File

@ -20,7 +20,7 @@
import llnl.util.lang
import spack.config
import spack.mirror
import spack.mirrors.mirror
import spack.parser
import spack.util.web
@ -367,11 +367,11 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
def credentials_from_mirrors(
domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None
domain: str, *, mirrors: Optional[Iterable[spack.mirrors.mirror.Mirror]] = None
) -> Optional[UsernamePassword]:
"""Filter out OCI registry credentials from a list of mirrors."""
mirrors = mirrors or spack.mirror.MirrorCollection().values()
mirrors = mirrors or spack.mirrors.mirror.MirrorCollection().values()
for mirror in mirrors:
# Prefer push credentials over fetch. Unlikely that those are different
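
Grounded in the opener tests further down, a sketch of resolving registry credentials from configured mirrors (values illustrative):

import spack.mirrors.mirror
from spack.oci.opener import credentials_from_mirrors

mirrors = [
    spack.mirrors.mirror.Mirror(
        {"url": "oci://a.example.com/image", "access_pair": ["user.a", "pass.a"]}
    ),
    spack.mirrors.mirror.Mirror("oci://c.example.com/image"),  # no credentials
]

print(credentials_from_mirrors("a.example.com", mirrors=mirrors))  # user.a / pass.a
print(credentials_from_mirrors("c.example.com", mirrors=mirrors))  # None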

View File

@ -40,7 +40,8 @@
import spack.error
import spack.fetch_strategy as fs
import spack.hooks
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.multimethod
import spack.patch
import spack.phase_callbacks
@ -1187,10 +1188,10 @@ def _make_resource_stage(self, root_stage, resource):
root=root_stage,
resource=resource,
name=self._resource_stage(resource),
mirror_paths=spack.mirror.default_mirror_layout(
mirror_paths=spack.mirrors.layout.default_mirror_layout(
resource.fetcher, os.path.join(self.name, pretty_resource_name)
),
mirrors=spack.mirror.MirrorCollection(source=True).values(),
mirrors=spack.mirrors.mirror.MirrorCollection(source=True).values(),
path=self.path,
)
@ -1202,7 +1203,7 @@ def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
format_string = "{name}-{version}"
pretty_name = self.spec.format_path(format_string)
mirror_paths = spack.mirror.default_mirror_layout(
mirror_paths = spack.mirrors.layout.default_mirror_layout(
fetcher, os.path.join(self.name, pretty_name), self.spec
)
# Construct a path where the stage should build..
@ -1211,7 +1212,7 @@ def _make_root_stage(self, fetcher):
stage = Stage(
fetcher,
mirror_paths=mirror_paths,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
mirrors=spack.mirrors.mirror.MirrorCollection(source=True).values(),
name=stage_name,
path=self.path,
search_fn=self._download_search,

View File

@ -16,7 +16,8 @@
import spack
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.repo
import spack.stage
import spack.util.spack_json as sjson
@ -329,12 +330,12 @@ def stage(self) -> "spack.stage.Stage":
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_ref = spack.mirrors.layout.default_mirror_layout(fetcher, per_package_ref)
self._stage = spack.stage.Stage(
fetcher,
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
mirror_paths=mirror_ref,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
mirrors=spack.mirrors.mirror.MirrorCollection(source=True).values(),
)
return self._stage

View File

@ -34,7 +34,8 @@
import spack.caches
import spack.config
import spack.error
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.utils
import spack.resource
import spack.spec
import spack.util.crypto
@ -353,8 +354,8 @@ def __init__(
url_or_fetch_strategy,
*,
name=None,
mirror_paths: Optional["spack.mirror.MirrorLayout"] = None,
mirrors: Optional[Iterable["spack.mirror.Mirror"]] = None,
mirror_paths: Optional["spack.mirrors.layout.MirrorLayout"] = None,
mirrors: Optional[Iterable["spack.mirrors.mirror.Mirror"]] = None,
keep=False,
path=None,
lock=True,
@ -601,7 +602,7 @@ def cache_local(self):
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_layout.path)
def cache_mirror(
self, mirror: "spack.caches.MirrorCache", stats: "spack.mirror.MirrorStats"
self, mirror: "spack.caches.MirrorCache", stats: "spack.mirrors.utils.MirrorStats"
) -> None:
"""Perform a fetch if the resource is not already cached

View File

@ -32,7 +32,7 @@
import spack.fetch_strategy
import spack.hooks.sbang as sbang
import spack.main
import spack.mirror
import spack.mirrors.mirror
import spack.paths
import spack.spec
import spack.stage
@ -324,8 +324,8 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
mirror = os.path.join(testpath, "mirror")
mirrors = {"test-mirror": url_util.path_to_file_url(mirror)}
mirrors = spack.mirror.MirrorCollection(mirrors)
mirror = spack.mirror.Mirror(url_util.path_to_file_url(mirror))
mirrors = spack.mirrors.mirror.MirrorCollection(mirrors)
mirror = spack.mirrors.mirror.Mirror(url_util.path_to_file_url(mirror))
gpg_dir1 = os.path.join(testpath, "gpg1")
gpg_dir2 = os.path.join(testpath, "gpg2")

View File

@ -9,7 +9,7 @@
import pytest
import spack.binary_distribution as bd
import spack.mirror
import spack.mirrors.mirror
import spack.spec
from spack.installer import PackageInstaller
@ -23,7 +23,7 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
specs = [spec]
# populate cache, everything is new
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path))
mirror = spack.mirrors.mirror.Mirror.from_local_path(str(tmp_path))
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
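
The same uploader flow outside of a test, pushing one installed spec to a local buildcache (path and package are illustrative; the spec must be installed):

import spack.binary_distribution as bd
import spack.mirrors.mirror
import spack.spec

spec = spack.spec.Spec("zlib").concretized()
mirror = spack.mirrors.mirror.Mirror.from_local_path("/tmp/my_build_cache")
with bd.make_uploader(mirror) as uploader:
    uploader.push_or_raise([spec])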

View File

@ -14,7 +14,7 @@
import spack.config
import spack.environment as ev
import spack.main
import spack.mirror
import spack.mirrors.utils
import spack.spec
_bootstrap = spack.main.SpackCommand("bootstrap")
@ -182,8 +182,8 @@ def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir
`spack bootstrap add`. Here we don't download data, since that would be an
expensive operation for a unit test.
"""
old_create = spack.mirror.create
monkeypatch.setattr(spack.mirror, "create", lambda p, s: old_create(p, []))
old_create = spack.mirrors.utils.create
monkeypatch.setattr(spack.mirrors.utils, "create", lambda p, s: old_create(p, []))
monkeypatch.setattr(spack.spec.Spec, "concretized", lambda p: p)
# Create the mirror in a temporary folder

View File

@ -16,7 +16,7 @@
import spack.environment as ev
import spack.error
import spack.main
import spack.mirror
import spack.mirrors.mirror
import spack.spec
import spack.util.url
from spack.installer import PackageInstaller
@ -385,7 +385,9 @@ def test_correct_specs_are_pushed(
class DontUpload(spack.binary_distribution.Uploader):
def __init__(self):
super().__init__(spack.mirror.Mirror.from_local_path(str(tmpdir)), False, False)
super().__init__(
spack.mirrors.mirror.Mirror.from_local_path(str(tmpdir)), False, False
)
self.pushed = []
def push(self, specs: List[spack.spec.Spec]):

View File

@ -11,7 +11,7 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.mirror
import spack.mirrors.utils
import spack.spec
import spack.util.url as url_util
import spack.version
@ -74,7 +74,7 @@ def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config, source_for_
mirror_dir = str(tmpdir_factory.mktemp("mirror-dir"))
specs = [spack.spec.Spec(x).concretized() for x in ["git-test", "trivial-pkg-with-valid-hash"]]
spack.mirror.create(mirror_dir, specs, skip_unstable_versions=True)
spack.mirrors.utils.create(mirror_dir, specs, skip_unstable_versions=True)
assert set(os.listdir(mirror_dir)) - set(["_source-cache"]) == set(
["trivial-pkg-with-valid-hash"]

View File

@ -16,7 +16,8 @@
import spack.database
import spack.error
import spack.installer
import spack.mirror
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.package_base
import spack.patch
import spack.repo
@ -615,7 +616,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
temporary_store.db.add(s, explicit=True)
# Push it to a binary cache
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
mirror = spack.mirrors.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
with binary_distribution.make_uploader(mirror=mirror) as uploader:
uploader.push_or_raise([s])
@ -628,7 +629,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
PackageInstaller([s.package], explicit=True).install()
# Binary install: succeeds, we don't need the patch.
spack.mirror.add(mirror)
spack.mirrors.utils.add(mirror)
PackageInstaller(
[s.package],
explicit=True,

View File

@ -14,7 +14,9 @@
import spack.caches
import spack.config
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.layout
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.patch
import spack.stage
import spack.util.executable
@ -60,7 +62,7 @@ def check_mirror():
with spack.config.override("mirrors", mirrors):
with spack.config.override("config:checksum", False):
specs = [Spec(x).concretized() for x in repos]
spack.mirror.create(mirror_root, specs)
spack.mirrors.utils.create(mirror_root, specs)
# Stage directory exists
assert os.path.isdir(mirror_root)
@ -68,7 +70,9 @@ def check_mirror():
for spec in specs:
fetcher = spec.package.fetcher
per_package_ref = os.path.join(spec.name, "-".join([spec.name, str(spec.version)]))
mirror_layout = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_layout = spack.mirrors.layout.default_mirror_layout(
fetcher, per_package_ref
)
expected_path = os.path.join(mirror_root, mirror_layout.path)
assert os.path.exists(expected_path)
@ -135,16 +139,16 @@ def test_all_mirror(mock_git_repository, mock_svn_repository, mock_hg_repository
@pytest.mark.parametrize(
"mirror",
[
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"fetch": "https://example.com/fetch", "push": "https://example.com/push"}
)
],
)
def test_roundtrip_mirror(mirror: spack.mirror.Mirror):
def test_roundtrip_mirror(mirror: spack.mirrors.mirror.Mirror):
mirror_yaml = mirror.to_yaml()
assert spack.mirror.Mirror.from_yaml(mirror_yaml) == mirror
assert spack.mirrors.mirror.Mirror.from_yaml(mirror_yaml) == mirror
mirror_json = mirror.to_json()
assert spack.mirror.Mirror.from_json(mirror_json) == mirror
assert spack.mirrors.mirror.Mirror.from_json(mirror_json) == mirror
@pytest.mark.parametrize(
@ -152,14 +156,14 @@ def test_roundtrip_mirror(mirror: spack.mirror.Mirror):
)
def test_invalid_yaml_mirror(invalid_yaml):
with pytest.raises(SpackYAMLError, match="error parsing YAML") as e:
spack.mirror.Mirror.from_yaml(invalid_yaml)
spack.mirrors.mirror.Mirror.from_yaml(invalid_yaml)
assert invalid_yaml in str(e.value)
@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_mirror(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
spack.mirror.Mirror.from_json(invalid_json)
spack.mirrors.mirror.Mirror.from_json(invalid_json)
exc_msg = str(e.value)
assert exc_msg.startswith("error parsing JSON mirror:")
assert error_message in exc_msg
@ -168,9 +172,9 @@ def test_invalid_json_mirror(invalid_json, error_message):
@pytest.mark.parametrize(
"mirror_collection",
[
spack.mirror.MirrorCollection(
spack.mirrors.mirror.MirrorCollection(
mirrors={
"example-mirror": spack.mirror.Mirror(
"example-mirror": spack.mirrors.mirror.Mirror(
"https://example.com/fetch", "https://example.com/push"
).to_dict()
}
@ -179,9 +183,15 @@ def test_invalid_json_mirror(invalid_json, error_message):
)
def test_roundtrip_mirror_collection(mirror_collection):
mirror_collection_yaml = mirror_collection.to_yaml()
assert spack.mirror.MirrorCollection.from_yaml(mirror_collection_yaml) == mirror_collection
assert (
spack.mirrors.mirror.MirrorCollection.from_yaml(mirror_collection_yaml)
== mirror_collection
)
mirror_collection_json = mirror_collection.to_json()
assert spack.mirror.MirrorCollection.from_json(mirror_collection_json) == mirror_collection
assert (
spack.mirrors.mirror.MirrorCollection.from_json(mirror_collection_json)
== mirror_collection
)
@pytest.mark.parametrize(
@ -189,14 +199,14 @@ def test_roundtrip_mirror_collection(mirror_collection):
)
def test_invalid_yaml_mirror_collection(invalid_yaml):
with pytest.raises(SpackYAMLError, match="error parsing YAML") as e:
spack.mirror.MirrorCollection.from_yaml(invalid_yaml)
spack.mirrors.mirror.MirrorCollection.from_yaml(invalid_yaml)
assert invalid_yaml in str(e.value)
@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_mirror_collection(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
spack.mirror.MirrorCollection.from_json(invalid_json)
spack.mirrors.mirror.MirrorCollection.from_json(invalid_json)
exc_msg = str(e.value)
assert exc_msg.startswith("error parsing JSON mirror collection:")
assert error_message in exc_msg
@ -205,7 +215,7 @@ def test_invalid_json_mirror_collection(invalid_json, error_message):
def test_mirror_archive_paths_no_version(mock_packages, mock_archive):
spec = Spec("trivial-install-test-package@=nonexistingversion").concretized()
fetcher = spack.fetch_strategy.URLFetchStrategy(url=mock_archive.url)
spack.mirror.default_mirror_layout(fetcher, "per-package-ref", spec)
spack.mirrors.layout.default_mirror_layout(fetcher, "per-package-ref", spec)
def test_mirror_with_url_patches(mock_packages, monkeypatch):
@ -238,10 +248,12 @@ def successful_make_alias(*args, **kwargs):
monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "expand", successful_expand)
monkeypatch.setattr(spack.patch, "apply_patch", successful_apply)
monkeypatch.setattr(spack.caches.MirrorCache, "store", record_store)
monkeypatch.setattr(spack.mirror.DefaultLayout, "make_alias", successful_make_alias)
monkeypatch.setattr(
spack.mirrors.layout.DefaultLayout, "make_alias", successful_make_alias
)
with spack.config.override("config:checksum", False):
spack.mirror.create(mirror_root, list(spec.traverse()))
spack.mirrors.utils.create(mirror_root, list(spec.traverse()))
assert {
"abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
@ -268,7 +280,7 @@ def test_mirror_layout_make_alias(tmpdir):
alias = os.path.join("zlib", "zlib-1.2.11.tar.gz")
path = os.path.join("_source-cache", "archive", "c3", "c3e5.tar.gz")
cache = spack.caches.MirrorCache(root=str(tmpdir), skip_unstable_versions=False)
layout = spack.mirror.DefaultLayout(alias, path)
layout = spack.mirrors.layout.DefaultLayout(alias, path)
cache.store(MockFetcher(), layout.path)
layout.make_alias(cache.root)
@ -288,7 +300,7 @@ def test_mirror_layout_make_alias(tmpdir):
)
def test_get_all_versions(specs, expected_specs):
specs = [Spec(s) for s in specs]
output_list = spack.mirror.get_all_versions(specs)
output_list = spack.mirrors.utils.get_all_versions(specs)
output_list = [str(x) for x in output_list]
# Compare sets since order is not important
assert set(output_list) == set(expected_specs)
@ -296,14 +308,14 @@ def test_get_all_versions(specs, expected_specs):
def test_update_1():
# No change
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert not m.update({"url": "https://example.com"})
assert m.to_dict() == "https://example.com"
def test_update_2():
# Change URL, shouldn't expand to {"url": ...} dict.
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert m.update({"url": "https://example.org"})
assert m.to_dict() == "https://example.org"
assert m.fetch_url == "https://example.org"
@ -312,7 +324,7 @@ def test_update_2():
def test_update_3():
# Change fetch url, ensure minimal config
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert m.update({"url": "https://example.org"}, "fetch")
assert m.to_dict() == {"url": "https://example.com", "fetch": "https://example.org"}
assert m.fetch_url == "https://example.org"
@ -321,7 +333,7 @@ def test_update_3():
def test_update_4():
# Change push url, ensure minimal config
m = spack.mirror.Mirror("https://example.com")
m = spack.mirrors.mirror.Mirror("https://example.com")
assert m.update({"url": "https://example.org"}, "push")
assert m.to_dict() == {"url": "https://example.com", "push": "https://example.org"}
assert m.push_url == "https://example.org"
@ -331,7 +343,7 @@ def test_update_4():
@pytest.mark.parametrize("direction", ["fetch", "push"])
def test_update_connection_params(direction, tmpdir, monkeypatch):
"""Test whether new connection params expand the mirror config to a dict."""
m = spack.mirror.Mirror("https://example.com", "example")
m = spack.mirrors.mirror.Mirror("https://example.com", "example")
assert m.update(
{

View File

@ -14,7 +14,7 @@
import pytest
import spack.mirror
import spack.mirrors.mirror
from spack.oci.image import Digest, ImageReference, default_config, default_manifest
from spack.oci.oci import (
copy_missing_layers,
@ -474,7 +474,7 @@ def test_copy_missing_layers(tmpdir, config):
def test_image_from_mirror():
mirror = spack.mirror.Mirror("oci://example.com/image")
mirror = spack.mirrors.mirror.Mirror("oci://example.com/image")
assert image_from_mirror(mirror) == ImageReference.from_string("example.com/image")
@ -511,25 +511,25 @@ def test_default_credentials_provider():
mirrors = [
# OCI mirror with push credentials
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://a.example.com/image", "push": {"access_pair": ["user.a", "pass.a"]}}
),
# Not an OCI mirror
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "https://b.example.com/image", "access_pair": ["user.b", "pass.b"]}
),
# No credentials
spack.mirror.Mirror("oci://c.example.com/image"),
spack.mirrors.mirror.Mirror("oci://c.example.com/image"),
# Top-level credentials
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://d.example.com/image", "access_pair": ["user.d", "pass.d"]}
),
# Dockerhub short reference
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://user/image", "access_pair": ["dockerhub_user", "dockerhub_pass"]}
),
# Localhost (not a dockerhub short reference)
spack.mirror.Mirror(
spack.mirrors.mirror.Mirror(
{"url": "oci://localhost/image", "access_pair": ["user.localhost", "pass.localhost"]}
),
]

View File

@ -24,7 +24,7 @@
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.mirrors.utils
import spack.package_base
import spack.stage
import spack.util.gpg
@ -64,7 +64,7 @@ def test_buildcache(mock_archive, tmp_path, monkeypatch, mutable_config):
# Create the build cache and put it directly into the mirror
mirror_path = str(tmp_path / "test-mirror")
spack.mirror.create(mirror_path, specs=[])
spack.mirrors.utils.create(mirror_path, specs=[])
# register mirror with spack config
mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_path)}

View File

@ -14,7 +14,7 @@
import llnl.util.tty as tty
import spack.config
import spack.mirror
import spack.mirrors.mirror
import spack.paths
import spack.url
import spack.util.s3
@ -276,7 +276,7 @@ def head_object(self, Bucket=None, Key=None):
def test_gather_s3_information(monkeypatch, capfd):
mirror = spack.mirror.Mirror(
mirror = spack.mirrors.mirror.Mirror(
{
"fetch": {
"access_token": "AAAAAAA",

View File

@ -25,7 +25,7 @@ def get_s3_session(url, method="fetch"):
from botocore.exceptions import ClientError
# Circular dependency
from spack.mirror import MirrorCollection
from spack.mirrors.mirror import MirrorCollection
global s3_client_cache
@ -87,7 +87,7 @@ def _parse_s3_endpoint_url(endpoint_url):
def get_mirror_s3_connection_info(mirror, method):
"""Create s3 config for session/client from a Mirror instance (or just set defaults
when no mirror is given.)"""
from spack.mirror import Mirror
from spack.mirrors.mirror import Mirror
s3_connection = {}
s3_client_args = {"use_ssl": spack.config.get("config:verify_ssl")}