mirror: fetch by digest (#45809)
Source mirrors store entries by digest and add a human-readable alias of the form `name-version`. If no digest is available, the alias is used as the primary storage location.

Spack erroneously fetched by alias whenever the digest path did not exist. This is a problem when `version(..., sha256=...)` changes in package.py while the mirror is still populated under the old shasum: when a digest is available the mismatch at least surfaces as an error, but for git versions with a modified commit sha the wrong sources would be fetched without any error. With this PR, only the digest path is used when a digest is available, never the alias. This is also a small performance optimization, since the number of requests is halved for mirrors that do not contain the sources.

Further, for git sources the tag was previously used as a digest, but a tag is a moving target. Only the commit sha is used now.

Also, whenever the alias already existed, Spack used to leave it in place when updating the mirror cache, so aliases would keep pointing to outdated mirror entries after a digest changed. With this PR the alias symlink is atomically moved into place.

Lastly, fix a recent regression where `Stage.disable_mirrors` disabled mirrors but not the local download cache, even though disabling both was the intention.
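As a rough sketch of the layout behavior this PR introduces (condensed from the `DefaultLayout` changes in the diff below; the error handling around the temporary symlink is simplified, so treat this as an illustration rather than the exact Spack code):

```python
import os
from typing import Optional


class DefaultLayout:
    """Sketch: a mirror entry stored by digest, with a human-readable alias."""

    def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
        self.alias = alias_path
        self.digest_path = digest_path
        # The digest, when available, is the primary storage location;
        # the alias is only a cosmetic symlink pointing at it.
        self.path = digest_path or alias_path

    def make_alias(self, root: str) -> None:
        """Create or update the alias symlink, never leaving a stale one behind."""
        if not self.digest_path:
            return  # the alias itself is the storage location
        alias = os.path.join(root, self.alias)
        digest = os.path.join(root, self.digest_path)
        os.makedirs(os.path.dirname(alias), exist_ok=True)
        target = os.path.relpath(digest, start=os.path.dirname(alias))
        tmp = f"{alias}.tmp"
        os.symlink(target, tmp)
        os.rename(tmp, alias)  # atomic on POSIX: replaces any existing alias
```

The atomic rename is the key point: an existing (possibly outdated) alias is replaced in one step, so there is no window in which it dangles or points at the wrong entry.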
parent: 47e79c32fd
commit: 296e5308a7
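For git sources, a mirror entry is now keyed on the commit sha alone. A minimal sketch of that keying, following the `mirror_id` logic changed in the diff below (the free function `git_mirror_id` is hypothetical; in Spack this is a method on `GitFetchStrategy`):

```python
import os
import urllib.parse
from typing import Optional


def git_mirror_id(url: str, commit: Optional[str]) -> Optional[str]:
    """Sketch: derive a mirror storage key for a git source.

    Only an immutable commit sha qualifies; tags and branches are moving
    targets and no longer produce a mirror entry at all.
    """
    if not commit:
        return None
    repo_path = urllib.parse.urlparse(url).path  # e.g. "/org/repo.git"
    return os.path.sep.join(["git", repo_path, commit])
```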
lib/spack/spack/caches.py

@@ -9,11 +9,11 @@
 import llnl.util.lang
 from llnl.util.filesystem import mkdirp
-from llnl.util.symlink import symlink

 import spack.config
 import spack.error
 import spack.fetch_strategy
+import spack.mirror
 import spack.paths
 import spack.util.file_cache
 import spack.util.path
@@ -74,23 +74,6 @@ def store(self, fetcher, relative_dest):
         mkdirp(os.path.dirname(dst))
         fetcher.archive(dst)

-    def symlink(self, mirror_ref):
-        """Symlink a human readible path in our mirror to the actual
-        storage location."""
-
-        cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
-        storage_path = os.path.join(self.root, mirror_ref.storage_path)
-        relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
-
-        if not os.path.exists(cosmetic_path):
-            if os.path.lexists(cosmetic_path):
-                # In this case the link itself exists but it is broken: remove
-                # it and recreate it (in order to fix any symlinks broken prior
-                # to https://github.com/spack/spack/pull/13908)
-                os.unlink(cosmetic_path)
-            mkdirp(os.path.dirname(cosmetic_path))
-            symlink(relative_dst, cosmetic_path)


 #: Spack's local cache for downloaded source archives
 FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
lib/spack/spack/cmd/develop.py

@@ -10,8 +10,10 @@
 import spack.cmd
 import spack.config
 import spack.fetch_strategy
+import spack.package_base
 import spack.repo
 import spack.spec
+import spack.stage
 import spack.util.path
 import spack.version
 from spack.cmd.common import arguments
@@ -62,7 +64,7 @@ def change_fn(section):
     spack.config.change_or_add("develop", find_fn, change_fn)


-def _retrieve_develop_source(spec, abspath):
+def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
     # "steal" the source code via staging API. We ask for a stage
     # to be created, then copy it afterwards somewhere else. It would be
     # better if we can create the `source_path` directly into its final
@@ -71,13 +73,13 @@ def _retrieve_develop_source(spec, abspath):
     # We construct a package class ourselves, rather than asking for
     # Spec.package, since Spec only allows this when it is concrete
     package = pkg_cls(spec)
-    source_stage = package.stage[0]
+    source_stage: spack.stage.Stage = package.stage[0]
     if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
         source_stage.fetcher.get_full_repo = True
         # If we retrieved this version before and cached it, we may have
         # done so without cloning the full git repo; likewise, any
         # mirror might store an instance with truncated history.
-        source_stage.disable_mirrors()
+        source_stage.default_fetcher_only = True

     source_stage.fetcher.set_package(package)
     package.stage.steal_source(abspath)
lib/spack/spack/fetch_strategy.py

@@ -574,18 +574,18 @@ def __init__(self, **kwargs):
         # Set a URL based on the type of fetch strategy.
         self.url = kwargs.get(self.url_attr, None)
         if not self.url:
-            raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))
+            raise ValueError(f"{self.__class__} requires {self.url_attr} argument.")

         for attr in self.optional_attrs:
             setattr(self, attr, kwargs.get(attr, None))

     @_needs_stage
     def check(self):
-        tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))
+        tty.debug(f"No checksum needed when fetching with {self.url_attr}")

     @_needs_stage
     def expand(self):
-        tty.debug("Source fetched with %s is already expanded." % self.url_attr)
+        tty.debug(f"Source fetched with {self.url_attr} is already expanded.")

     @_needs_stage
     def archive(self, destination, *, exclude: Optional[str] = None):
@@ -605,10 +605,10 @@ def archive(self, destination, *, exclude: Optional[str] = None):
         )

     def __str__(self):
-        return "VCS: %s" % self.url
+        return f"VCS: {self.url}"

     def __repr__(self):
-        return "%s<%s>" % (self.__class__, self.url)
+        return f"{self.__class__}<{self.url}>"


 @fetcher
@@ -717,6 +717,11 @@ class GitFetchStrategy(VCSFetchStrategy):
     git_version_re = r"git version (\S+)"

     def __init__(self, **kwargs):
+        self.commit: Optional[str] = None
+        self.tag: Optional[str] = None
+        self.branch: Optional[str] = None
+
         # Discards the keywords in kwargs that may conflict with the next call
         # to __init__
         forwarded_args = copy.copy(kwargs)
@@ -765,43 +770,42 @@ def git(self):

     @property
     def cachable(self):
-        return self.cache_enabled and bool(self.commit or self.tag)
+        return self.cache_enabled and bool(self.commit)

     def source_id(self):
-        return self.commit or self.tag
+        # TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
+        return self.commit

     def mirror_id(self):
-        repo_ref = self.commit or self.tag or self.branch
-        if repo_ref:
+        if self.commit:
             repo_path = urllib.parse.urlparse(self.url).path
-            result = os.path.sep.join(["git", repo_path, repo_ref])
+            result = os.path.sep.join(["git", repo_path, self.commit])
             return result

     def _repo_info(self):
         args = ""

         if self.commit:
-            args = " at commit {0}".format(self.commit)
+            args = f" at commit {self.commit}"
         elif self.tag:
-            args = " at tag {0}".format(self.tag)
+            args = f" at tag {self.tag}"
         elif self.branch:
-            args = " on branch {0}".format(self.branch)
+            args = f" on branch {self.branch}"

-        return "{0}{1}".format(self.url, args)
+        return f"{self.url}{args}"

     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.debug("Already fetched {0}".format(self.stage.source_path))
+            tty.debug(f"Already fetched {self.stage.source_path}")
             return

         if self.git_sparse_paths:
-            self._sparse_clone_src(commit=self.commit, branch=self.branch, tag=self.tag)
+            self._sparse_clone_src()
         else:
-            self._clone_src(commit=self.commit, branch=self.branch, tag=self.tag)
+            self._clone_src()
         self.submodule_operations()

-    def bare_clone(self, dest):
+    def bare_clone(self, dest: str) -> None:
         """
         Execute a bare clone for metadata only
@@ -809,7 +813,7 @@ def bare_clone(self, dest):
         and shouldn't be used for staging.
         """
         # Default to spack source path
-        tty.debug("Cloning git repository: {0}".format(self._repo_info()))
+        tty.debug(f"Cloning git repository: {self._repo_info()}")

         git = self.git
         debug = spack.config.get("config:debug")
@@ -821,24 +825,16 @@ def bare_clone(self, dest):
         clone_args.extend([self.url, dest])
         git(*clone_args)

-    def _clone_src(self, commit=None, branch=None, tag=None):
-        """
-        Clone a repository to a path using git.
-
-        Arguments:
-            commit (str or None): A commit to fetch from the remote. Only one of
-                commit, branch, and tag may be non-None.
-            branch (str or None): A branch to fetch from the remote.
-            tag (str or None): A tag to fetch from the remote.
-        """
+    def _clone_src(self) -> None:
+        """Clone a repository to a path using git."""
         # Default to spack source path
         dest = self.stage.source_path
-        tty.debug("Cloning git repository: {0}".format(self._repo_info()))
+        tty.debug(f"Cloning git repository: {self._repo_info()}")

         git = self.git
         debug = spack.config.get("config:debug")

-        if commit:
+        if self.commit:
             # Need to do a regular clone and check out everything if
             # they asked for a particular commit.
             clone_args = ["clone", self.url]
@@ -857,7 +853,7 @@ def _clone_src(self, commit=None, branch=None, tag=None):
             )

             with working_dir(dest):
-                checkout_args = ["checkout", commit]
+                checkout_args = ["checkout", self.commit]
                 if not debug:
                     checkout_args.insert(1, "--quiet")
                 git(*checkout_args)
@@ -869,10 +865,10 @@ def _clone_src(self, commit=None, branch=None, tag=None):
                 args.append("--quiet")

             # If we want a particular branch ask for it.
-            if branch:
-                args.extend(["--branch", branch])
-            elif tag and self.git_version >= spack.version.Version("1.8.5.2"):
-                args.extend(["--branch", tag])
+            if self.branch:
+                args.extend(["--branch", self.branch])
+            elif self.tag and self.git_version >= spack.version.Version("1.8.5.2"):
+                args.extend(["--branch", self.tag])

             # Try to be efficient if we're using a new enough git.
             # This checks out only one branch's history
@@ -904,7 +900,7 @@ def _clone_src(self, commit=None, branch=None, tag=None):
             # For tags, be conservative and check them out AFTER
             # cloning. Later git versions can do this with clone
             # --branch, but older ones fail.
-            if tag and self.git_version < spack.version.Version("1.8.5.2"):
+            if self.tag and self.git_version < spack.version.Version("1.8.5.2"):
                 # pull --tags returns a "special" error code of 1 in
                 # older versions that we have to ignore.
                 # see: https://github.com/git/git/commit/19d122b
@@ -917,16 +913,8 @@ def _clone_src(self, commit=None, branch=None, tag=None):
                 git(*pull_args, ignore_errors=1)
                 git(*co_args)

-    def _sparse_clone_src(self, commit=None, branch=None, tag=None, **kwargs):
-        """
-        Use git's sparse checkout feature to clone portions of a git repository
-
-        Arguments:
-            commit (str or None): A commit to fetch from the remote. Only one of
-                commit, branch, and tag may be non-None.
-            branch (str or None): A branch to fetch from the remote.
-            tag (str or None): A tag to fetch from the remote.
-        """
+    def _sparse_clone_src(self, **kwargs):
+        """Use git's sparse checkout feature to clone portions of a git repository"""
         dest = self.stage.source_path
         git = self.git

@@ -945,12 +933,12 @@ def _sparse_clone_src(self, commit=None, branch=None, tag=None, **kwargs):
                     "Cloning the full repository instead."
                 )
             )
-            self._clone_src(commit, branch, tag)
+            self._clone_src()
         else:
             # default to depth=2 to allow for retention of some git properties
             depth = kwargs.get("depth", 2)
-            needs_fetch = branch or tag
-            git_ref = branch or tag or commit
+            needs_fetch = self.branch or self.tag
+            git_ref = self.branch or self.tag or self.commit

             assert git_ref

@@ -1050,7 +1038,7 @@ def protocol_supports_shallow_clone(self):
         return not (self.url.startswith("http://") or self.url.startswith("/"))

     def __str__(self):
-        return "[git] {0}".format(self._repo_info())
+        return f"[git] {self._repo_info()}"


 @fetcher
@@ -1668,7 +1656,7 @@ def for_package_version(pkg, version=None):
         raise InvalidArgsError(pkg, version, **args)


-def from_url_scheme(url: str, **kwargs):
+def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
     """Finds a suitable FetchStrategy by matching its url_attr with the scheme
     in the given url."""
     parsed_url = urllib.parse.urlparse(url, scheme="file")
lib/spack/spack/mirror.py

@@ -21,6 +21,7 @@
 from typing import List, Optional, Union

 import llnl.url
+import llnl.util.symlink
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp

@@ -30,6 +31,7 @@
 import spack.fetch_strategy
 import spack.mirror
 import spack.oci.image
+import spack.repo
 import spack.spec
 import spack.util.path
 import spack.util.spack_json as sjson
@@ -427,27 +429,58 @@ def _determine_extension(fetcher):


 class MirrorLayout:
-    """A ``MirrorLayout`` stores the relative locations of files in a mirror directory. The main
-    storage location is ``storage_path``. An additional, human-readable path may be obtained as the
-    second entry when iterating this object."""
+    """A ``MirrorLayout`` object describes the relative path of a mirror entry."""

-    def __init__(self, storage_path: str) -> None:
-        self.storage_path = storage_path
+    def __init__(self, path: str) -> None:
+        self.path = path

     def __iter__(self):
-        yield self.storage_path
+        """Yield all paths including aliases where the resource can be found."""
+        yield self.path
+
+    def make_alias(self, root: str) -> None:
+        """Make the entry ``root / self.path`` available under a human readable alias"""
+        pass


 class DefaultLayout(MirrorLayout):
-    def __init__(self, cosmetic_path: str, global_path: Optional[str] = None) -> None:
-        super().__init__(global_path or cosmetic_path)
-        self.global_path = global_path
-        self.cosmetic_path = cosmetic_path
+    def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
+        # When we have a digest, it is used as the primary storage location. If not, then we use
+        # the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
+        # a digest, that's why an alias is required and a digest optional.
+        super().__init__(path=digest_path or alias_path)
+        self.alias = alias_path
+        self.digest_path = digest_path
+
+    def make_alias(self, root: str) -> None:
+        """Symlink a human readible path in our mirror to the actual storage location."""
+        # We already use the human-readable path as the main storage location.
+        if not self.digest_path:
+            return
+
+        alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
+
+        alias_dir = os.path.dirname(alias)
+        relative_dst = os.path.relpath(digest, start=alias_dir)
+
+        mkdirp(alias_dir)
+        tmp = f"{alias}.tmp"
+        llnl.util.symlink.symlink(relative_dst, tmp)
+
+        try:
+            os.rename(tmp, alias)
+        except OSError:
+            # Clean up the temporary if possible
+            try:
+                os.unlink(tmp)
+            except OSError:
+                pass
+            raise

     def __iter__(self):
-        if self.global_path:
-            yield self.global_path
-        yield self.cosmetic_path
+        if self.digest_path:
+            yield self.digest_path
+        yield self.alias


 class OCILayout(MirrorLayout):
@@ -458,7 +491,11 @@ def __init__(self, digest: spack.oci.image.Digest) -> None:
         super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))


-def mirror_archive_paths(fetcher, per_package_ref, spec=None):
+def default_mirror_layout(
+    fetcher: "spack.fetch_strategy.FetchStrategy",
+    per_package_ref: str,
+    spec: Optional["spack.spec.Spec"] = None,
+) -> MirrorLayout:
     """Returns a ``MirrorReference`` object which keeps track of the relative
     storage path of the resource associated with the specified ``fetcher``."""
     ext = None
lib/spack/spack/package_base.py

@@ -741,7 +741,7 @@ def __init__(self, spec):
             raise ValueError(msg.format(self))

         # init internal variables
-        self._stage = None
+        self._stage: Optional[StageComposite] = None
         self._fetcher = None
         self._tester: Optional["PackageTest"] = None

@@ -1099,7 +1099,7 @@ def _make_resource_stage(self, root_stage, resource):
             root=root_stage,
             resource=resource,
             name=self._resource_stage(resource),
-            mirror_paths=spack.mirror.mirror_archive_paths(
+            mirror_paths=spack.mirror.default_mirror_layout(
                 resource.fetcher, os.path.join(self.name, pretty_resource_name)
             ),
             mirrors=spack.mirror.MirrorCollection(source=True).values(),
@@ -1114,7 +1114,7 @@ def _make_root_stage(self, fetcher):
         # Construct a mirror path (TODO: get this out of package.py)
         format_string = "{name}-{version}"
         pretty_name = self.spec.format_path(format_string)
-        mirror_paths = spack.mirror.mirror_archive_paths(
+        mirror_paths = spack.mirror.default_mirror_layout(
             fetcher, os.path.join(self.name, pretty_name), self.spec
         )
         # Construct a path where the stage should build..
@@ -1180,7 +1180,7 @@ def stage(self):
         return self._stage

     @stage.setter
-    def stage(self, stage):
+    def stage(self, stage: StageComposite):
         """Allow a stage object to be set to override the default."""
         self._stage = stage
lib/spack/spack/patch.py

@@ -326,7 +326,7 @@ def stage(self) -> "spack.stage.Stage":
         name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])

         per_package_ref = os.path.join(self.owner.split(".")[-1], name)
-        mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
+        mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
         self._stage = spack.stage.Stage(
             fetcher,
             name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
lib/spack/spack/stage.py

@@ -364,36 +364,30 @@ def __init__(
         """Create a stage object.
         Parameters:
           url_or_fetch_strategy
-              URL of the archive to be downloaded into this stage, OR
-              a valid FetchStrategy.
+              URL of the archive to be downloaded into this stage, OR a valid FetchStrategy.

           name
-              If a name is provided, then this stage is a named stage
-              and will persist between runs (or if you construct another
-              stage object later). If name is not provided, then this
+              If a name is provided, then this stage is a named stage and will persist between runs
+              (or if you construct another stage object later). If name is not provided, then this
               stage will be given a unique name automatically.

           mirror_paths
-              If provided, Stage will search Spack's mirrors for
-              this archive at each of the provided relative mirror paths
-              before using the default fetch strategy.
+              If provided, Stage will search Spack's mirrors for this archive at each of the
+              provided relative mirror paths before using the default fetch strategy.

           keep
-              By default, when used as a context manager, the Stage
-              is deleted on exit when no exceptions are raised.
-              Pass True to keep the stage intact even if no
-              exceptions are raised.
+              By default, when used as a context manager, the Stage is deleted on exit when no
+              exceptions are raised. Pass True to keep the stage intact even if no exceptions are
+              raised.

           path
               If provided, the stage path to use for associated builds.

           lock
-              True if the stage directory file lock is to be used, False
-              otherwise.
+              True if the stage directory file lock is to be used, False otherwise.

           search_fn
-              The search function that provides the fetch strategy
-              instance.
+              The search function that provides the fetch strategy instance.
         """
         super().__init__(name, path, keep, lock)

@@ -419,26 +413,27 @@ def __init__(

         self.srcdir = None

-        self.mirror_paths = mirror_paths
+        self.mirror_layout = mirror_paths
         self.mirrors = list(mirrors) if mirrors else []
+        # Allow users the disable both mirrors and download cache
+        self.default_fetcher_only = False

     @property
     def expected_archive_files(self):
         """Possible archive file paths."""
-        paths = []
         fnames = []
         expanded = True
         if isinstance(self.default_fetcher, fs.URLFetchStrategy):
             expanded = self.default_fetcher.expand_archive
             fnames.append(url_util.default_download_filename(self.default_fetcher.url))

-        if self.mirror_paths:
-            fnames.extend(os.path.basename(x) for x in self.mirror_paths)
+        if self.mirror_layout:
+            fnames.append(os.path.basename(self.mirror_layout.path))

-        paths.extend(os.path.join(self.path, f) for f in fnames)
+        paths = [os.path.join(self.path, f) for f in fnames]
         if not expanded:
-            # If the download file is not compressed, the "archive" is a
-            # single file placed in Stage.source_path
+            # If the download file is not compressed, the "archive" is a single file placed in
+            # Stage.source_path
             paths.extend(os.path.join(self.source_path, f) for f in fnames)

         return paths
@@ -471,14 +466,8 @@ def source_path(self):
         """Returns the well-known source directory path."""
         return os.path.join(self.path, _source_path_subdir)

-    def disable_mirrors(self):
-        """The Stage will not attempt to look for the associated fetcher
-        target in any of Spack's mirrors (including the local download cache).
-        """
-        self.mirror_paths = None
-
     def _generate_fetchers(self, mirror_only=False) -> Generator[fs.FetchStrategy, None, None]:
-        fetchers = []
+        fetchers: List[fs.FetchStrategy] = []
         if not mirror_only:
             fetchers.append(self.default_fetcher)

@@ -495,27 +484,26 @@ def _generate_fetchers(self, mirror_only=False) -> Generator[fs.FetchStrategy, N
         # TODO: move mirror logic out of here and clean it up!
         # TODO: Or @alalazo may have some ideas about how to use a
         # TODO: CompositeFetchStrategy here.
-        if self.mirror_paths and self.mirrors:
+        if not self.default_fetcher_only and self.mirror_layout and self.mirrors:
-            # Add URL strategies for all the mirrors with the digest
+            # Insert fetchers in the order that the URLs are provided.
             fetchers[:0] = (
                 fs.from_url_scheme(
-                    url_util.join(mirror.fetch_url, rel_path),
+                    url_util.join(mirror.fetch_url, self.mirror_layout.path),
                     checksum=digest,
                     expand=expand,
                     extension=extension,
                 )
                 for mirror in self.mirrors
-                if not mirror.fetch_url.startswith("oci://")
-                for rel_path in self.mirror_paths
+                if not mirror.fetch_url.startswith("oci://")  # no support for mirrors yet
             )

-        if self.mirror_paths and self.default_fetcher.cachable:
-            fetchers[:0] = (
+        if not self.default_fetcher_only and self.mirror_layout and self.default_fetcher.cachable:
+            fetchers.insert(
+                0,
                 spack.caches.FETCH_CACHE.fetcher(
-                    rel_path, digest, expand=expand, extension=extension
-                )
-                for rel_path in self.mirror_paths
+                    self.mirror_layout.path, digest, expand=expand, extension=extension
+                ),
             )

         yield from fetchers
@@ -611,41 +599,42 @@ def check(self):
         self.fetcher.check()

     def cache_local(self):
-        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)
+        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_layout.path)

-    def cache_mirror(self, mirror, stats):
+    def cache_mirror(
+        self, mirror: spack.caches.MirrorCache, stats: spack.mirror.MirrorStats
+    ) -> None:
         """Perform a fetch if the resource is not already cached

         Arguments:
-            mirror (spack.caches.MirrorCache): the mirror to cache this Stage's
-                resource in
-            stats (spack.mirror.MirrorStats): this is updated depending on whether the
-                caching operation succeeded or failed
+            mirror: the mirror to cache this Stage's resource in
+            stats: this is updated depending on whether the caching operation succeeded or failed
         """
         if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
-            # BundleFetchStrategy has no source to fetch. The associated
-            # fetcher does nothing but the associated stage may still exist.
-            # There is currently no method available on the fetcher to
-            # distinguish this ('cachable' refers to whether the fetcher
-            # refers to a resource with a fixed ID, which is not the same
-            # concept as whether there is anything to fetch at all) so we
-            # must examine the type of the fetcher.
+            # BundleFetchStrategy has no source to fetch. The associated fetcher does nothing but
+            # the associated stage may still exist. There is currently no method available on the
+            # fetcher to distinguish this ('cachable' refers to whether the fetcher refers to a
+            # resource with a fixed ID, which is not the same concept as whether there is anything
+            # to fetch at all) so we must examine the type of the fetcher.
             return

-        if mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
+        elif mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
             return

-        absolute_storage_path = os.path.join(mirror.root, self.mirror_paths.storage_path)
+        elif not self.mirror_layout:
+            return
+
+        absolute_storage_path = os.path.join(mirror.root, self.mirror_layout.path)

         if os.path.exists(absolute_storage_path):
             stats.already_existed(absolute_storage_path)
         else:
             self.fetch()
             self.check()
-            mirror.store(self.fetcher, self.mirror_paths.storage_path)
+            mirror.store(self.fetcher, self.mirror_layout.path)
             stats.added(absolute_storage_path)

-        mirror.symlink(self.mirror_paths)
+        self.mirror_layout.make_alias(mirror.root)

     def expand_archive(self):
         """Changes to the stage directory and attempt to expand the downloaded
@@ -653,9 +642,9 @@ def expand_archive(self):
         downloaded."""
         if not self.expanded:
             self.fetcher.expand()
-            tty.debug("Created stage in {0}".format(self.path))
+            tty.debug(f"Created stage in {self.path}")
         else:
-            tty.debug("Already staged {0} in {1}".format(self.name, self.path))
+            tty.debug(f"Already staged {self.name} in {self.path}")

     def restage(self):
         """Removes the expanded archive path if it exists, then re-expands
lib/spack/spack/test/mirror.py

@@ -11,15 +11,16 @@
 from llnl.util.symlink import resolve_link_target_relative_to_the_link

 import spack.caches
 import spack.config
 import spack.fetch_strategy
 import spack.mirror
 import spack.patch
 import spack.repo
+import spack.stage
 import spack.util.executable
 import spack.util.spack_json as sjson
 import spack.util.url as url_util
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which
 from spack.util.spack_yaml import SpackYAMLError

@@ -51,7 +52,7 @@ def set_up_package(name, repository, url_attr):


 def check_mirror():
-    with Stage("spack-mirror-test") as stage:
+    with spack.stage.Stage("spack-mirror-test") as stage:
         mirror_root = os.path.join(stage.path, "test-mirror")
         # register mirror with spack config
         mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_root)}
@@ -66,8 +67,8 @@ def check_mirror():
         for spec in specs:
             fetcher = spec.package.fetcher
             per_package_ref = os.path.join(spec.name, "-".join([spec.name, str(spec.version)]))
-            mirror_paths = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
-            expected_path = os.path.join(mirror_root, mirror_paths.storage_path)
+            mirror_layout = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
+            expected_path = os.path.join(mirror_root, mirror_layout.path)
             assert os.path.exists(expected_path)

         # Now try to fetch each package.
@@ -203,13 +204,11 @@ def test_invalid_json_mirror_collection(invalid_json, error_message):
 def test_mirror_archive_paths_no_version(mock_packages, mock_archive):
     spec = Spec("trivial-install-test-package@=nonexistingversion").concretized()
     fetcher = spack.fetch_strategy.URLFetchStrategy(url=mock_archive.url)
-    spack.mirror.mirror_archive_paths(fetcher, "per-package-ref", spec)
+    spack.mirror.default_mirror_layout(fetcher, "per-package-ref", spec)


 def test_mirror_with_url_patches(mock_packages, monkeypatch):
-    spec = Spec("patch-several-dependencies")
-    spec.concretize()
-
+    spec = Spec("patch-several-dependencies").concretized()
     files_cached_in_mirror = set()

     def record_store(_class, fetcher, relative_dst, cosmetic_path=None):
@@ -228,30 +227,25 @@ def successful_expand(_class):
     def successful_apply(*args, **kwargs):
         pass

-    def successful_symlink(*args, **kwargs):
+    def successful_make_alias(*args, **kwargs):
         pass

-    with Stage("spack-mirror-test") as stage:
+    with spack.stage.Stage("spack-mirror-test") as stage:
         mirror_root = os.path.join(stage.path, "test-mirror")

         monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "fetch", successful_fetch)
         monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "expand", successful_expand)
         monkeypatch.setattr(spack.patch, "apply_patch", successful_apply)
         monkeypatch.setattr(spack.caches.MirrorCache, "store", record_store)
-        monkeypatch.setattr(spack.caches.MirrorCache, "symlink", successful_symlink)
+        monkeypatch.setattr(spack.mirror.DefaultLayout, "make_alias", successful_make_alias)

         with spack.config.override("config:checksum", False):
             spack.mirror.create(mirror_root, list(spec.traverse()))

-        assert not (
-            set(
-                [
-                    "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
-                    "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz",
-                ]
-            )
-            - files_cached_in_mirror
-        )
+        assert {
+            "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
+            "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz",
+        }.issubset(files_cached_in_mirror)


 class MockFetcher:
@@ -266,23 +260,21 @@ def archive(dst):


 @pytest.mark.regression("14067")
-def test_mirror_cache_symlinks(tmpdir):
+def test_mirror_layout_make_alias(tmpdir):
     """Confirm that the cosmetic symlink created in the mirror cache (which may
     be relative) targets the storage path correctly.
     """
-    cosmetic_path = os.path.join("zlib", "zlib-1.2.11.tar.gz")
-    global_path = os.path.join("_source-cache", "archive", "c3", "c3e5.tar.gz")
-    cache = spack.caches.MirrorCache(str(tmpdir), False)
-    reference = spack.mirror.DefaultLayout(cosmetic_path, global_path)
+    alias = os.path.join("zlib", "zlib-1.2.11.tar.gz")
+    path = os.path.join("_source-cache", "archive", "c3", "c3e5.tar.gz")
+    cache = spack.caches.MirrorCache(root=str(tmpdir), skip_unstable_versions=False)
+    layout = spack.mirror.DefaultLayout(alias, path)

-    cache.store(MockFetcher(), reference.storage_path)
-    cache.symlink(reference)
+    cache.store(MockFetcher(), layout.path)
+    layout.make_alias(cache.root)

-    link_target = resolve_link_target_relative_to_the_link(
-        os.path.join(cache.root, reference.cosmetic_path)
-    )
+    link_target = resolve_link_target_relative_to_the_link(os.path.join(cache.root, layout.alias))
     assert os.path.exists(link_target)
-    assert os.path.normpath(link_target) == os.path.join(cache.root, reference.storage_path)
+    assert os.path.normpath(link_target) == os.path.join(cache.root, layout.path)


 @pytest.mark.regression("31627")
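Taken together, the fetch order a `Stage` now tries is: the local download cache at the digest path, then each mirror at the digest path, then the default fetcher, with `default_fetcher_only` short-circuiting straight to the last. A condensed, hypothetical sketch of that ordering (strings stand in for the `FetchStrategy` objects that `_generate_fetchers` actually builds):

```python
from typing import Iterable, List, Optional


def fetch_order(
    default_fetcher: str,
    mirrors: Iterable[str],
    digest_path: Optional[str],
    default_fetcher_only: bool = False,
) -> List[str]:
    """Sketch: the ordered list of sources a stage would try."""
    fetchers = [default_fetcher]
    if not default_fetcher_only and digest_path:
        # Mirrors are tried before the default fetcher...
        fetchers[:0] = [f"{mirror}/{digest_path}" for mirror in mirrors]
        # ...and the local download cache before everything else.
        fetchers.insert(0, f"cache/{digest_path}")
    return fetchers
```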