Simplify URLFetchStrategy (#45741)
commit 57769fac7d
parent c65fd7e12d
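In short: `URLFetchStrategy.__init__` becomes keyword-only with a required `url`, the `kwargs.get("url", url)` fallbacks and the in-constructor `ValueError` go away, the S3/GCS fetchers delegate to the shared urllib download path, and call sites across Spack switch to `url=`/`checksum=` keywords. A hedged before/after sketch of a call site (the URL and digest are illustrative, not taken from this diff):

```python
from spack.fetch_strategy import URLFetchStrategy  # assumes a Spack checkout on sys.path

# Before this commit: positional arguments, optionally overridden via kwargs.
fetcher = URLFetchStrategy("https://example.com/pkg-1.0.tar.gz", "0123abcd")

# After this commit: keyword-only arguments.
fetcher = URLFetchStrategy(url="https://example.com/pkg-1.0.tar.gz", checksum="0123abcd")
```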
@@ -54,7 +54,7 @@
 import spack.version
 import spack.version.git_ref_lookup
 from spack.util.compression import decompressor_for
-from spack.util.executable import CommandNotFoundError, which
+from spack.util.executable import CommandNotFoundError, Executable, which
 
 #: List of all fetch strategies, created by FetchStrategy metaclass.
 all_strategies = []
@@ -246,33 +246,28 @@ class URLFetchStrategy(FetchStrategy):
 
     # these are checksum types. The generic 'checksum' is deprecated for
     # specific hash names, but we need it for backward compatibility
-    optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
+    optional_attrs = [*crypto.hashes.keys(), "checksum"]
 
-    def __init__(self, url=None, checksum=None, **kwargs):
+    def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
         super().__init__(**kwargs)
 
-        # Prefer values in kwargs to the positionals.
-        self.url = kwargs.get("url", url)
+        self.url = url
         self.mirrors = kwargs.get("mirrors", [])
 
         # digest can be set as the first argument, or from an explicit
         # kwarg by the hash name.
-        self.digest = kwargs.get("checksum", checksum)
+        self.digest: Optional[str] = checksum
         for h in self.optional_attrs:
             if h in kwargs:
                 self.digest = kwargs[h]
 
-        self.expand_archive = kwargs.get("expand", True)
-        self.extra_options = kwargs.get("fetch_options", {})
-        self._curl = None
-        self.extension = kwargs.get("extension", None)
-
-        if not self.url:
-            raise ValueError("URLFetchStrategy requires a url for fetching.")
+        self.expand_archive: bool = kwargs.get("expand", True)
+        self.extra_options: dict = kwargs.get("fetch_options", {})
+        self._curl: Optional[Executable] = None
+        self.extension: Optional[str] = kwargs.get("extension", None)
 
     @property
-    def curl(self):
+    def curl(self) -> Executable:
         if not self._curl:
             self._curl = web_util.require_curl()
         return self._curl
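The retained `optional_attrs` loop still lets hash-named keywords (e.g. `sha256=...`) populate `self.digest`, which is why call sites like `fs.URLFetchStrategy(url=..., sha256=..., expand=False)` later in this diff keep working. A standalone sketch of that resolution order (hash names assumed to mirror `crypto.hashes`; toy code, not Spack's):

```python
# Toy model of the digest resolution in __init__ above.
optional_attrs = ["md5", "sha1", "sha256", "sha512", "checksum"]

def resolve_digest(checksum=None, **kwargs):
    digest = checksum
    for h in optional_attrs:
        if h in kwargs:
            digest = kwargs[h]  # a hash-named kwarg overrides checksum
    return digest

assert resolve_digest(checksum="aaa") == "aaa"
assert resolve_digest(checksum="aaa", sha256="bbb") == "bbb"
```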
@@ -348,8 +343,8 @@ def _fetch_urllib(self, url):
         if os.path.lexists(save_file):
             os.remove(save_file)
 
-        with open(save_file, "wb") as _open_file:
-            shutil.copyfileobj(response, _open_file)
+        with open(save_file, "wb") as f:
+            shutil.copyfileobj(response, f)
 
         self._check_headers(str(response.headers))
 
@@ -468,7 +463,7 @@ def check(self):
         """Check the downloaded archive against a checksum digest.
         No-op if this stage checks code out of a repository."""
         if not self.digest:
-            raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
+            raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")
 
         verify_checksum(self.archive_file, self.digest)
 
@@ -479,8 +474,8 @@ def reset(self):
         """
         if not self.archive_file:
             raise NoArchiveFileError(
-                "Tried to reset URLFetchStrategy before fetching",
-                "Failed on reset() for URL %s" % self.url,
+                f"Tried to reset {self.__class__.__name__} before fetching",
+                f"Failed on reset() for URL {self.url}",
             )
 
         # Remove everything but the archive from the stage
@@ -493,14 +488,10 @@ def reset(self):
         self.expand()
 
     def __repr__(self):
-        url = self.url if self.url else "no url"
-        return "%s<%s>" % (self.__class__.__name__, url)
+        return f"{self.__class__.__name__}<{self.url}>"
 
     def __str__(self):
-        if self.url:
-            return self.url
-        else:
-            return "[no url]"
+        return self.url
 
 
 @fetcher
@@ -513,7 +504,7 @@ def fetch(self):
 
         # check whether the cache file exists.
         if not os.path.isfile(path):
-            raise NoCacheError("No cache of %s" % path)
+            raise NoCacheError(f"No cache of {path}")
 
         # remove old symlink if one is there.
         filename = self.stage.save_filename
@@ -523,8 +514,8 @@ def fetch(self):
         # Symlink to local cached archive.
         symlink(path, filename)
 
-        # Remove link if checksum fails, or subsequent fetchers
-        # will assume they don't need to download.
+        # Remove link if checksum fails, or subsequent fetchers will assume they don't need to
+        # download.
         if self.digest:
             try:
                 self.check()
@@ -533,12 +524,12 @@ def fetch(self):
                 raise
 
         # Notify the user how we fetched.
-        tty.msg("Using cached archive: {0}".format(path))
+        tty.msg(f"Using cached archive: {path}")
 
 
 class OCIRegistryFetchStrategy(URLFetchStrategy):
-    def __init__(self, url=None, checksum=None, **kwargs):
-        super().__init__(url, checksum, **kwargs)
+    def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
+        super().__init__(url=url, checksum=checksum, **kwargs)
 
         self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
@@ -1381,7 +1372,7 @@ def reset(self):
         shutil.move(scrubbed, source_path)
 
     def __str__(self):
-        return "[hg] %s" % self.url
+        return f"[hg] {self.url}"
 
 
 @fetcher
@@ -1390,47 +1381,16 @@ class S3FetchStrategy(URLFetchStrategy):
 
     url_attr = "s3"
 
-    def __init__(self, *args, **kwargs):
-        try:
-            super().__init__(*args, **kwargs)
-        except ValueError:
-            if not kwargs.get("url"):
-                raise ValueError("S3FetchStrategy requires a url for fetching.")
-
     @_needs_stage
     def fetch(self):
+        if not self.url.startswith("s3://"):
+            raise spack.error.FetchError(
+                f"{self.__class__.__name__} can only fetch from s3:// urls."
+            )
         if self.archive_file:
             tty.debug(f"Already downloaded {self.archive_file}")
             return
-
-        parsed_url = urllib.parse.urlparse(self.url)
-        if parsed_url.scheme != "s3":
-            raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
-
-        basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )
-
-        with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
-
-            with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
-
-        content_type = web_util.get_header(response.headers, "Content-type")
-
-        if content_type == "text/html":
-            warn_content_type_mismatch(self.archive_file or "the archive")
-
-        if self.stage.save_filename:
-            fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
-
+        self._fetch_urllib(self.url)
         if not self.archive_file:
             raise FailedDownloadError(
                 RuntimeError(f"Missing archive {self.archive_file} after fetching")
@@ -1443,46 +1403,17 @@ class GCSFetchStrategy(URLFetchStrategy):
 
     url_attr = "gs"
 
-    def __init__(self, *args, **kwargs):
-        try:
-            super().__init__(*args, **kwargs)
-        except ValueError:
-            if not kwargs.get("url"):
-                raise ValueError("GCSFetchStrategy requires a url for fetching.")
-
     @_needs_stage
     def fetch(self):
+        if not self.url.startswith("gs"):
+            raise spack.error.FetchError(
+                f"{self.__class__.__name__} can only fetch from gs:// urls."
+            )
         if self.archive_file:
-            tty.debug("Already downloaded {0}".format(self.archive_file))
+            tty.debug(f"Already downloaded {self.archive_file}")
             return
 
-        parsed_url = urllib.parse.urlparse(self.url)
-        if parsed_url.scheme != "gs":
-            raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
-
-        basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )
-
-        with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
-
-            with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
-
-        content_type = web_util.get_header(response.headers, "Content-type")
-
-        if content_type == "text/html":
-            warn_content_type_mismatch(self.archive_file or "the archive")
-
-        if self.stage.save_filename:
-            os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
-
+        self._fetch_urllib(self.url)
         if not self.archive_file:
             raise FailedDownloadError(
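Both cloud fetchers now reduce to the same shape: validate the scheme up front, skip work if the archive is already present, and hand the transfer to the inherited `_fetch_urllib`. A minimal standalone sketch of that pattern (a toy class, not the Spack implementation):

```python
class SchemeFetcher:
    """Toy model of the simplified S3/GCS fetch flow."""

    scheme = "s3://"

    def __init__(self, *, url: str):
        self.url = url
        self.archive_file = None  # set once a download succeeds

    def _fetch_urllib(self, url):
        # stands in for the shared URLFetchStrategy._fetch_urllib
        print(f"downloading {url} via the shared urllib path")
        self.archive_file = "archive.tar.gz"

    def fetch(self):
        if not self.url.startswith(self.scheme):
            raise ValueError(f"can only fetch from {self.scheme} urls")
        if self.archive_file:
            return  # already downloaded: fetch is a no-op
        self._fetch_urllib(self.url)


SchemeFetcher(url="s3://bucket/archive.tar.gz").fetch()
```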
@@ -1496,7 +1427,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
     as well as after expanding it."""
 
     def __init__(self, url, archive_sha256: str, expanded_sha256: str):
-        super().__init__(url, archive_sha256)
+        super().__init__(url=url, checksum=archive_sha256)
         self.expanded_sha256 = expanded_sha256
 
     def expand(self):
@@ -1538,14 +1469,14 @@ def stable_target(fetcher):
     return False
 
 
-def from_url(url):
+def from_url(url: str) -> URLFetchStrategy:
     """Given a URL, find an appropriate fetch strategy for it.
     Currently just gives you a URLFetchStrategy that uses curl.
 
     TODO: make this return appropriate fetch strategies for other
     types of URLs.
     """
-    return URLFetchStrategy(url)
+    return URLFetchStrategy(url=url)
 
 
 def from_kwargs(**kwargs):
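A plausible use of the newly annotated helper, assuming a Spack checkout on `sys.path` (the URL is illustrative):

```python
import spack.fetch_strategy as fs

fetcher = fs.from_url("https://example.com/pkg-1.0.tar.gz")
assert isinstance(fetcher, fs.URLFetchStrategy)
assert fetcher.url == "https://example.com/pkg-1.0.tar.gz"
```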
@@ -1614,10 +1545,12 @@ def _check_version_attributes(fetcher, pkg, version):
 def _extrapolate(pkg, version):
     """Create a fetcher from an extrapolated URL for this version."""
     try:
-        return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
+        return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
     except spack.package_base.NoURLError:
-        msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
-        raise ExtrapolationError(msg % (version, pkg.name))
+        raise ExtrapolationError(
+            f"Can't extrapolate a URL for version {version} because "
+            f"package {pkg.name} defines no URLs"
+        )
 
 
 def _from_merged_attrs(fetcher, pkg, version):
@@ -1733,11 +1666,9 @@ def for_package_version(pkg, version=None):
     raise InvalidArgsError(pkg, version, **args)
 
 
-def from_url_scheme(url, *args, **kwargs):
+def from_url_scheme(url: str, **kwargs):
     """Finds a suitable FetchStrategy by matching its url_attr with the scheme
     in the given url."""
-
-    url = kwargs.get("url", url)
     parsed_url = urllib.parse.urlparse(url, scheme="file")
 
     scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1754,11 +1685,9 @@ def from_url_scheme(url, *args, **kwargs):
     for fetcher in all_strategies:
         url_attr = getattr(fetcher, "url_attr", None)
         if url_attr and url_attr == scheme:
-            return fetcher(url, *args, **kwargs)
+            return fetcher(url=url, **kwargs)
 
-    raise ValueError(
-        'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
-    )
+    raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')
 
 
 def from_list_url(pkg):
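With the `*args` pass-through dropped, every strategy chosen here is built uniformly as `fetcher(url=url, **kwargs)`. A hedged example of the dispatch, relying only on the `url_attr` values visible in this diff (`s3`, `gs`; the bucket URL is illustrative):

```python
import spack.fetch_strategy as fs

fetcher = fs.from_url_scheme("s3://bucket/pkg-1.0.tar.gz")
assert isinstance(fetcher, fs.S3FetchStrategy)
```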
@@ -1783,7 +1712,9 @@ def from_list_url(pkg):
             )
 
             # construct a fetcher
-            return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
+            return URLFetchStrategy(
+                url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
+            )
     except KeyError as e:
         tty.debug(e)
         tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1811,10 +1742,10 @@ def store(self, fetcher, relative_dest):
         mkdirp(os.path.dirname(dst))
         fetcher.archive(dst)
 
-    def fetcher(self, target_path, digest, **kwargs):
+    def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
         path = os.path.join(self.root, target_path)
         url = url_util.path_to_file_url(path)
-        return CacheURLFetchStrategy(url, digest, **kwargs)
+        return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)
 
     def destroy(self):
         shutil.rmtree(self.root, ignore_errors=True)
@@ -390,7 +390,7 @@ def make_stage(
 ) -> spack.stage.Stage:
     _urlopen = _urlopen or spack.oci.opener.urlopen
     fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
-        url, checksum=digest.digest, _urlopen=_urlopen
+        url=url, checksum=digest.digest, _urlopen=_urlopen
     )
     # Use blobs/<alg>/<encoded> as the cache path, which follows
     # the OCI Image Layout Specification. What's missing though,
@@ -319,7 +319,7 @@ def stage(self) -> "spack.stage.Stage":
                 self.url, archive_sha256=self.archive_sha256, expanded_sha256=self.sha256
             )
         else:
-            fetcher = fs.URLFetchStrategy(self.url, sha256=self.sha256, expand=False)
+            fetcher = fs.URLFetchStrategy(url=self.url, sha256=self.sha256, expand=False)
 
         # The same package can have multiple patches with the same name but
         # with different contents, therefore apply a subset of the hash.
@@ -501,7 +501,7 @@ def _generate_fetchers(self, mirror_only=False) -> Generator[fs.FetchStrategy, N
         fetchers[:0] = (
             fs.from_url_scheme(
                 url_util.join(mirror.fetch_url, rel_path),
-                digest,
+                checksum=digest,
                 expand=expand,
                 extension=extension,
             )
@@ -525,13 +525,13 @@ def _generate_fetchers(self, mirror_only=False) -> Generator[fs.FetchStrategy, N
         if self.search_fn and not mirror_only:
             yield from self.search_fn()
 
-    def fetch(self, mirror_only=False, err_msg=None):
+    def fetch(self, mirror_only: bool = False, err_msg: Optional[str] = None) -> None:
         """Retrieves the code or archive
 
         Args:
-            mirror_only (bool): only fetch from a mirror
-            err_msg (str or None): the error message to display if all fetchers
-                fail or ``None`` for the default fetch failure message
+            mirror_only: only fetch from a mirror
+            err_msg: the error message to display if all fetchers fail or ``None`` for the default
+                fetch failure message
         """
         errors: List[str] = []
         for fetcher in self._generate_fetchers(mirror_only):
@@ -593,16 +593,19 @@ def steal_source(self, dest):
         self.destroy()
 
     def check(self):
-        """Check the downloaded archive against a checksum digest.
-        No-op if this stage checks code out of a repository."""
+        """Check the downloaded archive against a checksum digest."""
         if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
+            cache = isinstance(self.fetcher, fs.CacheURLFetchStrategy)
+            if cache:
+                secure_msg = "your download cache is in a secure location"
+            else:
+                secure_msg = "you trust this mirror and have a secure connection"
             tty.warn(
-                "Fetching from mirror without a checksum!",
-                "This package is normally checked out from a version "
-                "control system, but it has been archived on a spack "
-                "mirror. This means we cannot know a checksum for the "
-                "tarball in advance. Be sure that your connection to "
-                "this mirror is secure!",
+                f"Using {'download cache' if cache else 'a mirror'} instead of version control",
+                "The required sources are normally checked out from a version control system, "
+                f"but have been archived {'in download cache' if cache else 'on a mirror'}: "
+                f"{self.fetcher}. Spack lacks a tree hash to verify the integrity of this "
+                f"archive. Make sure {secure_msg}.",
             )
         elif spack.config.get("config:checksum"):
             self.fetcher.check()
@@ -1171,7 +1174,7 @@ def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
     try:
         url_or_fs = url
         if options:
-            url_or_fs = fs.URLFetchStrategy(url, fetch_options=options)
+            url_or_fs = fs.URLFetchStrategy(url=url, fetch_options=options)
 
         with Stage(url_or_fs, keep=keep_stage) as stage:
             # Fetch the archive
@@ -1003,7 +1003,7 @@ def temporary_store(tmpdir, request):
 def mock_fetch(mock_archive, monkeypatch):
     """Fake the URL for a package so it downloads from a file."""
     monkeypatch.setattr(
-        spack.package_base.PackageBase, "fetcher", URLFetchStrategy(mock_archive.url)
+        spack.package_base.PackageBase, "fetcher", URLFetchStrategy(url=mock_archive.url)
     )
 
 
@@ -3,54 +3,21 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import os
-
-import pytest
-
 import spack.config
 import spack.error
 import spack.fetch_strategy
 import spack.stage
 
 
-@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_gcsfetchstrategy_without_url(_fetch_method):
-    """Ensure constructor with no URL fails."""
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-        with pytest.raises(ValueError):
-            spack.fetch_strategy.GCSFetchStrategy(None)
-
-
-@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_gcsfetchstrategy_bad_url(tmpdir, _fetch_method):
-    """Ensure fetch with bad URL fails as expected."""
-    testpath = str(tmpdir)
-
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-        fetcher = spack.fetch_strategy.GCSFetchStrategy(url="file:///does-not-exist")
-        assert fetcher is not None
-
-        with spack.stage.Stage(fetcher, path=testpath) as stage:
-            assert stage is not None
-            assert fetcher.archive_file is None
-            with pytest.raises(spack.error.FetchError):
-                fetcher.fetch()
-
-
-@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_gcsfetchstrategy_downloaded(tmpdir, _fetch_method):
+def test_gcsfetchstrategy_downloaded(tmp_path):
     """Ensure fetch with archive file already downloaded is a noop."""
-    testpath = str(tmpdir)
-    archive = os.path.join(testpath, "gcs.tar.gz")
+    archive = tmp_path / "gcs.tar.gz"
 
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-
-        class Archived_GCSFS(spack.fetch_strategy.GCSFetchStrategy):
-            @property
-            def archive_file(self):
-                return archive
-
-        url = "gcs:///{0}".format(archive)
-        fetcher = Archived_GCSFS(url=url)
-        with spack.stage.Stage(fetcher, path=testpath):
-            fetcher.fetch()
+    class Archived_GCSFS(spack.fetch_strategy.GCSFetchStrategy):
+        @property
+        def archive_file(self):
+            return str(archive)
+
+    fetcher = Archived_GCSFS(url="gs://example/gcs.tar.gz")
+    with spack.stage.Stage(fetcher, path=str(tmp_path)):
+        fetcher.fetch()
@@ -205,7 +205,7 @@ def test_invalid_json_mirror_collection(invalid_json, error_message):
 
 def test_mirror_archive_paths_no_version(mock_packages, mock_archive):
     spec = Spec("trivial-install-test-package@=nonexistingversion").concretized()
-    fetcher = spack.fetch_strategy.URLFetchStrategy(mock_archive.url)
+    fetcher = spack.fetch_strategy.URLFetchStrategy(url=mock_archive.url)
     spack.mirror.mirror_archive_paths(fetcher, "per-package-ref", spec)
 
 
@@ -48,7 +48,7 @@
 def test_buildcache(mock_archive, tmp_path, monkeypatch, mutable_config):
     # Install a test package
     spec = Spec("trivial-install-test-package").concretized()
-    monkeypatch.setattr(spec.package, "fetcher", URLFetchStrategy(mock_archive.url))
+    monkeypatch.setattr(spec.package, "fetcher", URLFetchStrategy(url=mock_archive.url))
     spec.package.do_install()
     pkghash = "/" + str(spec.dag_hash(7))
 
|
@ -3,54 +3,19 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
import spack.config as spack_config
|
|
||||||
import spack.error
|
|
||||||
import spack.fetch_strategy as spack_fs
|
import spack.fetch_strategy as spack_fs
|
||||||
import spack.stage as spack_stage
|
import spack.stage as spack_stage
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
|
def test_s3fetchstrategy_downloaded(tmp_path):
|
||||||
def test_s3fetchstrategy_sans_url(_fetch_method):
|
|
||||||
"""Ensure constructor with no URL fails."""
|
|
||||||
with spack_config.override("config:url_fetch_method", _fetch_method):
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
spack_fs.S3FetchStrategy(None)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
|
|
||||||
def test_s3fetchstrategy_bad_url(tmpdir, _fetch_method):
|
|
||||||
"""Ensure fetch with bad URL fails as expected."""
|
|
||||||
testpath = str(tmpdir)
|
|
||||||
|
|
||||||
with spack_config.override("config:url_fetch_method", _fetch_method):
|
|
||||||
fetcher = spack_fs.S3FetchStrategy(url="file:///does-not-exist")
|
|
||||||
assert fetcher is not None
|
|
||||||
|
|
||||||
with spack_stage.Stage(fetcher, path=testpath) as stage:
|
|
||||||
assert stage is not None
|
|
||||||
assert fetcher.archive_file is None
|
|
||||||
with pytest.raises(spack.error.FetchError):
|
|
||||||
fetcher.fetch()
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
|
|
||||||
def test_s3fetchstrategy_downloaded(tmpdir, _fetch_method):
|
|
||||||
"""Ensure fetch with archive file already downloaded is a noop."""
|
"""Ensure fetch with archive file already downloaded is a noop."""
|
||||||
testpath = str(tmpdir)
|
archive = tmp_path / "s3.tar.gz"
|
||||||
archive = os.path.join(testpath, "s3.tar.gz")
|
|
||||||
|
|
||||||
with spack_config.override("config:url_fetch_method", _fetch_method):
|
class Archived_S3FS(spack_fs.S3FetchStrategy):
|
||||||
|
@property
|
||||||
|
def archive_file(self):
|
||||||
|
return archive
|
||||||
|
|
||||||
class Archived_S3FS(spack_fs.S3FetchStrategy):
|
fetcher = Archived_S3FS(url="s3://example/s3.tar.gz")
|
||||||
@property
|
with spack_stage.Stage(fetcher, path=str(tmp_path)):
|
||||||
def archive_file(self):
|
fetcher.fetch()
|
||||||
return archive
|
|
||||||
|
|
||||||
url = "s3:///{0}".format(archive)
|
|
||||||
fetcher = Archived_S3FS(url=url)
|
|
||||||
with spack_stage.Stage(fetcher, path=testpath):
|
|
||||||
fetcher.fetch()
|
|
||||||
|
@@ -76,12 +76,6 @@ def fn_urls(v):
     return factory
 
 
-def test_urlfetchstrategy_sans_url():
-    """Ensure constructor with no URL fails."""
-    with pytest.raises(ValueError):
-        fs.URLFetchStrategy(None)
-
-
 @pytest.mark.parametrize("method", ["curl", "urllib"])
 def test_urlfetchstrategy_bad_url(tmp_path, mutable_config, method):
     """Ensure fetch with bad URL fails as expected."""
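The deleted no-url tests are obsolete by construction: a required keyword-only parameter means Python itself rejects a missing `url` with a `TypeError` before `__init__` runs, so there is no longer a `ValueError` branch to exercise. A plain-Python illustration (not Spack-specific):

```python
class Demo:
    def __init__(self, *, url: str):
        self.url = url

try:
    Demo()
except TypeError as e:
    print(e)  # __init__() missing 1 required keyword-only argument: 'url'
```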
@@ -267,7 +261,7 @@ def is_true():
     monkeypatch.setattr(sys.stdout, "isatty", is_true)
     monkeypatch.setattr(tty, "msg_enabled", is_true)
     with spack.config.override("config:url_fetch_method", "curl"):
-        fetcher = fs.URLFetchStrategy(mock_archive.url)
+        fetcher = fs.URLFetchStrategy(url=mock_archive.url)
         with Stage(fetcher, path=testpath) as stage:
             assert fetcher.archive_file is None
             stage.fetch()
@@ -280,7 +274,7 @@ def is_true():
 def test_url_extra_fetch(tmp_path, mutable_config, mock_archive, _fetch_method):
     """Ensure a fetch after downloading is effectively a no-op."""
     mutable_config.set("config:url_fetch_method", _fetch_method)
-    fetcher = fs.URLFetchStrategy(mock_archive.url)
+    fetcher = fs.URLFetchStrategy(url=mock_archive.url)
     with Stage(fetcher, path=str(tmp_path)) as stage:
         assert fetcher.archive_file is None
         stage.fetch()