Fetch & patch: actually acquire stage lock, and many more issues (#38903)

* Fetching patches wouldn't result in acquiring a stage lock during install
* The installer would acquire a stage lock *after* fetching instead of
   before, leading to races
* The name of the stage for patches was random, so on build failure
   (where stage dirs are not removed), these directories would continue
   to exist after a second successful install.
* There was this redundant "composite fetch" object -- there's already
   a composite stage. Remove this.
* For some reason we did *double* shasum validation of patches, before
   and after compression -- the second check was redundant, so I removed it.
This commit is contained in:
Harmen Stoppels 2023-07-19 15:06:56 +02:00 committed by GitHub
parent 8c8186c757
commit 185bccb70f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 212 additions and 221 deletions

View File

@ -10,6 +10,7 @@
import spack.config
import spack.environment as ev
import spack.repo
import spack.traverse
description = "fetch archives for packages"
section = "build"
@ -36,6 +37,12 @@ def setup_parser(subparser):
def fetch(parser, args):
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
spack.config.set("config:deprecated", True, scope="command_line")
if args.specs:
specs = spack.cmd.parse_specs(args.specs, concretize=True)
else:
@ -55,18 +62,17 @@ def fetch(parser, args):
else:
tty.die("fetch requires at least one spec argument")
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
if args.dependencies or args.missing:
to_be_fetched = spack.traverse.traverse_nodes(specs, key=spack.traverse.by_dag_hash)
else:
to_be_fetched = specs
if args.deprecated:
spack.config.set("config:deprecated", True, scope="command_line")
for spec in to_be_fetched:
if args.missing and spec.installed:
continue
for spec in specs:
if args.missing or args.dependencies:
for s in spec.traverse(root=False):
# Skip already-installed packages with --missing
if args.missing and s.installed:
continue
pkg = spec.package
s.package.do_fetch()
spec.package.do_fetch()
pkg.stage.keep = True
with pkg.stage:
pkg.do_fetch()

View File

@ -7,7 +7,11 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.traverse
description = "patch expanded archive sources in preparation for install"
section = "build"
@ -21,7 +25,10 @@ def setup_parser(subparser):
def patch(parser, args):
if not args.specs:
tty.die("patch requires at least one spec argument")
env = ev.active_environment()
if not env:
tty.die("`spack patch` requires a spec or an active environment")
return _patch_env(env)
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
@ -29,6 +36,19 @@ def patch(parser, args):
if args.deprecated:
spack.config.set("config:deprecated", True, scope="command_line")
specs = spack.cmd.parse_specs(args.specs, concretize=True)
specs = spack.cmd.parse_specs(args.specs, concretize=False)
for spec in specs:
spec.package.do_patch()
_patch(spack.cmd.matching_spec_from_env(spec).package)
def _patch_env(env: ev.Environment):
    """Stage and patch every package in the DAGs of *env*'s concrete roots."""
    tty.msg(f"Patching specs from environment {env.name}")
    # Traverse all nodes reachable from the environment's concrete roots,
    # patching each node's package.
    for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
        _patch(spec.package)
def _patch(pkg: spack.package_base.PackageBase):
    """Run ``do_patch`` for *pkg* inside its stage context manager.

    The stage context manager is used so patching does not race with a
    concurrent install of the same package; ``keep = True`` ensures the
    stage directory is not removed when the context manager exits, so the
    patched sources remain on disk for inspection / later install.
    """
    pkg.stage.keep = True
    with pkg.stage:
        pkg.do_patch()
    tty.msg(f"Patched {pkg.name} in {pkg.stage.path}")

View File

@ -9,9 +9,12 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.stage
import spack.traverse
description = "expand downloaded archive in preparation for install"
section = "build"
@ -27,24 +30,18 @@ def setup_parser(subparser):
def stage(parser, args):
if not args.specs:
env = ev.active_environment()
if env:
tty.msg("Staging specs from environment %s" % env.name)
for spec in env.specs_by_hash.values():
for dep in spec.traverse():
dep.package.do_stage()
tty.msg("Staged {0} in {1}".format(dep.package.name, dep.package.stage.path))
return
else:
tty.die("`spack stage` requires a spec or an active environment")
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
spack.config.set("config:deprecated", True, scope="command_line")
if not args.specs:
env = ev.active_environment()
if not env:
tty.die("`spack stage` requires a spec or an active environment")
return _stage_env(env)
specs = spack.cmd.parse_specs(args.specs, concretize=False)
# We temporarily modify the working directory when setting up a stage, so we need to
@ -57,7 +54,24 @@ def stage(parser, args):
for spec in specs:
spec = spack.cmd.matching_spec_from_env(spec)
pkg = spec.package
if custom_path:
spec.package.path = custom_path
spec.package.do_stage()
tty.msg("Staged {0} in {1}".format(spec.package.name, spec.package.stage.path))
pkg.path = custom_path
_stage(pkg)
def _stage_env(env: ev.Environment):
    """Stage every package in the DAGs of *env*'s concrete roots."""
    tty.msg(f"Staging specs from environment {env.name}")
    # Traverse all nodes reachable from the environment's concrete roots,
    # staging each node's package.
    for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
        _stage(spec.package)
def _stage(pkg: spack.package_base.PackageBase):
    """Run ``do_stage`` for *pkg* inside its stage context manager."""
    # Use context manager to ensure we don't restage while an installation is in progress
    # keep = True ensures that the stage is not removed after exiting the context manager
    pkg.stage.keep = True
    with pkg.stage:
        pkg.do_stage()
    tty.msg(f"Staged {pkg.name} in {pkg.stage.path}")

View File

@ -29,6 +29,7 @@
import spack.concretize
import spack.config
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
import spack.hooks
import spack.main
@ -1265,12 +1266,12 @@ def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
if isinstance(package.fetcher[0], spack.fetch_strategy.GitFetchStrategy):
package.fetcher[0].get_full_repo = True
if isinstance(package.fetcher, spack.fetch_strategy.GitFetchStrategy):
package.fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
package.stage[0].disable_mirrors()
package.stage.disable_mirrors()
package.stage.steal_source(abspath)

View File

@ -42,7 +42,6 @@
import spack.url
import spack.util.crypto as crypto
import spack.util.git
import spack.util.pattern as pattern
import spack.util.url as url_util
import spack.util.web as web_util
import spack.version
@ -228,24 +227,6 @@ def mirror_id(self):
"""BundlePackages don't have a mirror id."""
class FetchStrategyComposite(pattern.Composite):
"""Composite for a FetchStrategy object."""
matches = FetchStrategy.matches
def __init__(self):
super().__init__(["fetch", "check", "expand", "reset", "archive", "cachable", "mirror_id"])
def source_id(self):
component_ids = tuple(i.source_id() for i in self)
if all(component_ids):
return component_ids
def set_package(self, package):
for item in self:
item.package = package
@fetcher
class URLFetchStrategy(FetchStrategy):
"""URLFetchStrategy pulls source code from a URL for an archive, check the
@ -488,17 +469,7 @@ def check(self):
if not self.digest:
raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
checker = crypto.Checker(self.digest)
if not checker.check(self.archive_file):
# On failure, provide some information about the file size and
# contents, so that we can quickly see what the issue is (redirect
# was not followed, empty file, text instead of binary, ...)
size, contents = fs.filesummary(self.archive_file)
raise ChecksumError(
f"{checker.hash_name} checksum failed for {self.archive_file}",
f"Expected {self.digest} but got {checker.sum}. "
f"File size = {size} bytes. Contents = {contents!r}",
)
verify_checksum(self.archive_file, self.digest)
@_needs_stage
def reset(self):
@ -1388,6 +1359,45 @@ def fetch(self):
raise FailedDownloadError(self.url)
@fetcher
class FetchAndVerifyExpandedFile(URLFetchStrategy):
    """Fetch strategy that verifies the content digest during fetching,
    as well as after expanding it.

    Used for single-file downloads (e.g. compressed patches) where both the
    archive and its expanded contents have known sha256 digests.
    """

    def __init__(self, url, archive_sha256: str, expanded_sha256: str):
        # The archive digest is checked by the parent URLFetchStrategy.
        super().__init__(url, archive_sha256)
        # Digest that the single file inside the expanded archive must match.
        self.expanded_sha256 = expanded_sha256

    def expand(self):
        """Verify checksum after expanding the archive."""
        # Expand the archive
        super().expand()

        # Ensure a single patch file.
        src_dir = self.stage.source_path
        files = os.listdir(src_dir)

        if len(files) != 1:
            raise ChecksumError(self, f"Expected a single file in {src_dir}.")

        verify_checksum(os.path.join(src_dir, files[0]), self.expanded_sha256)
def verify_checksum(file, digest):
    """Hash *file* and raise ``ChecksumError`` if it does not match *digest*."""
    checker = crypto.Checker(digest)
    if not checker.check(file):
        # On failure, provide some information about the file size and
        # contents, so that we can quickly see what the issue is (redirect
        # was not followed, empty file, text instead of binary, ...)
        size, contents = fs.filesummary(file)
        raise ChecksumError(
            f"{checker.hash_name} checksum failed for {file}",
            f"Expected {digest} but got {checker.sum}. "
            f"File size = {size} bytes. Contents = {contents!r}",
        )
def stable_target(fetcher):
"""Returns whether the fetcher target is expected to have a stable
checksum. This is only true if the target is a preexisting archive

View File

@ -2341,28 +2341,28 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
def run(self) -> bool:
"""Main entry point from ``build_process`` to kick off install in child."""
self.timer.start("stage")
if not self.fake:
if not self.skip_patch:
self.pkg.do_patch()
else:
self.pkg.do_stage()
self.timer.stop("stage")
tty.debug(
"{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class) # type: ignore[attr-defined] # noqa: E501
)
# get verbosity from do_install() parameter or saved value
self.echo = self.verbose
if spack.package_base.PackageBase._verbose is not None:
self.echo = spack.package_base.PackageBase._verbose
self.pkg.stage.keep = self.keep_stage
with self.pkg.stage:
self.timer.start("stage")
if not self.fake:
if not self.skip_patch:
self.pkg.do_patch()
else:
self.pkg.do_stage()
self.timer.stop("stage")
tty.debug(
"{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class) # type: ignore[attr-defined] # noqa: E501
)
# get verbosity from do_install() parameter or saved value
self.echo = self.verbose
if spack.package_base.PackageBase._verbose is not None:
self.echo = spack.package_base.PackageBase._verbose
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self.pkg.spec)

View File

@ -675,12 +675,9 @@ def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
num_retries = 3
while num_retries > 0:
try:
# Includes patches and resources
with pkg_obj.stage as pkg_stage:
pkg_stage.cache_mirror(mirror_cache, mirror_stats)
for patch in pkg_obj.all_patches():
if patch.stage:
patch.stage.cache_mirror(mirror_cache, mirror_stats)
patch.clean()
exception = None
break
except Exception as e:

View File

@ -44,6 +44,7 @@
import spack.mirror
import spack.mixins
import spack.multimethod
import spack.patch
import spack.paths
import spack.repo
import spack.spec
@ -62,7 +63,7 @@
install_test_root,
)
from spack.installer import InstallError, PackageInstaller
from spack.stage import ResourceStage, Stage, StageComposite, compute_stage_name
from spack.stage import DIYStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.util.web import FetchError
@ -981,20 +982,17 @@ def find_valid_url_for_version(self, version):
return None
def _make_resource_stage(self, root_stage, fetcher, resource):
resource_stage_folder = self._resource_stage(resource)
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher, os.path.join(self.name, "%s-%s" % (resource.name, self.version))
)
stage = ResourceStage(
def _make_resource_stage(self, root_stage, resource):
return ResourceStage(
resource.fetcher,
root=root_stage,
resource=resource,
name=resource_stage_folder,
mirror_paths=mirror_paths,
name=self._resource_stage(resource),
mirror_paths=spack.mirror.mirror_archive_paths(
resource.fetcher, os.path.join(self.name, f"{resource.name}-{self.version}")
),
path=self.path,
)
return stage
def _download_search(self):
dynamic_fetcher = fs.from_list_url(self)
@ -1003,7 +1001,7 @@ def _download_search(self):
def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher, os.path.join(self.name, "%s-%s" % (self.name, self.version)), self.spec
fetcher, os.path.join(self.name, f"{self.name}-{self.version}"), self.spec
)
# Construct a path where the stage should build..
s = self.spec
@ -1021,24 +1019,21 @@ def _make_stage(self):
# If it's a dev package (not transitively), use a DIY stage object
dev_path_var = self.spec.variants.get("dev_path", None)
if dev_path_var:
return spack.stage.DIYStage(dev_path_var.value)
return DIYStage(dev_path_var.value)
# Construct a composite stage on top of the composite FetchStrategy
composite_fetcher = self.fetcher
composite_stage = StageComposite()
resources = self._get_needed_resources()
for ii, fetcher in enumerate(composite_fetcher):
if ii == 0:
# Construct root stage first
stage = self._make_root_stage(fetcher)
else:
# Construct resource stage
resource = resources[ii - 1] # ii == 0 is root!
stage = self._make_resource_stage(composite_stage[0], fetcher, resource)
# Append the item to the composite
composite_stage.append(stage)
# To fetch the current version
source_stage = self._make_root_stage(self.fetcher)
return composite_stage
# Extend it with all resources and patches
all_stages = StageComposite()
all_stages.append(source_stage)
all_stages.extend(
self._make_resource_stage(source_stage, r) for r in self._get_needed_resources()
)
all_stages.extend(
p.stage for p in self.spec.patches if isinstance(p, spack.patch.UrlPatch)
)
return all_stages
@property
def stage(self):
@ -1187,26 +1182,12 @@ def installed_upstream(self):
warnings.warn(msg)
return self.spec.installed_upstream
def _make_fetcher(self):
# Construct a composite fetcher that always contains at least
# one element (the root package). In case there are resources
# associated with the package, append their fetcher to the
# composite.
root_fetcher = fs.for_package_version(self)
fetcher = fs.FetchStrategyComposite() # Composite fetcher
fetcher.append(root_fetcher) # Root fetcher is always present
resources = self._get_needed_resources()
for resource in resources:
fetcher.append(resource.fetcher)
fetcher.set_package(self)
return fetcher
@property
def fetcher(self):
if not self.spec.versions.concrete:
raise ValueError("Cannot retrieve fetcher for package without concrete version.")
if not self._fetcher:
self._fetcher = self._make_fetcher()
self._fetcher = fs.for_package_version(self)
return self._fetcher
@fetcher.setter
@ -1445,11 +1426,6 @@ def do_fetch(self, mirror_only=False):
self.stage.cache_local()
for patch in self.spec.patches:
patch.fetch()
if patch.stage:
patch.stage.cache_local()
def do_stage(self, mirror_only=False):
"""Unpacks and expands the fetched tarball."""
# Always create the stage directory at this point. Why? A no-code
@ -2341,9 +2317,6 @@ def do_restage(self):
def do_clean(self):
"""Removes the package's build stage and source tarball."""
for patch in self.spec.patches:
patch.clean()
self.stage.destroy()
@classmethod

View File

@ -15,6 +15,7 @@
import spack
import spack.error
import spack.fetch_strategy as fs
import spack.mirror
import spack.repo
import spack.stage
import spack.util.spack_json as sjson
@ -75,22 +76,14 @@ def __init__(self, pkg, path_or_url, level, working_dir):
self.level = level
self.working_dir = working_dir
def fetch(self):
"""Fetch the patch in case of a UrlPatch"""
def clean(self):
"""Clean up the patch stage in case of a UrlPatch"""
def apply(self, stage):
def apply(self, stage: spack.stage.Stage):
"""Apply a patch to source in a stage.
Arguments:
stage (spack.stage.Stage): stage where source code lives
"""
assert self.path, "Path for patch not set in apply: %s" % self.path_or_url
if not os.path.isfile(self.path):
raise NoSuchPatchError("No such patch: %s" % self.path)
if not self.path or not os.path.isfile(self.path):
raise NoSuchPatchError(f"No such patch: {self.path}")
apply_patch(stage, self.path, self.level, self.working_dir)
@ -197,70 +190,44 @@ def __init__(self, pkg, url, level=1, working_dir=".", ordering_key=None, **kwar
if not self.sha256:
raise PatchDirectiveError("URL patches require a sha256 checksum")
def fetch(self):
"""Retrieve the patch in a temporary stage and compute self.path
def apply(self, stage: spack.stage.Stage):
assert self.stage.expanded, "Stage must be expanded before applying patches"
Args:
stage: stage for the package that needs to be patched
"""
self.stage.create()
self.stage.fetch()
self.stage.check()
# Get the patch file.
files = os.listdir(self.stage.source_path)
assert len(files) == 1, "Expected one file in stage source path, found %s" % files
self.path = os.path.join(self.stage.source_path, files[0])
root = self.stage.path
if self.archive_sha256:
self.stage.expand_archive()
root = self.stage.source_path
files = os.listdir(root)
if not files:
if self.archive_sha256:
raise NoSuchPatchError("Archive was empty: %s" % self.url)
else:
raise NoSuchPatchError("Patch failed to download: %s" % self.url)
self.path = os.path.join(root, files.pop())
if not os.path.isfile(self.path):
raise NoSuchPatchError("Archive %s contains no patch file!" % self.url)
# for a compressed archive, Need to check the patch sha256 again
# and the patch is in a directory, not in the same place
if self.archive_sha256 and spack.config.get("config:checksum"):
checker = Checker(self.sha256)
if not checker.check(self.path):
raise fs.ChecksumError(
"sha256 checksum failed for %s" % self.path,
"Expected %s but got %s" % (self.sha256, checker.sum),
)
return super().apply(stage)
@property
def stage(self):
if self._stage:
return self._stage
# use archive digest for compressed archives
fetch_digest = self.sha256
if self.archive_sha256:
fetch_digest = self.archive_sha256
fetch_digest = self.archive_sha256 or self.sha256
fetcher = fs.URLFetchStrategy(self.url, fetch_digest, expand=bool(self.archive_sha256))
# Two checksums, one for compressed file, one for its contents
if self.archive_sha256:
fetcher = fs.FetchAndVerifyExpandedFile(
self.url, archive_sha256=self.archive_sha256, expanded_sha256=self.sha256
)
else:
fetcher = fs.URLFetchStrategy(self.url, sha256=self.sha256, expand=False)
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
# Reference starting with "spack." is required to avoid cyclic imports
mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
self._stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
self._stage.create()
self._stage = spack.stage.Stage(
fetcher,
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
mirror_paths=mirror_ref,
)
return self._stage
def clean(self):
self.stage.destroy()
def to_dict(self):
data = super().to_dict()
data["url"] = self.url

View File

@ -50,7 +50,7 @@
def compute_stage_name(spec):
"""Determine stage name given a spec"""
default_stage_structure = "spack-stage-{name}-{version}-{hash}"
default_stage_structure = stage_prefix + "{name}-{version}-{hash}"
stage_name_structure = spack.config.get("config:stage_name", default=default_stage_structure)
return spec.format(format_string=stage_name_structure)
@ -744,6 +744,7 @@ def __init__(self):
"cache_local",
"cache_mirror",
"steal_source",
"disable_mirrors",
"managed_by_spack",
]
)

View File

@ -10,8 +10,11 @@
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
from spack.main import SpackCommand
import spack.stage
import spack.traverse
from spack.main import SpackCommand, SpackCommandError
from spack.version import Version
stage = SpackCommand("stage")
@ -21,6 +24,7 @@
@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_spec(monkeypatch):
"""Verify that staging specs works."""
@ -56,11 +60,12 @@ def test_stage_path(check_stage_path):
def test_stage_path_errors_multiple_specs(check_stage_path):
"""Verify that --path only works with single specs."""
with pytest.raises(spack.main.SpackCommandError):
stage("--path={0}".format(check_stage_path), "trivial-install-test-package", "mpileaks")
with pytest.raises(SpackCommandError):
stage(f"--path={check_stage_path}", "trivial-install-test-package", "mpileaks")
@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_with_env_outside_env(mutable_mock_env_path, monkeypatch):
"""Verify that stage concretizes specs not in environment instead of erroring."""
@ -79,6 +84,7 @@ def fake_stage(pkg, mirror_only=False):
@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_with_env_inside_env(mutable_mock_env_path, monkeypatch):
"""Verify that stage filters specs in environment instead of reconcretizing."""
@ -97,6 +103,7 @@ def fake_stage(pkg, mirror_only=False):
@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_full_env(mutable_mock_env_path, monkeypatch):
"""Verify that stage filters specs in environment."""
@ -105,10 +112,7 @@ def test_stage_full_env(mutable_mock_env_path, monkeypatch):
e.concretize()
# list all the package names that should be staged
expected = set()
for spec in e.specs_by_hash.values():
for dep in spec.traverse():
expected.add(dep.name)
expected = set(dep.name for dep in spack.traverse.traverse_nodes(e.concrete_roots()))
# pop the package name from the list instead of actually staging
def fake_stage(pkg, mirror_only=False):

View File

@ -178,11 +178,11 @@ def test_config_set_pkg_property_url(self, mutable_mock_repo):
{"url": "http://www.somewhereelse.com/mpileaks-1.0.tar.gz"},
)
spec = concretize("mpileaks")
assert spec.package.fetcher[0].url == "http://www.somewhereelse.com/mpileaks-2.3.tar.gz"
assert spec.package.fetcher.url == "http://www.somewhereelse.com/mpileaks-2.3.tar.gz"
update_packages("mpileaks", "package_attributes", {})
spec = concretize("mpileaks")
assert spec.package.fetcher[0].url == "http://www.llnl.gov/mpileaks-2.3.tar.gz"
assert spec.package.fetcher.url == "http://www.llnl.gov/mpileaks-2.3.tar.gz"
def test_config_set_pkg_property_new(self, mutable_mock_repo):
"""Test that you can set arbitrary attributes on the Package class"""

View File

@ -50,7 +50,7 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch
from spack.util.web import FetchError
@ -993,10 +993,9 @@ def install_mockery_mutable_config(temporary_store, mutable_config, mock_package
@pytest.fixture()
def mock_fetch(mock_archive, monkeypatch):
"""Fake the URL for a package so it downloads from a file."""
mock_fetcher = FetchStrategyComposite()
mock_fetcher.append(URLFetchStrategy(mock_archive.url))
monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", mock_fetcher)
monkeypatch.setattr(
spack.package_base.PackageBase, "fetcher", URLFetchStrategy(mock_archive.url)
)
class MockLayout:

View File

@ -20,6 +20,7 @@
import spack.concretize
import spack.config
import spack.installer as inst
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
import spack.spec
@ -1139,6 +1140,7 @@ def _test_install_fail_fast_on_except_patch(installer, **kwargs):
raise RuntimeError("mock patch failure")
@pytest.mark.disable_clean_stage_check
def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
"""Test fail_fast install when an install failure results from an error."""
const_arg = installer_args(["a"], {"fail_fast": True})

View File

@ -65,7 +65,7 @@ def check_mirror():
assert os.path.isdir(mirror_root)
for spec in specs:
fetcher = spec.package.fetcher[0]
fetcher = spec.package.fetcher
per_package_ref = os.path.join(spec.name, "-".join([spec.name, str(spec.version)]))
mirror_paths = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
expected_path = os.path.join(mirror_root, mirror_paths.storage_path)

View File

@ -26,7 +26,7 @@
import spack.store
import spack.util.gpg
import spack.util.url as url_util
from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy
from spack.fetch_strategy import URLFetchStrategy
from spack.paths import mock_gpg_keys_path
from spack.relocate import (
macho_find_paths,
@ -46,9 +46,7 @@
def test_buildcache(mock_archive, tmp_path, monkeypatch, mutable_config):
# Install a test package
spec = Spec("trivial-install-test-package").concretized()
fetcher = FetchStrategyComposite()
fetcher.append(URLFetchStrategy(mock_archive.url))
monkeypatch.setattr(spec.package, "fetcher", fetcher)
monkeypatch.setattr(spec.package, "fetcher", URLFetchStrategy(mock_archive.url))
spec.package.do_install()
pkghash = "/" + str(spec.dag_hash(7))
@ -485,8 +483,7 @@ def fetch(self):
"<non-existent URL>", "This FetchStrategy always fails"
)
fetcher = FetchStrategyComposite()
fetcher.append(FailedDownloadStrategy())
fetcher = FailedDownloadStrategy()
@property
def fake_fn(self):
@ -532,9 +529,7 @@ def mirror_id(self):
def fetch(self):
raise Exception("Sources are fetched but shouldn't have been")
fetcher = FetchStrategyComposite()
fetcher.append(FetchingNotAllowed())
monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", fetcher)
monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", FetchingNotAllowed())
def test_fetch_without_code_is_noop(

View File

@ -115,9 +115,11 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config):
"""
)
# apply the patch and compare files
patch.fetch()
patch.apply(stage)
patch.clean()
with patch.stage:
patch.stage.create()
patch.stage.fetch()
patch.stage.expand_archive()
patch.apply(stage)
with working_dir(stage.source_path):
assert filecmp.cmp("foo.txt", "foo-expected.txt")