Compare commits
develop...features/s (9 commits)

Commits: 655ee3387b, a1d37bbd7a, af4cb0a14b, 930dfd7cc4, 9b53bb09f5, 95b9be2c14, 43c8bb9fa3, e59901d1f4, 3d84549fbe
.github/workflows/valid-style.yml (vendored, 2 changes)
```diff
@@ -85,5 +85,5 @@ jobs:
           source share/spack/setup-env.sh
           spack debug report
           spack -d bootstrap now --dev
-          spack style -t black
+          spack -d style -t black
           spack unit-test -V
```
```diff
@@ -716,7 +716,7 @@ def get_buildfile_manifest(spec):
 def hashes_to_prefixes(spec):
     """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
     return {
-        s.dag_hash(): str(s.prefix)
+        s.dag_hash(): (s.name, str(s.prefix))
         for s in itertools.chain(
             spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
@@ -737,7 +737,7 @@ def get_buildinfo_dict(spec):
         "relocate_binaries": manifest["binary_to_relocate"],
         "relocate_links": manifest["link_to_relocate"],
         "hardlinks_deduped": manifest["hardlinks_deduped"],
-        "hash_to_prefix": hashes_to_prefixes(spec),
+        "hash_to_prefix": {h: info[1] for h, info in hashes_to_prefixes(spec).items()},
     }
@@ -1531,7 +1531,6 @@ def _push_oci(
     Dict[str, spack.oci.oci.Blob],
     List[Tuple[Spec, BaseException]],
 ]:
 
     # Spec dag hash -> blob
     checksums: Dict[str, spack.oci.oci.Blob] = {}
 
@@ -2101,9 +2100,25 @@ def relocate_package(spec):
     # First match specific prefix paths. Possibly the *local* install prefix
     # of some dependency is in an upstream, so we cannot assume the original
     # spack store root can be mapped uniformly to the new spack store root.
-    for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
-        if dag_hash in hash_to_old_prefix:
-            old_dep_prefix = hash_to_old_prefix[dag_hash]
+    for dag_hash, prefix_info in hashes_to_prefixes(spec).items():
+        name, new_dep_prefix = prefix_info
+        try:
+            lookup_dag_hash = spec.build_spec[name].dag_hash()
+        except KeyError:
+            dependent_edges = spec[name].edges_from_dependents()
+            virtuals = set()
+            for edge in dependent_edges:
+                virtuals.update(edge.virtuals)
+            for virtual in virtuals:
+                try:
+                    lookup_dag_hash = spec.build_spec[virtual].dag_hash()
+                    break
+                except KeyError:
+                    # This is a new dependency
+                    tty.debug(f"{spec} does not have relocation for {name}")
+
+        if lookup_dag_hash in hash_to_old_prefix:
+            old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
             prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
             prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix
@@ -2421,6 +2436,10 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
 
 
 def install_root_node(spec, unsigned=False, force=False, sha256=None):
+    # TODO?: Go through this and determine where it needs to be the deploy_spec vs. the spec.
+    # TODO?: Modify dependency lookup s.t. if it's not there, figure out what virtual it provided
+    # and find out what is now providing that virtual. Else raise an error that it didn't exist.
+    # Inverts the logic where you would call the build_spec as spec and the spec as deploy_spec
     """Install the root node of a concrete spec from a buildcache.
 
     Checking the sha256 sum of a node before installation is usually needed only
@@ -2443,10 +2462,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
         warnings.warn("Package for spec {0} already installed.".format(spec.format()))
         return
 
-    download_result = download_tarball(spec, unsigned)
+    download_result = download_tarball(spec.build_spec, unsigned)
     if not download_result:
         msg = 'download of binary cache file for spec "{0}" failed'
-        raise RuntimeError(msg.format(spec.format()))
+        raise RuntimeError(msg.format(spec.build_spec.format()))
 
     if sha256:
         checker = spack.util.crypto.Checker(sha256)
@@ -2466,6 +2485,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
+    if spec.spliced:  # overwrite old metadata with new
+        spack.store.STORE.layout.write_spec(
+            spec, spack.store.STORE.layout.spec_file_path(spec)
+        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, spack.store.STORE.layout)
```
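The two hunks at -716 and -737 above change `hashes_to_prefixes()` to carry the package name alongside each prefix, while `get_buildinfo_dict()` keeps writing plain hash-to-prefix strings by projecting the tuple back out. A minimal sketch of the new shapes (hashes and paths are invented, not taken from the diff):

```python
# New value shape: dag hash -> (package name, install prefix).
hashes_to_prefixes = {
    "abc123": ("zlib", "/opt/spack/zlib-1.3-abc123"),
    "def456": ("openmpi", "/opt/spack/openmpi-4.1-def456"),
}

# What get_buildinfo_dict() now stores under "hash_to_prefix": the name is
# dropped again, so the on-disk buildinfo format is unchanged.
hash_to_prefix = {h: info[1] for h, info in hashes_to_prefixes.items()}
assert hash_to_prefix["abc123"] == "/opt/spack/zlib-1.3-abc123"
```

The retained name is what lets `relocate_package()` look a dependency up in the build spec and fall back to the virtuals on its dependent edges when the spliced spec no longer contains a package of that name.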
```diff
@@ -53,6 +53,7 @@
 import spack.schema.modules
 import spack.schema.packages
 import spack.schema.repos
+import spack.schema.splice
 import spack.schema.upstreams
 
 # Hacked yaml for configuration files preserves line numbers.
@@ -77,6 +78,7 @@
     "bootstrap": spack.schema.bootstrap.schema,
     "ci": spack.schema.ci.schema,
     "cdash": spack.schema.cdash.schema,
+    "splice": spack.schema.splice.schema,
 }
 
 # Same as above, but including keys for environments
```
```diff
@@ -2168,6 +2168,13 @@ def _concrete_specs_dict(self):
             # Assumes no legacy formats, since this was just created.
             spec_dict[ht.dag_hash.name] = s.dag_hash()
             concrete_specs[s.dag_hash()] = spec_dict
 
+            if s.build_spec is not s:
+                for d in s.build_spec.traverse():
+                    build_spec_dict = d.node_dict_with_hashes(hash=ht.dag_hash)
+                    build_spec_dict[ht.dag_hash.name] = d.dag_hash()
+                    concrete_specs[d.dag_hash()] = build_spec_dict
+
         return concrete_specs
 
     def _concrete_roots_dict(self):
@@ -2327,7 +2334,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
             specs_by_hash[lockfile_key] = spec
 
         # Second pass: For each spec, get its dependencies from the node dict
-        # and add them to the spec
+        # and add them to the spec, including build specs
         for lockfile_key, node_dict in json_specs_by_hash.items():
             name, data = reader.name_and_data(node_dict)
             for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
@@ -2335,6 +2342,10 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
                     specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
                 )
 
+            if "build_spec" in node_dict:
+                _, bhash, _ = reader.build_spec_from_node_dict(node_dict)
+                specs_by_hash[lockfile_key]._build_spec = specs_by_hash[bhash]
+
         # Traverse the root specs one at a time in the order they appear.
         # The first time we see each DAG hash, that's the one we want to
         # keep. This is only required as long as we support older lockfile
```
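The lockfile hunks above write each spliced spec's build-spec nodes next to the deployed nodes and, on read, reconnect them by hash via `_build_spec`. A toy sketch of that reconnection pass, with plain dicts standing in for the lockfile and hashes invented:

```python
# Stand-in for the second pass of filter_specs(): node dicts carrying a
# "build_spec" entry get their _build_spec pointer reconnected by hash.
class Node:
    def __init__(self, name):
        self.name = name
        self._build_spec = None

specs_by_hash = {"deadbeef": Node("zlib-spliced"), "cafebabe": Node("zlib")}
json_specs_by_hash = {
    "deadbeef": {"build_spec": {"hash": "cafebabe"}},
    "cafebabe": {},
}

for lockfile_key, node_dict in json_specs_by_hash.items():
    if "build_spec" in node_dict:
        bhash = node_dict["build_spec"]["hash"]
        specs_by_hash[lockfile_key]._build_spec = specs_by_hash[bhash]

assert specs_by_hash["deadbeef"]._build_spec.name == "zlib"
```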
```diff
@@ -2,8 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""
-This module encapsulates package installation functionality.
+"""This module encapsulates package installation functionality.
 
 The PackageInstaller coordinates concurrent builds of packages for the same
 Spack instance by leveraging the dependency DAG and file system locks. It
@@ -17,16 +16,18 @@
 File system locks enable coordination such that no two processes attempt to
 build the same or a failed dependency package.
 
-Failures to install dependency packages result in removal of their dependents'
-build tasks from the current process. A failure file is also written (and
-locked) so that other processes can detect the failure and adjust their build
-tasks accordingly.
+Failures to install dependency packages result in removal of their
+dependents' tasks from the current process. A failure file is also
+written (and locked) so that other processes can detect the failure
+and adjust their tasks accordingly.
 
 This module supports the coordination of local and distributed concurrent
 installations of packages in a Spack instance.
 
 """
 
 import copy
+import enum
 import glob
 import heapq
 import io
@@ -58,6 +59,7 @@
 import spack.package_base
 import spack.package_prefs as prefs
 import spack.repo
+import spack.rewiring
 import spack.spec
 import spack.store
 import spack.util.executable
```
```diff
@@ -102,7 +104,16 @@ def _write_timer_json(pkg, timer, cache):
     return
 
 
-class InstallAction:
+class ExecuteResult(enum.Enum):
+    # Task succeeded
+    SUCCESS = enum.auto()
+    # Task failed
+    FAILED = enum.auto()
+    # Task is missing build spec and will be requeued
+    MISSING_BUILD_SPEC = enum.auto()
+
+
+class InstallAction(enum.Enum):
     #: Don't perform an install
     NONE = 0
     #: Do a standard install
```
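The new `ExecuteResult` enum lets `_install_task()` (rewritten further down in this diff) branch on the outcome of `Task.execute()` instead of each task type calling back into the installer. A minimal sketch of that dispatch with a stub task type (stub names are invented):

```python
import enum

class ExecuteResult(enum.Enum):
    SUCCESS = enum.auto()
    FAILED = enum.auto()
    MISSING_BUILD_SPEC = enum.auto()

class StubRewireTask:
    """Stand-in for RewireTask: reports a missing build spec on the first try."""
    def __init__(self):
        self.attempts = 0

    def execute(self):
        self.attempts += 1
        if self.attempts == 1:
            return ExecuteResult.MISSING_BUILD_SPEC
        return ExecuteResult.SUCCESS

def install_task(task):
    # Mirrors the new _install_task() control flow.
    rc = task.execute()
    if rc == ExecuteResult.MISSING_BUILD_SPEC:
        return "requeue with build-spec tasks"
    return "mark installed"

task = StubRewireTask()
assert install_task(task) == "requeue with build-spec tasks"
assert install_task(task) == "mark installed"
```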
```diff
@@ -276,6 +287,13 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
     dump_packages(pkg.spec, packages_dir)
 
 
+def _add_compiler_package_to_config(pkg):
+    compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
+    spack.compilers.add_compilers_to_config(
+        spack.compilers.find_compilers([compiler_search_prefix])
+    )
+
+
 def _packages_needed_to_bootstrap_compiler(
     compiler: "spack.spec.CompilerSpec", architecture: "spack.spec.ArchSpec", pkgs: list
 ) -> List[Tuple["spack.package_base.PackageBase", bool]]:
@@ -725,7 +743,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
 def package_id(spec: "spack.spec.Spec") -> str:
     """A "unique" package identifier for installation purposes
 
-    The identifier is used to track build tasks, locks, install, and
+    The identifier is used to track tasks, locks, install, and
     failure statuses.
 
     The identifier needs to distinguish between combinations of compilers
@@ -888,9 +906,10 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
             yield dep
 
 
-class BuildTask:
-    """Class for representing the build task for a package."""
+class Task:
+    """Base class for representing a task for a package."""
 
+    # TODO: Consider adding pid as a parameter here:
     def __init__(
         self,
         pkg: "spack.package_base.PackageBase",
@@ -902,7 +921,7 @@ def __init__(
         installed: Set[str],
     ):
         """
-        Instantiate a build task for a package.
+        Instantiate a task for a package.
 
         Args:
             pkg: the package to be built and installed
@@ -938,13 +957,13 @@ def __init__(
         # queue.
         if status == STATUS_REMOVED:
             raise InstallError(
-                f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg
+                f"Cannot create a task for {self.pkg_id} with status '{status}'", pkg=pkg
             )
 
         self.status = status
 
-        # Package is associated with a bootstrap compiler
-        self.compiler = compiler
+        # Getting the PID again because it will be needed for execute functionality.
+        # TODO: Should this be cached in PackageInstaller?
+        self.pid = os.getpid()
 
         # The initial start time for processing the spec
         self.start = start
@@ -968,28 +987,8 @@ def __init__(
             if package_id(d) != self.pkg_id
         )
 
-        # Handle bootstrapped compiler
-        #
-        # The bootstrapped compiler is not a dependency in the spec, but it is
-        # a dependency of the build task. Here we add it to self.dependencies
-        compiler_spec = self.pkg.spec.compiler
-        arch_spec = self.pkg.spec.architecture
-        strict = spack.concretize.Concretizer().check_for_compiler_existence
-        if (
-            not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
-            and not strict
-        ):
-            # The compiler is in the queue, identify it as dependency
-            dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
-            dep.constrain(f"platform={str(arch_spec.platform)}")
-            dep.constrain(f"os={str(arch_spec.os)}")
-            dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
-            dep.concretize()
-            dep_id = package_id(dep)
-            self.dependencies.add(dep_id)
-
         # List of uninstalled dependencies, which is used to establish
-        # the priority of the build task.
+        # the priority of the task.
         #
         self.uninstalled_deps = set(
             pkg_id for pkg_id in self.dependencies if pkg_id not in installed
@@ -999,6 +998,33 @@ def __init__(
         self.attempts = 0
         self._update()
 
+        # Is this task to install a compiler
+        self.compiler = compiler
+
+        # Handle bootstrapped compiler
+        #
+        # The bootstrapped compiler is not a dependency in the spec, but it is
+        # a dependency of the build task. Here we add it to self.dependencies
+        if compiler:
+            compiler_spec = self.pkg.spec.compiler
+            arch_spec = self.pkg.spec.architecture
+            strict = spack.concretize.Concretizer().check_for_compiler_existence
+            if (
+                not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
+                and not strict
+            ):
+                # The compiler is in the queue, identify it as dependency
+                dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
+                dep.constrain("platform=%s" % str(arch_spec.platform))
+                dep.constrain("os=%s" % str(arch_spec.os))
+                dep.constrain("target=%s:" % arch_spec.target.microarchitecture.family.name)
+                dep.concretize()
+                dep_id = package_id(dep.package.spec)
+                self.dependencies.add(dep_id)
+
+    def execute(self, install_status):
+        raise NotImplementedError
+
     def __eq__(self, other):
         return self.key == other.key
 
@@ -1018,14 +1044,14 @@ def __ne__(self, other):
         return self.key != other.key
 
     def __repr__(self) -> str:
-        """Returns a formal representation of the build task."""
+        """Returns a formal representation of the task."""
         rep = f"{self.__class__.__name__}("
         for attr, value in self.__dict__.items():
            rep += f"{attr}={value.__repr__()}, "
         return f"{rep.strip(', ')})"
 
     def __str__(self) -> str:
-        """Returns a printable version of the build task."""
+        """Returns a printable version of the task."""
         dependencies = f"#dependencies={len(self.dependencies)}"
         return "priority={0}, status={1}, start={2}, {3}".format(
             self.priority, self.status, self.start, dependencies
@@ -1052,6 +1078,21 @@ def add_dependent(self, pkg_id: str) -> None:
         tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}")
         self.dependents.add(pkg_id)
 
+    def add_dependency(self, pkg_id, installed=False):
+        """
+        Ensure the dependency package id is in the task's list so the task priority will be
+        correct.
+
+        Args:
+            pkg_id (str): package identifier of the dependency package
+            installed (bool): install status of the dependency package
+        """
+        if pkg_id != self.pkg_id and pkg_id not in self.dependencies:
+            tty.debug("Adding {0} as a depencency of {1}".format(pkg_id, self.pkg_id))
+            self.dependencies.add(pkg_id)
+            if not installed:
+                self.uninstalled_deps.add(pkg_id)
+
     def flag_installed(self, installed: List[str]) -> None:
         """
         Ensure the dependency is not considered to still be uninstalled.
@@ -1068,6 +1109,39 @@ def flag_installed(self, installed: List[str]) -> None:
                 level=2,
             )
 
+    def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
+        """
+        Create and ensure proper access controls for the install directory.
+        Write a small metadata file with the current spack environment.
+
+        Args:
+            pkg: the package to be built and installed
+        """
+        # Move to a module level method.
+        if not os.path.exists(pkg.spec.prefix):
+            path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
+            tty.debug("Creating the installation directory {0}".format(path))
+            spack.store.STORE.layout.create_install_directory(pkg.spec)
+        else:
+            # Set the proper group for the prefix
+            group = prefs.get_package_group(pkg.spec)
+            if group:
+                fs.chgrp(pkg.spec.prefix, group)
+
+            # Set the proper permissions.
+            # This has to be done after group because changing groups blows
+            # away the sticky group bit on the directory
+            mode = os.stat(pkg.spec.prefix).st_mode
+            perms = prefs.get_package_dir_permissions(pkg.spec)
+            if mode != perms:
+                os.chmod(pkg.spec.prefix, perms)
+
+            # Ensure the metadata path exists as well
+            fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)
+
+        # Always write host environment - we assume this can change
+        spack.store.STORE.layout.write_host_environment(pkg.spec)
+
     @property
     def explicit(self) -> bool:
         return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])
@@ -1098,7 +1172,7 @@ def key(self) -> Tuple[int, int]:
         """The key is the tuple (# uninstalled dependencies, sequence)."""
         return (self.priority, self.sequence)
 
-    def next_attempt(self, installed) -> "BuildTask":
+    def next_attempt(self, installed) -> "Task":
         """Create a new, updated task for the next installation attempt."""
         task = copy.copy(self)
         task._update()
@@ -1112,6 +1186,99 @@ def priority(self):
         return len(self.uninstalled_deps)
 
 
+class BuildTask(Task):
+    """Class for representing a build task for a package."""
+
+    def execute(self, install_status):
+        """
+        Perform the installation of the requested spec and/or dependency
+        represented by the build task.
+        """
+        install_args = self.request.install_args
+        tests = install_args.get("tests")
+        unsigned = install_args.get("unsigned")
+
+        pkg, pkg_id = self.pkg, self.pkg_id
+
+        tty.msg(install_msg(pkg_id, self.pid, install_status))
+        self.start = self.start or time.time()
+        self.status = STATUS_INSTALLING
+
+        # Use the binary cache if requested
+        if self.use_cache:
+            if _install_from_cache(pkg, self.explicit, unsigned):
+                if self.compiler:
+                    _add_compiler_package_to_config(pkg)
+                return ExecuteResult.SUCCESS
+            elif self.cache_only:
+                raise InstallError("No binary found when cache-only was specified", pkg=pkg)
+            else:
+                tty.msg(f"No binary for {pkg_id} found: installing from source")
+
+        pkg.run_tests = tests is True or tests and pkg.name in tests
+
+        # hook that allows tests to inspect the Package before installation
+        # see unit_test_check() docs.
+        if not pkg.unit_test_check():
+            return ExecuteResult.FAILED
+
+        try:
+            # Create stage object now and let it be serialized for the child process. That
+            # way monkeypatch in tests works correctly.
+            pkg.stage
+
+            self._setup_install_dir(pkg)
+
+            # Create a child process to do the actual installation.
+            # Preserve verbosity settings across installs.
+            spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
+                pkg, build_process, install_args
+            )
+            # Currently this is how RPATH-like behavior is achieved on Windows, after install
+            # establish runtime linkage via Windows Runtime link object
+            # Note: this is a no-op on non Windows platforms
+            pkg.windows_establish_runtime_linkage()
+            # Note: PARENT of the build process adds the new package to
+            # the database, so that we don't need to re-read from file.
+            spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=self.explicit)
+
+            # If a compiler, ensure it is added to the configuration
+            if self.compiler:
+                _add_compiler_package_to_config(pkg)
+        except spack.build_environment.StopPhase as e:
+            # A StopPhase exception means that do_install was asked to
+            # stop early from clients, and is not an error at this point
+            pid = "{0}: ".format(self.pid) if tty.show_pid() else ""
+            tty.debug("{0}{1}".format(pid, str(e)))
+            tty.debug("Package stage directory: {0}".format(pkg.stage.source_path))
+        return ExecuteResult.SUCCESS
+
+
+class RewireTask(Task):
+    """Class for representing a rewire task for a package."""
+
+    def execute(self, install_status):
+        # TODO: Docstring
+        oldstatus = self.status
+        self.status = STATUS_INSTALLING
+        tty.msg(install_msg(self.pkg_id, self.pid, install_status))
+        self.start = self.start or time.time()
+        if not self.pkg.spec.build_spec.installed:
+            try:
+                install_args = self.request.install_args
+                unsigned = install_args.get("unsigned")
+                binary_distribution.install_root_node(self.pkg.spec, unsigned=unsigned)
+                _print_installed_pkg(self.pkg.prefix)
+                return ExecuteResult.SUCCESS
+            except BaseException as e:
+                tty.debug(f"Failed to rewire {self.pkg.spec} from binary. {e}")
+                self.status = oldstatus
+                return ExecuteResult.MISSING_BUILD_SPEC
+        spack.rewiring.rewire_node(self.pkg.spec, self.explicit)
+        _print_installed_pkg(self.pkg.prefix)
+        return ExecuteResult.SUCCESS
+
+
 class PackageInstaller:
     """
     Class for managing the install process for a Spack instance based on a bottom-up DAG approach.
```
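One subtlety in `BuildTask.execute()` above: `tests is True or tests and pkg.name in tests` relies on `and` binding tighter than `or`, so it reads as `(tests is True) or (tests and pkg.name in tests)`, where `tests` may be a bool or a list of package names. A quick standalone check of that behavior (names invented):

```python
def run_tests(tests, name):
    # Same precedence as the expression in BuildTask.execute():
    # (tests is True) or (tests and name in tests)
    return tests is True or tests and name in tests

assert run_tests(True, "zlib")
assert not run_tests(False, "zlib")
assert run_tests(["zlib"], "zlib")
assert not run_tests([], "zlib")           # empty list is falsy
assert not run_tests(["openssl"], "zlib")  # list given, name not in it
```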
```diff
@@ -1126,11 +1293,11 @@ def __init__(
         # List of build requests
         self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]
 
-        # Priority queue of build tasks
-        self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
+        # Priority queue of tasks
+        self.build_pq: List[Tuple[Tuple[int, int], Task]] = []
 
-        # Mapping of unique package ids to build task
-        self.build_tasks: Dict[str, BuildTask] = {}
+        # Mapping of unique package ids to task
+        self.build_tasks: Dict[str, Task] = {}
 
         # Cache of package locks for failed packages, keyed on package's ids
         self.failed: Dict[str, Optional[lk.Lock]] = {}
@@ -1151,6 +1318,9 @@ def __init__(
         # fast then that option applies to all build requests.
         self.fail_fast = False
 
+        # Initializing all_dependencies to empty. This will be set later in _init_queue.
+        self.all_dependencies: Dict[str, Set[str]] = {}
+
     def __repr__(self) -> str:
         """Returns a formal representation of the package installer."""
         rep = f"{self.__class__.__name__}("
@@ -1221,7 +1391,7 @@ def _add_init_task(
         all_deps: Dict[str, Set[str]],
     ) -> None:
         """
-        Creates and queus the initial build task for the package.
+        Creates and queus the initial task for the package.
 
         Args:
             pkg: the package to be built and installed
@@ -1232,7 +1402,9 @@ def _add_init_task(
             all_deps (defaultdict(set)): dictionary of all dependencies and
                 associated dependents
         """
-        task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
+        cls = RewireTask if pkg.spec.spliced else BuildTask
+        task: Task = cls(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
+
         for dep_id in task.dependencies:
             all_deps[dep_id].add(package_id(pkg.spec))
 
@@ -1306,7 +1478,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
         else:
             lock.release_read()
 
-    def _prepare_for_install(self, task: BuildTask) -> None:
+    def _prepare_for_install(self, task: Task) -> None:
         """
         Check the database and leftover installation directories/files and
         prepare for a new install attempt for an uninstalled package.
@@ -1314,7 +1486,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
         and ensuring the database is up-to-date.
 
         Args:
-            task (BuildTask): the build task whose associated package is
+            task: the task whose associated package is
                 being checked
         """
         install_args = task.request.install_args
@@ -1365,7 +1537,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
             spack.store.STORE.db.update_explicit(task.pkg.spec, True)
 
     def _cleanup_all_tasks(self) -> None:
-        """Cleanup all build tasks to include releasing their locks."""
+        """Cleanup all tasks to include releasing their locks."""
         for pkg_id in self.locks:
             self._release_lock(pkg_id)
 
@@ -1397,7 +1569,7 @@ def _cleanup_failed(self, pkg_id: str) -> None:
 
     def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
         """
-        Cleanup the build task for the spec
+        Cleanup the task for the spec
 
         Args:
            pkg: the package being installed
@@ -1469,7 +1641,7 @@ def _ensure_locked(
 
         if lock_type == "read":
             # Wait until the other process finishes if there are no more
-            # build tasks with priority 0 (i.e., with no uninstalled
+            # tasks with priority 0 (i.e., with no uninstalled
             # dependencies).
             no_p0 = len(self.build_tasks) == 0 or not self._next_is_pri0()
             timeout = None if no_p0 else 3.0
@@ -1521,6 +1693,75 @@ def _ensure_locked(
         self.locks[pkg_id] = (lock_type, lock)
         return self.locks[pkg_id]
 
+    def _requeue_with_build_spec_tasks(self, task):
+        """TODO: Docstring"""
+        # Full install of the build_spec is necessary because it didn't already exist somewhere
+        # TODO: Bootstrap compilers first (from add_tasks)
+        install_compilers = spack.config.get("config:install_missing_compilers", False)
+
+        spec = task.pkg.spec
+
+        if install_compilers:
+            packages_per_compiler = {}
+
+            # Queue all dependencies of the build spec.
+            for dep in spec.build_spec.traverse():
+                pkg = dep.package
+                compiler = pkg.spec.compiler
+                arch = pkg.spec.architecture
+                if compiler not in packages_per_compiler:
+                    packages_per_compiler[compiler] = {}
+
+                if arch not in packages_per_compiler[compiler]:
+                    packages_per_compiler[compiler][arch] = []
+
+                packages_per_compiler[compiler][arch].append(pkg)
+                pkg_id = package_id(pkg.spec)
+                if pkg_id not in self.build_tasks:
+                    spack.store.STORE.failure_tracker.clear(dep, force=False)
+                    self._add_init_task(dep.package, task.request, False, self.all_dependencies)
+
+            compiler = spec.build_spec.compiler
+            arch = spec.build_spec.architecture
+
+            if compiler not in packages_per_compiler:
+                packages_per_compiler[compiler] = {}
+
+            if arch not in packages_per_compiler[compiler]:
+                packages_per_compiler[compiler][arch] = []
+
+            packages_per_compiler[compiler][arch].append(spec.build_spec.package)
+
+            for compiler, archs in packages_per_compiler.items():
+                for arch, packages in archs.items():
+                    # TODO: Ensure that this works w.r.t all deps
+                    self._add_bootstrap_compilers(
+                        compiler, arch, packages, task.request, self.all_dependencies
+                    )
+
+        for dep in spec.build_spec.traverse():
+            dep_pkg = dep.package
+
+            dep_id = package_id(dep)
+            if dep_id not in self.build_tasks:
+                self._add_init_task(dep_pkg, task.request, False, self.all_dependencies)
+
+            # Clear any persistent failure markings _unless_ they are
+            # associated with another process in this parallel build
+            # of the spec.
+            spack.store.STORE.failure_tracker.clear(dep, force=False)
+
+        # Queue the build spec.
+        build_pkg_id = package_id(spec.build_spec)
+        build_spec_task = self.build_tasks[build_pkg_id]
+        spec_pkg_id = package_id(spec)
+        spec_task = task.next_attempt(self.installed)
+        spec_task.status = STATUS_ADDED
+        # Convey a build spec as a dependency of a deployed spec.
+        build_spec_task.add_dependent(spec_pkg_id)
+        spec_task.add_dependency(build_pkg_id)
+        self._push_task(spec_task)
+
     def _add_tasks(self, request: BuildRequest, all_deps):
         """Add tasks to the priority queue for the given build request.
```
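The tail of `_requeue_with_build_spec_tasks()` above links the two queues together: the deployed (spliced) spec's task gains the build spec's package id as a dependency, so its priority (the number of uninstalled dependencies) keeps it queued until the build spec finishes. A toy illustration of that priority effect, not Spack's classes:

```python
class ToyTask:
    def __init__(self, pkg_id, deps=()):
        self.pkg_id = pkg_id
        self.dependencies = set(deps)
        self.uninstalled_deps = set(deps)

    def add_dependency(self, pkg_id, installed=False):
        # Same guard as Task.add_dependency() in the diff above.
        if pkg_id != self.pkg_id and pkg_id not in self.dependencies:
            self.dependencies.add(pkg_id)
            if not installed:
                self.uninstalled_deps.add(pkg_id)

    @property
    def priority(self):
        return len(self.uninstalled_deps)

spliced = ToyTask("zlib-spliced")
assert spliced.priority == 0          # would be popped immediately
spliced.add_dependency("zlib-build")  # what the requeue path does
assert spliced.priority == 1          # now waits for the build spec
```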
```diff
@@ -1609,83 +1850,24 @@ def _add_tasks(self, request: BuildRequest, all_deps):
         fail_fast = bool(request.install_args.get("fail_fast"))
         self.fail_fast = self.fail_fast or fail_fast
 
-    def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
-        compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
-        spack.compilers.add_compilers_to_config(
-            spack.compilers.find_compilers([compiler_search_prefix])
-        )
-
-    def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
+    def _install_task(self, task: Task, install_status: InstallStatus) -> None:
         """
         Perform the installation of the requested spec and/or dependency
-        represented by the build task.
+        represented by the task.
 
         Args:
-            task: the installation build task for a package
+            task: the installation task for a package
             install_status: the installation status for the package"""
-
-        explicit = task.explicit
-        install_args = task.request.install_args
-        cache_only = task.cache_only
-        use_cache = task.use_cache
-        tests = install_args.get("tests", False)
-        assert isinstance(tests, (bool, list))  # make mypy happy.
-        unsigned: Optional[bool] = install_args.get("unsigned")
-
-        pkg, pkg_id = task.pkg, task.pkg_id
-
-        tty.msg(install_msg(pkg_id, self.pid, install_status))
-        task.start = task.start or time.time()
-        task.status = STATUS_INSTALLING
-
-        # Use the binary cache if requested
-        if use_cache:
-            if _install_from_cache(pkg, explicit, unsigned):
-                self._update_installed(task)
-                if task.compiler:
-                    self._add_compiler_package_to_config(pkg)
-                return
-            elif cache_only:
-                raise InstallError("No binary found when cache-only was specified", pkg=pkg)
-            else:
-                tty.msg(f"No binary for {pkg_id} found: installing from source")
-
-        pkg.run_tests = tests if isinstance(tests, bool) else pkg.name in tests
-
-        # hook that allows tests to inspect the Package before installation
-        # see unit_test_check() docs.
-        if not pkg.unit_test_check():
-            return
-
-        try:
-            self._setup_install_dir(pkg)
-
-            # Create stage object now and let it be serialized for the child process. That
-            # way monkeypatch in tests works correctly.
-            pkg.stage
-
-            # Create a child process to do the actual installation.
-            # Preserve verbosity settings across installs.
-            spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
-                pkg, build_process, install_args
-            )
-            # Note: PARENT of the build process adds the new package to
-            # the database, so that we don't need to re-read from file.
-            spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
-
-            # If a compiler, ensure it is added to the configuration
-            if task.compiler:
-                self._add_compiler_package_to_config(pkg)
-        except spack.build_environment.StopPhase as e:
-            # A StopPhase exception means that do_install was asked to
-            # stop early from clients, and is not an error at this point
-            pid = f"{self.pid}: " if tty.show_pid() else ""
-            tty.debug(f"{pid}{str(e)}")
-            tty.debug(f"Package stage directory: {pkg.stage.source_path}")
+        # TODO: use install_status
+        rc = task.execute(install_status)
+        if rc == ExecuteResult.MISSING_BUILD_SPEC:
+            self._requeue_with_build_spec_tasks(task)
+        else:  # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED
+            self._update_installed(task)
 
     def _next_is_pri0(self) -> bool:
         """
-        Determine if the next build task has priority 0
+        Determine if the next task has priority 0
 
         Return:
             True if it does, False otherwise
@@ -1695,9 +1877,9 @@ def _next_is_pri0(self) -> bool:
         task = self.build_pq[0][1]
         return task.priority == 0
 
-    def _pop_task(self) -> Optional[BuildTask]:
+    def _pop_task(self) -> Optional[Task]:
         """
-        Remove and return the lowest priority build task.
+        Remove and return the lowest priority task.
 
         Source: Variant of function at docs.python.org/2/library/heapq.html
         """
@@ -1709,17 +1891,17 @@ def _pop_task(self) -> Optional[Task]:
             return task
         return None
 
-    def _push_task(self, task: BuildTask) -> None:
+    def _push_task(self, task: Task) -> None:
         """
-        Push (or queue) the specified build task for the package.
+        Push (or queue) the specified task for the package.
 
         Source: Customization of "add_task" function at
             docs.python.org/2/library/heapq.html
 
         Args:
-            task: the installation build task for a package
+            task: the installation task for a package
         """
-        msg = "{0} a build task for {1} with status '{2}'"
+        msg = "{0} a task for {1} with status '{2}'"
         skip = "Skipping requeue of task for {0}: {1}"
 
         # Ensure do not (re-)queue installed or failed packages whose status
@@ -1732,7 +1914,7 @@ def _push_task(self, task: Task) -> None:
             tty.debug(skip.format(task.pkg_id, "failed"))
             return
 
-        # Remove any associated build task since its sequence will change
+        # Remove any associated task since its sequence will change
         self._remove_task(task.pkg_id)
         desc = "Queueing" if task.attempts == 0 else "Requeueing"
         tty.debug(msg.format(desc, task.pkg_id, task.status))
@@ -1765,9 +1947,9 @@ def _release_lock(self, pkg_id: str) -> None:
         except Exception as exc:
             tty.warn(err.format(exc.__class__.__name__, ltype, pkg_id, str(exc)))
 
-    def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
+    def _remove_task(self, pkg_id: str) -> Optional[Task]:
         """
-        Mark the existing package build task as being removed and return it.
+        Mark the existing package task as being removed and return it.
         Raises KeyError if not found.
 
         Source: Variant of function at docs.python.org/2/library/heapq.html
@@ -1776,19 +1958,19 @@ def _remove_task(self, pkg_id: str) -> Optional[Task]:
             pkg_id: identifier for the package to be removed
         """
         if pkg_id in self.build_tasks:
-            tty.debug(f"Removing build task for {pkg_id} from list")
+            tty.debug(f"Removing task for {pkg_id} from list")
             task = self.build_tasks.pop(pkg_id)
             task.status = STATUS_REMOVED
             return task
         else:
             return None
 
-    def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None:
+    def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
         """
         Requeues a task that appears to be in progress by another process.
 
         Args:
-            task (BuildTask): the installation build task for a package
+            task (Task): the installation task for a package
         """
         if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]:
             tty.debug(
@@ -1800,47 +1982,15 @@ def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
             new_task.status = STATUS_INSTALLING
             self._push_task(new_task)
 
-    def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
-        """
-        Create and ensure proper access controls for the install directory.
-        Write a small metadata file with the current spack environment.
-
-        Args:
-            pkg: the package to be built and installed
-        """
-        if not os.path.exists(pkg.spec.prefix):
-            path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
-            tty.debug(f"Creating the installation directory {path}")
-            spack.store.STORE.layout.create_install_directory(pkg.spec)
-        else:
-            # Set the proper group for the prefix
-            group = prefs.get_package_group(pkg.spec)
-            if group:
-                fs.chgrp(pkg.spec.prefix, group)
-
-            # Set the proper permissions.
-            # This has to be done after group because changing groups blows
-            # away the sticky group bit on the directory
-            mode = os.stat(pkg.spec.prefix).st_mode
-            perms = prefs.get_package_dir_permissions(pkg.spec)
-            if mode != perms:
-                os.chmod(pkg.spec.prefix, perms)
-
-            # Ensure the metadata path exists as well
-            fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)
-
-        # Always write host environment - we assume this can change
-        spack.store.STORE.layout.write_host_environment(pkg.spec)
-
     def _update_failed(
-        self, task: BuildTask, mark: bool = False, exc: Optional[BaseException] = None
+        self, task: Task, mark: bool = False, exc: Optional[BaseException] = None
     ) -> None:
         """
         Update the task and transitive dependents as failed; optionally mark
-        externally as failed; and remove associated build tasks.
+        externally as failed; and remove associated tasks.
 
         Args:
-            task: the build task for the failed package
+            task: the task for the failed package
             mark: ``True`` if the package and its dependencies are to
                 be marked as "failed", otherwise, ``False``
             exc: optional exception if associated with the failure
@@ -1858,19 +2008,19 @@ def _update_failed(
             if dep_id in self.build_tasks:
                 tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed")
                 # Ensure the dependent's uninstalled dependents are
-                # up-to-date and their build tasks removed.
+                # up-to-date and their tasks removed.
                 dep_task = self.build_tasks[dep_id]
                 self._update_failed(dep_task, mark)
                 self._remove_task(dep_id)
             else:
-                tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed")
+                tty.debug(f"No task for {dep_id} to skip since {pkg_id} failed")
 
-    def _update_installed(self, task: BuildTask) -> None:
+    def _update_installed(self, task: Task) -> None:
         """
-        Mark the task as installed and ensure dependent build tasks are aware.
+        Mark the task as installed and ensure dependent tasks are aware.
 
         Args:
-            task (BuildTask): the build task for the installed package
+            task: the task for the installed package
         """
         task.status = STATUS_INSTALLED
         self._flag_installed(task.pkg, task.dependents)
@@ -1879,7 +2029,7 @@ def _flag_installed(
         self, pkg: "spack.package_base.PackageBase", dependent_ids: Optional[Set[str]] = None
     ) -> None:
         """
-        Flag the package as installed and ensure known by all build tasks of
+        Flag the package as installed and ensure known by all tasks of
         known dependents.
 
         Args:
@@ -1907,7 +2057,7 @@ def _flag_installed(
             dep_task = self.build_tasks[dep_id]
             self._push_task(dep_task.next_attempt(self.installed))
         else:
-            tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success")
+            tty.debug(f"{dep_id} has no task to update for {pkg_id}'s success")
 
     def _init_queue(self) -> None:
         """Initialize the build queue from the list of build requests."""
@@ -1926,8 +2076,9 @@ def _init_queue(self) -> None:
             task = self.build_tasks[dep_id]
             for dependent_id in dependents.difference(task.dependents):
                 task.add_dependent(dependent_id)
+        self.all_dependencies = all_dependencies
 
-    def _install_action(self, task: BuildTask) -> int:
+    def _install_action(self, task: Task) -> InstallAction:
         """
         Determine whether the installation should be overwritten (if it already
         exists) or skipped (if has been handled by another process).
@@ -2078,7 +2229,7 @@ def install(self) -> None:
 
                 # It's an already installed compiler, add it to the config
                 if task.compiler:
-                    self._add_compiler_package_to_config(pkg)
+                    _add_compiler_package_to_config(pkg)
 
             else:
                 # At this point we've failed to get a write or a read
@@ -2119,8 +2270,6 @@ def install(self) -> None:
                     # wrapper -- silence mypy
                     OverwriteInstall(self, spack.store.STORE.db, task, install_status).install()  # type: ignore[arg-type] # noqa: E501
 
-                self._update_installed(task)
-
                 # If we installed then we should keep the prefix
                 stop_before_phase = getattr(pkg, "stop_before_phase", None)
                 last_phase = getattr(pkg, "last_phase", None)
@@ -2180,7 +2329,8 @@ def install(self) -> None:
 
             # Perform basic task cleanup for the installed spec to
             # include downgrading the write to a read lock
-            self._cleanup_task(pkg)
+            if pkg.spec.installed:
+                self._cleanup_task(pkg)
 
         # Cleanup, which includes releasing all of the read locks
         self._cleanup_all_tasks()
@@ -2452,7 +2602,7 @@ def __init__(
         self,
         installer: PackageInstaller,
         database: spack.database.Database,
-        task: BuildTask,
+        task: Task,
         install_status: InstallStatus,
     ):
         self.installer = installer
```
```diff
@@ -52,6 +52,7 @@ def rewire_node(spec, explicit):
     its subgraph. Binaries, text, and links are all changed in accordance with
     the splice. The resulting package is then 'installed.'"""
     tempdir = tempfile.mkdtemp()
 
     # copy anything installed to a temporary directory
     shutil.copytree(spec.build_spec.prefix, os.path.join(tempdir, spec.dag_hash()))
 
@@ -60,7 +61,16 @@ def rewire_node(spec, explicit):
     # spec
     prefix_to_prefix = OrderedDict({spec.build_spec.prefix: spec.prefix})
     for build_dep in spec.build_spec.traverse(root=False):
-        prefix_to_prefix[build_dep.prefix] = spec[build_dep.name].prefix
+        if build_dep.name in spec:
+            prefix_to_prefix[build_dep.prefix] = spec[build_dep.name].prefix
+        else:
+            virtuals = build_dep.package.virtuals_provided
+            for virtual in virtuals:
+                try:
+                    prefix_to_prefix[build_dep.prefix] = spec[virtual.name].prefix
+                    break
+                except KeyError:
+                    continue
 
     manifest = bindist.get_buildfile_manifest(spec.build_spec)
     platform = spack.platforms.by_name(spec.platform)
```
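The `rewire_node()` change above falls back to virtual providers when a build dependency's name is absent from the spliced spec, e.g. when one mpi provider was replaced by another. A minimal dictionary-based sketch of that mapping logic (package names and prefixes invented):

```python
# Build spec dependencies and what they provide (invented data).
build_deps = {"mpich": "/store/mpich-abc"}  # name -> prefix
provides = {"mpich": ["mpi"]}               # name -> virtuals

# The spliced spec no longer contains "mpich"; it has another mpi provider.
spliced = {"openmpi": "/store/openmpi-xyz"}
spliced_providers = {"mpi": "openmpi"}

prefix_to_prefix = {}
for name, old_prefix in build_deps.items():
    if name in spliced:
        # Direct name match: map old prefix to new prefix.
        prefix_to_prefix[old_prefix] = spliced[name]
    else:
        # Fall back to whichever package now provides the same virtual.
        for virtual in provides[name]:
            provider = spliced_providers.get(virtual)
            if provider:
                prefix_to_prefix[old_prefix] = spliced[provider]
                break

assert prefix_to_prefix == {"/store/mpich-abc": "/store/openmpi-xyz"}
```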
```diff
@@ -25,6 +25,7 @@
 import spack.schema.modules
 import spack.schema.packages
 import spack.schema.repos
+import spack.schema.splice
 import spack.schema.upstreams
 import spack.schema.view
 
@@ -43,6 +44,7 @@
     spack.schema.modules.properties,
     spack.schema.packages.properties,
     spack.schema.repos.properties,
+    spack.schema.splice.properties,
     spack.schema.upstreams.properties,
     spack.schema.view.properties,
 )
```
lib/spack/spack/schema/splice.py (new file, 36 lines)
```diff
@@ -0,0 +1,36 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for splice.yaml configuration file.
+"""
+
+
+#: Properties for inclusion in other schemas
+properties = {
+    "splice": {
+        "type": "array",
+        "default": [],
+        "items": {
+            "type": "object",
+            "required": ["target", "replacement"],
+            "additionalProperties": False,
+            "properties": {
+                "target": {"type": "string"},
+                "replacement": {"type": "string"},
+                "transitive": {"type": "boolean", "default": False},
+            },
+        },
+    }
+}
+
+
+#: Full schema with metadata
+schema = {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Spack concretization splice configuration file schema",
+    "type": "object",
+    "additionalProperties": False,
+    "properties": properties,
+}
```
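A hypothetical `splice.yaml` that validates against this schema (the package names and trailing DAG hash are invented). Note that `execute_splices()` in the next file asserts the replacement spec carries an abstract hash (the `/abc123`-style suffix), so a bare spec string would fail:

```yaml
splice:
- target: mpich
  replacement: openmpi/abc123
  transitive: false
```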
```diff
@@ -519,7 +519,7 @@ def _compute_specs_from_answer_set(self):
             node = SpecBuilder.make_node(pkg=providers[0])
             candidate = answer.get(node)
 
-            if candidate and candidate.satisfies(input_spec):
+            if candidate and candidate.build_spec.satisfies(input_spec):
                 self._concrete_specs.append(answer[node])
                 self._concrete_specs_by_input[input_spec] = answer[node]
             else:
@@ -3617,7 +3617,33 @@ def build_specs(self, function_tuples):
                     spack.version.git_ref_lookup.GitRefLookup(spec.fullname)
                 )
 
-        return self._specs
+        specs = self.execute_splices()
+
+        return specs
+
+    def execute_splices(self):
+        splice_config = spack.config.CONFIG.get("splice", [])
+        splice_triples = []
+        for splice_set in splice_config:
+            target = splice_set["target"]
+            replacement = spack.spec.Spec(splice_set["replacement"])
+            assert replacement.abstract_hash
+            replacement.replace_hash()
+            transitive = splice_set.get("transitive", False)
+            splice_triples.append((target, replacement, transitive))
+
+        specs = {}
+        for key, spec in self._specs.items():
+            current_spec = spec
+            for target, replacement, transitive in splice_triples:
+                if target in current_spec:
+                    # matches root or non-root
+                    # e.g. mvapich2%gcc
+                    current_spec = current_spec.splice(replacement, transitive)
+            new_key = NodeArgument(id=key.id, pkg=current_spec.name)
+            specs[new_key] = current_spec
+
+        return specs
 
 
 def _develop_specs_from_env(spec, env):
```
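A toy model of the loop in `execute_splices()` above: every concretized root is run through the configured triples in order and is rewritten only when it actually contains the target. The transitive flag and the real DAG surgery done by `Spec.splice()` are elided; all names are invented:

```python
splice_triples = [("mpich", "openmpi", False)]  # (target, replacement, transitive)

def apply_splices(root_packages):
    """Stand-in for one root spec, modeled as the set of package names it contains."""
    current = set(root_packages)
    for target, replacement, transitive in splice_triples:
        if target in current:  # mirrors "if target in current_spec"
            current.discard(target)
            current.add(replacement)
    return current

assert apply_splices({"hdf5", "mpich", "zlib"}) == {"hdf5", "openmpi", "zlib"}
assert apply_splices({"hdf5", "zlib"}) == {"hdf5", "zlib"}  # no match, untouched
```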
```diff
@@ -3838,9 +3838,16 @@ def safe_color(sigil: str, string: str, color_fmt: Optional[str]) -> str:
         return clr.colorize(f"{color_fmt}{sigil}{clr.cescape(string)}@.", color=color)
 
     def format_attribute(match_object: Match) -> str:
-        (esc, sig, dep, hash, hash_len, attribute, close_brace, unmatched_close_brace) = (
-            match_object.groups()
-        )
+        (
+            esc,
+            sig,
+            dep,
+            hash,
+            hash_len,
+            attribute,
+            close_brace,
+            unmatched_close_brace,
+        ) = match_object.groups()
         if esc:
             return esc
         elif unmatched_close_brace:
@@ -4175,52 +4182,59 @@ def splice(self, other, transitive):
         assert self.concrete
         assert other.concrete
 
-        virtuals_to_replace = [v.name for v in other.package.virtuals_provided if v in self]
+        virtuals_to_replace = [
+            v.name
+            for v in other.package.virtuals_provided
+            if v in self or v in self.package.virtuals_provided
+        ]
+        if transitive:
+            virtuals_to_replace.extend(
+                [
+                    v.name
+                    for od in other.traverse(root=False)
+                    for v in od.package.virtuals_provided
+                    if v in self or v in self.package.virtuals_provided
+                ]
+            )
+
         if virtuals_to_replace:
-            deps_to_replace = dict((self[v], other) for v in virtuals_to_replace)
+            deps_to_replace = {
+                self[v]: (other[v] if v in other else other) for v in virtuals_to_replace
+            }
             # deps_to_replace = [self[v] for v in virtuals_to_replace]
         else:
             # TODO: sanity check and error raise here for other.name not in self
             deps_to_replace = {self[other.name]: other}
             # deps_to_replace = [self[other.name]]
 
-        for d in deps_to_replace:
-            if not all(
-                v in other.package.virtuals_provided or v not in self
-                for v in d.package.virtuals_provided
-            ):
-                # There was something provided by the original that we don't
-                # get from its replacement.
-                raise SpliceError(
-                    ("Splice between {0} and {1} will not provide " "the same virtuals.").format(
-                        self.name, other.name
-                    )
-                )
-            for n in d.traverse(root=False):
-                if not all(
-                    any(
-                        v in other_n.package.virtuals_provided
-                        for other_n in other.traverse(root=False)
-                    )
-                    or v not in self
-                    for v in n.package.virtuals_provided
-                ):
-                    raise SpliceError(
-                        (
-                            "Splice between {0} and {1} will not provide " "the same virtuals."
-                        ).format(self.name, other.name)
-                    )
+        for d, od in deps_to_replace.items():
+            virtuals = []
+            for e in d.edges_from_dependents():
+                virtuals.extend(e.virtuals)
+
+            for v in virtuals:
+                if not any(ov.satisfies(v) for ov in od.package.virtuals_provided):
+                    # There was something provided by the original that we don't
+                    # get from its replacement.
+                    raise SpliceError(
+                        f"Splice between {self.name} and {other.name} will not provide "
+                        "the same virtuals."
+                    )
 
         # For now, check that we don't have DAG with multiple specs from the
         # same package
         def multiple_specs(root):
-            counter = collections.Counter([node.name for node in root.traverse()])
+            counter = collections.Counter(
+                [node.name for node in root.traverse(deptype=("link", "run"))]
+            )
             _, max_number = counter.most_common()[0]
             return max_number > 1
 
         if multiple_specs(self) or multiple_specs(other):
             msg = (
-                'Either "{0}" or "{1}" contain multiple specs from the same '
+                'Either "{0}"\n or "{1}"\n contain multiple specs from the same '
                 "package, which cannot be handled by splicing at the moment"
             )
             raise ValueError(msg.format(self, other))
@@ -4242,7 +4256,7 @@ def from_self(name, transitive):
         else:
             if name == other.name:
                 return False
-            if any(
+            if any(  # TODO: should this be all
                 v in other.package.virtuals_provided
                 for v in self[name].package.virtuals_provided
             ):
@@ -4275,17 +4289,30 @@ def from_self(name, transitive):
                 nodes[name].add_dependency_edge(
                     nodes[dep_name], depflag=edge.depflag, virtuals=edge.virtuals
                 )
-            if any(dep not in self_nodes for dep in self[name]._dependencies):
-                nodes[name].build_spec = self[name].build_spec
+            deps_to_check = []
+            for dep_name, dep_specs in self[name]._dependencies.items():
+                deps_to_check.append(dep_name)
+                for dep_spec in dep_specs:
+                    deps_to_check.extend(dep_spec.virtuals)
+
+            if any(dep not in self_nodes for dep in deps_to_check):
+                nodes[name].build_spec = self[name].build_spec.copy()
         else:
             for edge in other[name].edges_to_dependencies():
                 nodes[name].add_dependency_edge(
                     nodes[edge.spec.name], depflag=edge.depflag, virtuals=edge.virtuals
                 )
-            if any(dep not in other_nodes for dep in other[name]._dependencies):
-                nodes[name].build_spec = other[name].build_spec
+            deps_to_check = []
+            for dep_name, dep_specs in other[name]._dependencies.items():
+                deps_to_check.append(dep_name)
+                for dep_spec in dep_specs:
+                    deps_to_check.extend(dep_spec.virtuals)
+
+            if any(dep not in other_nodes for dep in deps_to_check):
+                nodes[name].build_spec = other[name].build_spec.copy()
 
-        ret = nodes[self.name]
+        # If self.name not in nodes then we spliced the root with a different virtual provider
+        ret = nodes[self.name] if self.name in nodes else nodes[other.name]
 
         # Clear cached hashes for all affected nodes
         # Do not touch unaffected nodes
@@ -4297,7 +4324,7 @@ def from_self(name, transitive):
 
             dep.dag_hash()
 
-        return nodes[self.name]
+        return ret
 
     def clear_cached_hashes(self, ignore=()):
         """
```
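The reworked check in `Spec.splice()` above consults the virtuals recorded on the edges pointing at each replaced dependency, rather than everything the dependency's package could provide. A minimal sketch of that predicate, with plain lists standing in for edges and providers (data invented):

```python
def check_replacement(edge_virtuals, replacement_provides):
    # Mirrors the new loop: every virtual demanded by a dependent edge must be
    # satisfied by something the replacement provides.
    for v in edge_virtuals:
        if not any(ov == v for ov in replacement_provides):
            raise ValueError(f"splice will not provide the same virtuals: missing {v}")

check_replacement(["mpi"], ["mpi"])  # fine: replacement still provides mpi
try:
    check_replacement(["mpi", "lapack"], ["mpi"])
except ValueError as err:
    assert "lapack" in str(err)
```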
@ -64,22 +64,6 @@ def cache_directory(tmpdir):
|
||||
spack.config.caches = old_cache_path
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def mirror_dir(tmpdir_factory):
|
||||
dir = tmpdir_factory.mktemp("mirror")
|
||||
dir.ensure("build_cache", dir=True)
|
||||
yield str(dir)
|
||||
dir.join("build_cache").remove()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def test_mirror(mirror_dir):
|
||||
mirror_url = url_util.path_to_file_url(mirror_dir)
|
||||
mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
|
||||
yield mirror_dir
|
||||
mirror_cmd("rm", "--scope=site", "test-mirror-func")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def config_directory(tmpdir_factory):
|
||||
tmpdir = tmpdir_factory.mktemp("test_configs")
|
||||
@ -223,9 +207,9 @@ def dummy_prefix(tmpdir):
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with default rpaths
|
||||
into the default directory layout scheme.
|
||||
@ -238,13 +222,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
install_cmd("--no-cache", sy_spec.name)
|
||||
|
||||
# Create a buildache
|
||||
buildcache_cmd("push", "-u", mirror_dir, cspec.name, sy_spec.name)
|
||||
|
||||
buildcache_cmd("push", "-u", temporary_mirror_dir, cspec.name, sy_spec.name)
|
||||
# Test force overwrite create buildcache (-f option)
|
||||
buildcache_cmd("push", "-uf", mirror_dir, cspec.name)
|
||||
buildcache_cmd("push", "-uf", temporary_mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", mirror_dir)
|
||||
buildcache_cmd("update-index", temporary_mirror_dir)
|
||||
|
||||
# List the buildcaches in the mirror
|
||||
buildcache_cmd("list", "-alv")
|
||||
@ -272,9 +255,9 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_default_rpaths_install_nondefault_layout(mirror_dir):
|
||||
def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with default rpaths
|
||||
into the non-default directory layout scheme.
|
||||
@ -295,9 +278,9 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_relative_rpaths_install_default_layout(mirror_dir):
|
||||
def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with relative
|
||||
rpaths into the default directory layout scheme.
|
||||
@ -324,9 +307,9 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
    "default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
    "default_config", "cache_directory", "install_dir_non_default_layout", "temporary_mirror"
)
def test_relative_rpaths_install_nondefault(mirror_dir):
def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
    """
    Test the installation of buildcaches with relativized rpaths
    into the non-default directory layout scheme.
@ -375,9 +358,9 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
    "default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
    "default_config", "cache_directory", "install_dir_non_default_layout", "temporary_mirror"
)
def test_built_spec_cache(mirror_dir):
def test_built_spec_cache(temporary_mirror_dir):
    """Because the buildcache list command fetches the buildcache index
    and uses it to populate the binary_distribution built spec cache, when
    this test calls get_mirrors_for_spec, it is testing the population of
@ -398,7 +381,9 @@ def fake_dag_hash(spec, length=None):
    return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"[:length]


@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "test_mirror")
@pytest.mark.usefixtures(
    "install_mockery_mutable_config", "mock_packages", "mock_fetch", "temporary_mirror"
)
def test_spec_needs_rebuild(monkeypatch, tmpdir):
    """Make sure needs_rebuild properly compares remote hash
    against locally computed one, avoiding unnecessary rebuilds"""
@ -519,7 +504,7 @@ def mock_list_url(url, recursive=False):


@pytest.mark.usefixtures("mock_fetch", "install_mockery")
def test_update_sbang(tmpdir, test_mirror):
def test_update_sbang(tmpdir, temporary_mirror):
    """Test the creation and installation of buildcaches with default rpaths
    into the non-default directory layout scheme, triggering an update of the
    sbang.
@ -530,7 +515,7 @@ def test_update_sbang(tmpdir, test_mirror):
    old_spec_hash_str = "/{0}".format(old_spec.dag_hash())

    # Need a fake mirror with *function* scope.
    mirror_dir = test_mirror
    mirror_dir = temporary_mirror

    # Assume all commands will concretize old_spec the same way.
    install_cmd("--no-cache", old_spec.name)
@ -25,7 +25,7 @@ def test_build_task_errors(install_mockery):
    inst.BuildTask(spec.package, None, False, 0, 0, 0, set())

    request = inst.BuildRequest(spec.package, {})
    with pytest.raises(inst.InstallError, match="Cannot create a build task"):
    with pytest.raises(inst.InstallError, match="Cannot create a task"):
        inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, set())
@ -310,7 +310,7 @@ def test_pkg_grep(mock_packages, capfd):
    output, _ = capfd.readouterr()
    assert output.strip() == "\n".join(
        spack.repo.PATH.get_pkg_class(name).module.__file__
        for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-z"]
        for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-vt", "splice-z"]
    )

    # ensure that this string isn't found
@ -2286,6 +2286,25 @@ def test_virtuals_are_annotated_on_edges(self, spec_str):
        edges = spec.edges_to_dependencies(name="callpath")
        assert len(edges) == 1 and edges[0].virtuals == ()

    @pytest.mark.parametrize("transitive", [True, False])
    def test_explicit_splices(
        self, mutable_config, database_mutable_config, mock_packages, transitive
    ):
        mpich_spec = database_mutable_config.query("mpich")[0]
        splice_info = {
            "target": "mpi",
            "replacement": f"/{mpich_spec.dag_hash()}",
            "transitive": transitive,
        }
        spack.config.CONFIG.set("splice", [splice_info])

        spec = spack.spec.Spec("hdf5 ^zmpi").concretized()

        assert spec.satisfies(f"^mpich/{mpich_spec.dag_hash()}")
        assert spec.build_spec.satisfies("^zmpi")
        assert not spec.build_spec.satisfies(f"^mpich/{mpich_spec.dag_hash()}")
        assert not spec.satisfies("^zmpi")
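For orientation: the splice_info mapping above is a single entry of the new top-level splice configuration section, the same section registered for spack config completions later in this diff. A minimal sketch of setting an entry programmatically, assuming only the schema this test exercises (the hash is illustrative):

    import spack.config

    # Each entry names a target to replace, an installed replacement addressed
    # by dag hash, and whether the replacement's dependencies are adopted too.
    splice_entry = {
        "target": "mpi",
        "replacement": "/abcdef1234567890",  # illustrative hash, not a real one
        "transitive": False,
    }
    spack.config.CONFIG.set("splice", [splice_entry])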
    @pytest.mark.db
    @pytest.mark.parametrize(
        "spec_str,mpi_name",
@ -62,8 +62,11 @@
import spack.util.web
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.main import SpackCommand
from spack.util.pattern import Bunch

mirror_cmd = SpackCommand("mirror")


@pytest.fixture(autouse=True)
def check_config_fixture(request):
@ -989,6 +992,38 @@ def install_mockery(temporary_store: spack.store.Store, mutable_config, mock_pac
    temporary_store.failure_tracker.clear_all()


@pytest.fixture(scope="module")
def temporary_mirror_dir(tmpdir_factory):
    dir = tmpdir_factory.mktemp("mirror")
    dir.ensure("build_cache", dir=True)
    yield str(dir)
    dir.join("build_cache").remove()


@pytest.fixture(scope="function")
def temporary_mirror(temporary_mirror_dir):
    mirror_url = url_util.path_to_file_url(temporary_mirror_dir)
    mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
    yield temporary_mirror_dir
    mirror_cmd("rm", "--scope=site", "test-mirror-func")
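A test opts into the scratch mirror by requesting the fixture directly; a minimal sketch (test name hypothetical):

    import os

    def test_uses_temporary_mirror(temporary_mirror):
        # The fixture yields the module-scoped mirror directory after adding it
        # as a site-scope mirror; teardown removes the mirror registration.
        assert os.path.isdir(os.path.join(temporary_mirror, "build_cache"))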
@pytest.fixture(scope="function")
|
||||
def mutable_temporary_mirror_dir(tmpdir_factory):
|
||||
dir = tmpdir_factory.mktemp("mirror")
|
||||
dir.ensure("build_cache", dir=True)
|
||||
yield str(dir)
|
||||
dir.join("build_cache").remove()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mutable_temporary_mirror(mutable_temporary_mirror_dir):
|
||||
mirror_url = url_util.path_to_file_url(mutable_temporary_mirror_dir)
|
||||
mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
|
||||
yield mutable_temporary_mirror_dir
|
||||
mirror_cmd("rm", "--scope=site", "test-mirror-func")
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def temporary_store(tmpdir, request):
|
||||
"""Hooks a temporary empty store for the test function."""
|
||||
|
@ -32,6 +32,7 @@
import spack.store
import spack.util.lock as lk
import spack.version
from spack.main import SpackCommand


def _mock_repo(root, namespace):
@ -739,6 +740,85 @@ def test_installer_init_requests(install_mockery):
        assert request.pkg.name == spec_name


@pytest.mark.parametrize("transitive", [True, False])
def test_install_spliced(
    install_mockery, mock_fetch, default_mock_concretization, monkeypatch, capsys, transitive
):
    """Install a spliced spec and ensure every node of the result, along with
    each node's build spec, ends up installed."""
    spec = default_mock_concretization("splice-t")
    dep = default_mock_concretization("splice-h+foo")

    # Do the splice.
    out = spec.splice(dep, transitive)
    installer = create_installer([out], {"verbose": True, "fail_fast": True})
    installer.install()
    for node in out.traverse():
        assert node.installed
        assert node.build_spec.installed
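For context on the final assertions: a spec returned by Spec.splice records the DAG it was actually built from, which is what lets the installer rewire existing binaries instead of rebuilding. A minimal sketch of that relationship, using only attributes this diff relies on elsewhere (spliced, build_spec):

    out = spec.splice(dep, transitive)
    assert out.spliced                 # the result is marked as rewired
    assert out.build_spec is not out   # and it remembers its original build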
@pytest.mark.parametrize("transitive", [True, False])
|
||||
def test_install_spliced_build_spec_installed(
|
||||
install_mockery, default_mock_concretization, capfd, mock_fetch, transitive
|
||||
):
|
||||
"""TODO: description"""
|
||||
spec = default_mock_concretization("splice-t")
|
||||
dep = default_mock_concretization("splice-h+foo")
|
||||
|
||||
# Do the splice.
|
||||
out = spec.splice(dep, transitive)
|
||||
out.build_spec.package.do_install()
|
||||
installer = create_installer([out], {"vebose": True, "fail_fast": True})
|
||||
installer._init_queue()
|
||||
for _, task in installer.build_pq:
|
||||
assert isinstance(task, inst.RewireTask if task.pkg.spec.spliced else inst.BuildTask)
|
||||
assert installer.build_pq[-1][0][0] == 2
|
||||
installer.install()
|
||||
for node in out.traverse():
|
||||
assert node.installed
|
||||
assert node.build_spec.installed
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("lacking windows support for binary installs")
@pytest.mark.parametrize("transitive", [True, False])
@pytest.mark.parametrize("root_str", ["splice-t^splice-h~foo", "splice-h~foo"])
def test_install_splice_root_from_binary(
    install_mockery,
    default_mock_concretization,
    mock_fetch,
    mutable_temporary_mirror,
    transitive,
    root_str,
):
    """Test installing a spliced spec whose binaries come from a buildcache."""
    # Test splicing and rewiring a spec with the same name, different hash.
    original_spec = spack.spec.Spec(root_str).concretized()
    spec_to_splice = spack.spec.Spec("splice-h+foo").concretized()

    original_spec.package.do_install()
    spec_to_splice.package.do_install()

    out = original_spec.splice(spec_to_splice, transitive)

    buildcache = SpackCommand("buildcache")
    buildcache(
        "push",
        "--allow-root",
        "--unsigned",
        "--update-index",
        mutable_temporary_mirror,
        str(original_spec),
        str(spec_to_splice),
    )

    uninstall = SpackCommand("uninstall")
    uninstall("-ay")

    out.package.do_install(unsigned=True)

    assert len(spack.store.STORE.db.query()) == len(list(out.traverse()))
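The closing length check can be read as set equality over DAG hashes: after the store is wiped, installing the spliced root from the mirror should install exactly the nodes of the spliced DAG, nothing more. A sketch of the stronger form this implies, under the same names as the test above:

    installed = {s.dag_hash() for s in spack.store.STORE.db.query()}
    expected = {s.dag_hash() for s in out.traverse()}
    assert installed == expected  # nothing extra pulled in, nothing missing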
def test_install_task_use_cache(install_mockery, monkeypatch):
    installer = create_installer(["trivial-install-test-package"], {})
    request = installer.build_requests[0]
@ -761,7 +841,7 @@ def _add(_compilers):

    # Preclude any meaningful side-effects
    monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _true)
    monkeypatch.setattr(inst.PackageInstaller, "_setup_install_dir", _noop)
    monkeypatch.setattr(inst.BuildTask, "_setup_install_dir", _noop)
    monkeypatch.setattr(spack.build_environment, "start_build_process", _noop)
    monkeypatch.setattr(spack.database.Database, "add", _noop)
    monkeypatch.setattr(spack.compilers, "add_compilers_to_config", _add)
@ -867,8 +947,10 @@ def _chgrp(path, group, follow_symlinks=True):
    monkeypatch.setattr(prefs, "get_package_group", _get_group)
    monkeypatch.setattr(fs, "chgrp", _chgrp)

    installer = create_installer(["trivial-install-test-package"], {})
    spec = installer.build_requests[0].pkg.spec
    build_task = create_build_task(
        spack.spec.Spec("trivial-install-test-package").concretized().package
    )
    spec = build_task.request.pkg.spec

    fs.touchp(spec.prefix)
    metadatadir = spack.store.STORE.layout.metadata_path(spec)
@ -878,7 +960,7 @@ def _chgrp(path, group, follow_symlinks=True):
        metadatadir = None
    # Should fail with a "not a directory" error
    with pytest.raises(OSError, match=metadatadir):
        installer._setup_install_dir(spec.package)
        build_task._setup_install_dir(spec.package)

    out = str(capfd.readouterr()[0])
@ -965,79 +1047,76 @@ def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
    assert "Skipping build of pkg-a" in out


def test_install_fail_on_interrupt(install_mockery, monkeypatch):
    def _interrupt(installer, task, install_status, **kwargs):
        if task.pkg.name == "a":
            raise KeyboardInterrupt("mock keyboard interrupt for a")
        else:
            return installer._real_install_task(task, None)
            # installer.installed.add(task.pkg.name)


def test_install_fail_on_interrupt(install_mockery, mock_fetch, monkeypatch):
    """Test ctrl-c interrupted install."""
    spec_name = "pkg-a"
    err_msg = "mock keyboard interrupt for {0}".format(spec_name)

    def _interrupt(installer, task, install_status, **kwargs):
        if task.pkg.name == spec_name:
            raise KeyboardInterrupt(err_msg)
        else:
            installer.installed.add(task.pkg.name)

    installer = create_installer([spec_name], {})

    # TODO: Clean this up in fixture with delattr.
    setattr(inst.PackageInstaller, "_real_install_task", inst.PackageInstaller._install_task)
    # Raise a KeyboardInterrupt error to trigger early termination
    monkeypatch.setattr(inst.PackageInstaller, "_install_task", _interrupt)

    with pytest.raises(KeyboardInterrupt, match=err_msg):
        installer.install()

    assert "pkg-b" in installer.installed  # ensure dependency of pkg-a is 'installed'
    assert spec_name not in installer.installed
    assert not any(i.startswith("pkg-a-") for i in installer.installed)
    assert any(
        i.startswith("pkg-b-") for i in installer.installed
    )  # ensure dependency of pkg-a is 'installed'
    # assert spec_name not in installer.installed
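The TODO above asks for the save-and-restore of _install_task to move into a fixture; a minimal sketch of what that could look like (fixture name hypothetical):

    @pytest.fixture
    def real_install_task():
        # Stash the real implementation on the class for tests that call through
        # to it, and delete the helper attribute on teardown so it cannot leak.
        inst.PackageInstaller._real_install_task = inst.PackageInstaller._install_task
        yield inst.PackageInstaller._real_install_task
        delattr(inst.PackageInstaller, "_real_install_task")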
def test_install_fail_single(install_mockery, monkeypatch):
    class MyBuildException(Exception):
        pass


def _install_fail_my_build_exception(installer, task, install_status, **kwargs):
    print(task, task.pkg.name)
    if task.pkg.name == "pkg-a":
        raise MyBuildException("mock internal package build error for pkg-a")
    else:
        # No need for more complex logic here because no splices
        task.execute(install_status)
        installer._update_installed(task)


def test_install_fail_single(install_mockery, mock_fetch, monkeypatch):
    """Test expected results for failure of single package."""
    spec_name = "pkg-a"
    err_msg = "mock internal package build error for {0}".format(spec_name)

    class MyBuildException(Exception):
        pass

    def _install(installer, task, install_status, **kwargs):
        if task.pkg.name == spec_name:
            raise MyBuildException(err_msg)
        else:
            installer.installed.add(task.pkg.name)

    installer = create_installer([spec_name], {})
    installer = create_installer(["pkg-a"], {})

    # Raise MyBuildException to trigger the failure path
    monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install)
    monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_fail_my_build_exception)

    with pytest.raises(MyBuildException, match=err_msg):
    with pytest.raises(MyBuildException, match="mock internal package build error for pkg-a"):
        installer.install()

    assert "pkg-b" in installer.installed  # ensure dependency of pkg-a is 'installed'
    assert spec_name not in installer.installed
    # ensure dependency of pkg-a is 'installed' and pkg-a is not
    assert any(pkg_id.startswith("pkg-b-") for pkg_id in installer.installed)
    assert not any(pkg_id.startswith("pkg-a-") for pkg_id in installer.installed)
def test_install_fail_multi(install_mockery, monkeypatch):
def test_install_fail_multi(install_mockery, mock_fetch, monkeypatch):
    """Test expected results for failure of multiple packages."""
    spec_name = "pkg-c"
    err_msg = "mock internal package build error"

    class MyBuildException(Exception):
        pass

    def _install(installer, task, install_status, **kwargs):
        if task.pkg.name == spec_name:
            raise MyBuildException(err_msg)
        else:
            installer.installed.add(task.pkg.name)

    installer = create_installer([spec_name, "pkg-a"], {})
    installer = create_installer(["pkg-a", "pkg-c"], {})

    # Raise MyBuildException to trigger the failure path
    monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install)
    monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_fail_my_build_exception)

    with pytest.raises(inst.InstallError, match="Installation request failed"):
        installer.install()

    assert "pkg-a" in installer.installed  # ensure the second spec installed
    assert spec_name not in installer.installed
    # ensure the second spec installed but not the first
    assert any(pkg_id.startswith("pkg-c-") for pkg_id in installer.installed)
    assert not any(pkg_id.startswith("pkg-a-") for pkg_id in installer.installed)


def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
@ -1024,10 +1024,14 @@ def test_splice_swap_names(self, default_mock_concretization, transitive):

    @pytest.mark.parametrize("transitive", [True, False])
    def test_splice_swap_names_mismatch_virtuals(self, default_mock_concretization, transitive):
        spec = default_mock_concretization("splice-t")
        dep = default_mock_concretization("splice-vh+foo")
        t = default_mock_concretization("splice-t")
        vt = default_mock_concretization("splice-vt")
        vh = default_mock_concretization("splice-vh+foo")
        with pytest.raises(spack.spec.SpliceError, match="will not provide the same virtuals."):
            spec.splice(dep, transitive)
            vt.splice(vh, transitive)

        # No error for t, which depends on splice-h directly rather than on the "somethingelse" virtual
        t.splice(vh, transitive)
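Why the two splices behave differently, per the mock packages in this diff: splice-vt reaches its dependency through the virtual somethingelse, so a replacement must provide the same virtuals, while splice-t depends on splice-h by name, where no virtual check applies. A compact restatement of the contract:

    # vt -> somethingelse (virtual): replacement must provide the same virtuals
    # t  -> splice-h (by name):      virtuals never enter the comparison
    with pytest.raises(spack.spec.SpliceError):
        vt.splice(vh, transitive)
    t.splice(vh, transitive)  # succeeds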
    def test_spec_override(self):
        init_spec = Spec("pkg-a foo=baz foobar=baz cflags=-O3 cxxflags=-O1")
@ -1186,19 +1186,19 @@ complete -c spack -n '__fish_spack_using_command config' -l scope -r -d 'configu

# spack config get
set -g __fish_spack_optspecs_spack_config_get h/help
complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop mirrors modules packages repos upstreams view'
complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop mirrors modules packages repos splice upstreams view'
complete -c spack -n '__fish_spack_using_command config get' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config get' -s h -l help -d 'show this help message and exit'

# spack config blame
set -g __fish_spack_optspecs_spack_config_blame h/help
complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop mirrors modules packages repos upstreams view'
complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop mirrors modules packages repos splice upstreams view'
complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -d 'show this help message and exit'

# spack config edit
set -g __fish_spack_optspecs_spack_config_edit h/help print-file
complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop mirrors modules packages repos upstreams view'
complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config definitions develop mirrors modules packages repos splice upstreams view'
complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command config edit' -l print-file -f -a print_file
24
var/spack/repos/builtin.mock/packages/splice-vt/package.py
Normal file
@ -0,0 +1,24 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class SpliceVt(Package):
    """Simple package that depends on a virtual and a concrete package"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/splice-t-1.0.tar.gz"

    version("1.0", md5="0123456789abcdef0123456789abcdef")

    depends_on("somethingelse")
    depends_on("splice-z")

    def install(self, spec, prefix):
        with open(prefix.join("splice-t"), "w") as f:
            f.write("splice-t: {0}".format(prefix))
            f.write("splice-h: {0}".format(spec["somethingelse"].prefix))
            f.write("splice-z: {0}".format(spec["splice-z"].prefix))