Compare commits

...

27 Commits

Author SHA1 Message Date
Gregory Becker
559ace64e1
reporters wip: working for installs
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-04-11 15:47:21 -07:00
Gregory Becker
a2441f4656
fixup after rebase
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-04-09 10:19:29 -07:00
kshea21
dcdbbd235d
style 2025-04-09 09:49:41 -07:00
Gregory Becker
be03df1b86
fix overwrite test 2025-04-09 09:49:40 -07:00
kshea21
e3d3230c9b
new mock packages 2025-04-09 09:49:40 -07:00
Gregory Becker
e687bb902f
cleanup from debugging 2025-04-09 09:49:40 -07:00
Gregory Becker
f1638365a9
refactor transaction across start/complete 2025-04-09 09:49:40 -07:00
Gregory Becker
4e9547703c
overwrite tests: work on macos 2025-04-09 09:49:38 -07:00
kshea21
56df316cc2
overwrite install refactoring and tests 2025-04-09 09:48:10 -07:00
kshea21
0cfd514c0c
spack commands --update-completion 2025-04-09 09:47:22 -07:00
Todd Gamblin
92e1b1795b
fix style issues and refactor a bit
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:47:22 -07:00
kshea21
d8362d02c7
pushing to rebase 2025-04-09 09:47:22 -07:00
kshea21
57a5722ba7
some style things 2025-04-09 09:47:19 -07:00
Todd Gamblin
1e5e416df0
docs: add nitpick ignore for internal multiprocessing.context.Process class
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:46:21 -07:00
Todd Gamblin
8d0923f29e
add quotes to type annotation
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:46:20 -07:00
kshea21
4a153a185b
refactors and test fixes 2025-04-09 09:46:18 -07:00
kshea21
2da51eaec7
failing overwrite build deps 2025-04-09 09:44:01 -07:00
kshea21
974d10f32b
added no_op state tracking 2025-04-09 09:41:56 -07:00
kshea21
1a19c09c55
style 2025-04-09 09:41:56 -07:00
kshea21
11572c0caf
multi and single tests 2025-04-09 09:41:56 -07:00
kshea21
5d7ba48b36
style 2025-04-09 09:41:53 -07:00
kshea21
fa088e4975
review changes 2025-04-09 09:40:38 -07:00
kshea21
9d0133a771
process handle back 2025-04-09 09:38:37 -07:00
kshea21
0b1e388a0f
remove process handle 2025-04-09 09:38:35 -07:00
kshea21
60380a4724
untest -> test 2025-04-09 09:37:38 -07:00
kshea21
03a3546f14
style 2025-04-09 09:37:38 -07:00
kshea21
0808fd1a44
Rebasing -p/--concurrent-packages on develop 2025-04-09 09:37:32 -07:00
21 changed files with 1102 additions and 736 deletions

View File

@ -206,6 +206,7 @@ def setup(sphinx):
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
("py:class", "concurrent.futures._base.Executor"),
("py:class", "multiprocessing.context.Process"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),

View File

@ -1030,6 +1030,9 @@ def replace_directory_transaction(directory_name):
Returns:
temporary directory where ``directory_name`` has been moved
"""
for a, b, c in os.walk(directory_name):
print("PRE", a, b, c)
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
directory_name = os.path.abspath(directory_name)
@ -1048,6 +1051,7 @@ def replace_directory_transaction(directory_name):
try:
yield backup_dir
except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
print("hitting the proper exception block")
# Try to recover the original directory, if this fails, raise a
# composite exception.
try:
@ -1056,11 +1060,15 @@ def replace_directory_transaction(directory_name):
shutil.rmtree(directory_name)
os.rename(backup_dir, directory_name)
except Exception as outer_exception:
print("CouldNOtRestoreDirectBackup")
raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
for a, b, c in os.walk(directory_name):
print("RESTORED", a, b, c)
tty.debug("Directory recovered [{0}]".format(directory_name))
raise
else:
print("NO FAILURE")
# Otherwise delete the temporary directory
shutil.rmtree(tmpdir, ignore_errors=True)
tty.debug("Temporary directory deleted [{0}]".format(tmpdir))

View File

@ -92,7 +92,7 @@ def update_installations(self) -> None:
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
self.write(regenerate=False)
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
self.install_all()
self.install_all(fail_fast=True)
self.write(regenerate=True)
def load(self) -> None:

View File

@ -31,12 +31,14 @@
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
import inspect
import io
import multiprocessing
import os
import re
import signal
import stat
import sys
import traceback
import types
@ -46,6 +48,7 @@
from itertools import chain
from multiprocessing.connection import Connection
from typing import (
Any,
Callable,
Dict,
List,
@ -452,6 +455,35 @@ def optimization_flags(compiler, target):
return result
class FilterDefaultDynamicLinkerSearchPaths:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        # Directories are identified by their (inode, device) pair so that
        # symlinked aliases of the same directory compare equal.
        self.default_path_identifiers: Set[Tuple[int, int]] = set()
        if not dynamic_linker:
            return
        default_paths = spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker)
        for candidate in default_paths:
            try:
                info = os.stat(candidate)
            except OSError:
                # Unreadable or missing entries simply are not default paths.
                continue
            if stat.S_ISDIR(info.st_mode):
                self.default_path_identifiers.add((info.st_ino, info.st_dev))

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        """Return True iff ``p`` resolves to a default search directory of the linker."""
        try:
            info = os.stat(p)
        except OSError:
            return False
        return (info.st_ino, info.st_dev) in self.default_path_identifiers

    def __call__(self, dirs: List[str]) -> List[str]:
        """Filter out of ``dirs`` every directory the dynamic linker searches by default."""
        if not self.default_path_identifiers:
            return dirs
        return [d for d in dirs if not self.is_dynamic_loader_default_path(d)]
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH.
@ -1093,6 +1125,52 @@ def load_external_modules(context: SetupContext) -> None:
load_module(external_module)
class ProcessHandle:
    """Manages and monitors the state of a child process for package installation."""

    def __init__(
        self,
        pkg: "spack.package_base.PackageBase",
        process: multiprocessing.Process,
        read_pipe: multiprocessing.connection.Connection,
        timeout: int,
    ):
        """
        Parameters:
            pkg: The package to be built and installed by the child process.
            process: The child process instance being managed/monitored.
            read_pipe: The pipe used for receiving information from the child process.
            timeout: Seconds to wait for the child when completing it.
        """
        self.pkg = pkg
        self.process = process
        self.read_pipe = read_pipe
        self.timeout = timeout

    def poll(self) -> bool:
        """Return whether the child has sent data over the read pipe."""
        return self.read_pipe.poll()

    def complete(self):
        """Block until the child process finishes, returning its exit status.

        Delegates to ``complete_build_process()``.
        """
        return complete_build_process(self)

    def terminate_processes(self):
        """Stop the child process after an installation failure or error."""
        if self.process.is_alive():
            # First give the child an opportunity to shut down gracefully.
            self.process.terminate()
            self.process.join(timeout=1)
        # Escalate to SIGKILL if the child ignored the graceful request.
        if self.process.is_alive():
            os.kill(self.process.pid, signal.SIGKILL)
        self.process.join()
def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable,
@ -1106,7 +1184,7 @@ def _setup_pkg_and_run(
``_setup_pkg_and_run`` is called by the child process created in
``start_build_process()``, and its main job is to run ``function()`` on behalf of
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
some Spack installation (see :ref:`spack.installer.PackageInstaller._complete_task`).
The child process is passed a ``write_pipe``, on which it's expected to send one of
the following:
@ -1243,19 +1321,30 @@ def terminate(self):
os.kill(self.p.pid, signal.SIGKILL)
self.p.join()
@property
def pid(self):
return self.p.pid
@property
def exitcode(self):
return self.p.exitcode
def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
def start_build_process(
pkg: "spack.package_base.PackageBase",
function: Callable,
kwargs: Dict[str, Any],
*,
timeout: Optional[int] = None,
):
"""Create a child process to do part of a spack build.
Args:
pkg (spack.package_base.PackageBase): package whose environment we should set up the
pkg: package whose environment we should set up the
child process for.
function (typing.Callable): argless function to run in the child process.
function: argless function to run in the child
process.
kwargs: additional keyword arguments to pass to ``function()``
timeout: maximum time allowed to finish the execution of function
Usage::
@ -1269,9 +1358,6 @@ def child_fun():
control over the environment, etc. without affecting other builds
that might be executed in the same spack call.
If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError.
"""
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
input_fd = None
@ -1321,10 +1407,27 @@ def child_fun():
if input_fd is not None:
input_fd.close()
def exitcode_msg(p):
typ = "exit" if p.exitcode >= 0 else "signal"
return f"{typ} {abs(p.exitcode)}"
# Create a ProcessHandle that the caller can use to track
# and complete the process started by this function.
process_handle = ProcessHandle(pkg, p, read_pipe, timeout=timeout)
return process_handle
def complete_build_process(handle: ProcessHandle):
"""
Waits for the child process to complete and handles its exit status.
If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError.
"""
def exitcode_msg(process):
typ = "exit" if handle.process.exitcode >= 0 else "signal"
return f"{typ} {abs(handle.process.exitcode)}"
p = handle.process
timeout = handle.timeout
p.join(timeout=timeout)
if p.is_alive():
warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
@ -1332,18 +1435,23 @@ def exitcode_msg(p):
p.join()
try:
child_result = read_pipe.recv()
# Check if information from the read pipe has been received.
child_result = handle.read_pipe.recv()
except EOFError:
raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
handle.process.join()
raise InstallError(
f"The process has stopped unexpectedly ({exitcode_msg(handle.process)})"
)
handle.process.join()
# If returns a StopPhase, raise it
if isinstance(child_result, spack.error.StopPhase):
# do not print
raise child_result
# let the caller know which package went wrong.
if isinstance(child_result, InstallError):
child_result.pkg = pkg
child_result.pkg = handle.pkg
if isinstance(child_result, ChildError):
# If the child process raised an error, print its output here rather
@ -1354,13 +1462,13 @@ def exitcode_msg(p):
raise child_result
# Fallback. Usually caught beforehand in EOFError above.
if p.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
if handle.process.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(handle.process)})")
return child_result
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.BaseBuilder)
def get_package_context(traceback, context=3):

View File

@ -97,7 +97,7 @@ def _specs(self, **kwargs):
class SetParallelJobs(argparse.Action):
"""Sets the correct value for parallel build jobs.
The value is is set in the command line configuration scope so that
The value is set in the command line configuration scope so that
it can be retrieved using the spack.config API.
"""
@ -113,6 +113,23 @@ def __call__(self, parser, namespace, jobs, option_string):
setattr(namespace, "jobs", jobs)
class SetConcurrentPackages(argparse.Action):
    """Sets the value for maximum number of concurrent package builds

    The value is set in the command line configuration scope so that
    it can be retrieved using the spack.config API.
    """

    def __call__(self, parser, namespace, concurrent_packages, option_string):
        """Validate and store the requested package-build concurrency.

        Raises:
            ValueError: if ``concurrent_packages`` is not a positive integer.
        """
        if concurrent_packages < 1:
            # Reject zero/negative values before touching the configuration.
            # (Fixed typo: "expectd" -> "expected" in the user-facing message.)
            msg = 'invalid value for argument "{0}" [expected a positive integer, got "{1}"]'
            raise ValueError(msg.format(option_string, concurrent_packages))

        spack.config.set("config:concurrent_packages", concurrent_packages, scope="command_line")
        setattr(namespace, "concurrent_packages", concurrent_packages)
class DeptypeAction(argparse.Action):
"""Creates a flag of valid dependency types from a deptype argument."""
@ -377,6 +394,18 @@ def jobs():
)
@arg
def concurrent_packages():
    """Common ``-p/--concurrent-packages`` option for install-like commands."""
    settings = dict(
        action=SetConcurrentPackages,
        type=int,
        default=4,
        help="maximum number of packages to build concurrently",
    )
    return Args("-p", "--concurrent-packages", **settings)
@arg
def install_status():
return Args(

View File

@ -63,6 +63,7 @@ def install_kwargs_from_args(args):
"unsigned": args.unsigned,
"install_deps": ("dependencies" in args.things_to_install),
"install_package": ("package" in args.things_to_install),
"concurrent_packages": args.concurrent_packages,
}
@ -84,6 +85,7 @@ def setup_parser(subparser):
default=None,
help="phase to stop after when installing (default None)",
)
arguments.add_common_arguments(subparser, ["concurrent_packages"])
arguments.add_common_arguments(subparser, ["jobs"])
subparser.add_argument(
"--overwrite",
@ -329,16 +331,8 @@ def install(parser, args):
arguments.sanitize_reporter_options(args)
def reporter_factory(specs):
if args.log_format is None:
return lang.nullcontext()
return spack.report.build_context_manager(
reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
)
reporter = args.reporter() if args.log_format else None
install_kwargs = install_kwargs_from_args(args)
env = ev.active_environment()
if not env and not args.spec and not args.specfiles:
@ -346,9 +340,9 @@ def reporter_factory(specs):
try:
if env:
install_with_active_env(env, args, install_kwargs, reporter_factory)
install_with_active_env(env, args, install_kwargs, reporter)
else:
install_without_active_env(args, install_kwargs, reporter_factory)
install_without_active_env(args, install_kwargs, reporter)
except InstallError as e:
if args.show_log_on_error:
_dump_log_on_error(e)
@ -382,7 +376,7 @@ def _maybe_add_and_concretize(args, env, specs):
env.write(regenerate=False)
def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_factory):
def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter):
specs = spack.cmd.parse_specs(args.spec)
# The following two commands are equivalent:
@ -416,7 +410,9 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
install_kwargs["overwrite"] = [spec.dag_hash() for spec in specs_to_install]
try:
with reporter_factory(specs_to_install):
report_file = report_filename(args, specs_to_install)
install_kwargs["report_file"] = report_file
install_kwargs["reporter"] = reporter
env.install_specs(specs_to_install, **install_kwargs)
finally:
if env.views:
@ -461,18 +457,23 @@ def concrete_specs_from_file(args):
return result
def install_without_active_env(args, install_kwargs, reporter_factory):
def install_without_active_env(args, install_kwargs, reporter):
concrete_specs = concrete_specs_from_cli(args, install_kwargs) + concrete_specs_from_file(args)
if len(concrete_specs) == 0:
tty.die("The `spack install` command requires a spec to install.")
with reporter_factory(concrete_specs):
if args.overwrite:
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
try:
builder = PackageInstaller(installs, **install_kwargs)
builder.install()
finally:
if reporter:
report_file = report_filename(args, concrete_specs)
reporter.build_report(report_file, list(builder.reports.values()))

View File

@ -1638,7 +1638,7 @@ def determine_number_of_jobs(
except ValueError:
pass
return min(max_cpus, cfg.get("config:build_jobs", 16))
return min(max_cpus, cfg.get("config:build_jobs", 4))
class ConfigSectionError(spack.error.ConfigError):

View File

@ -284,7 +284,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
Raised RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert path.startswith(self.root)
assert path.startswith(self.root), f"PATH: {path}, ROOT: {self.root}"
if deprecated:
if os.path.exists(path):

View File

@ -1906,6 +1906,10 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
roots = self.concrete_roots()
specs = specs if specs is not None else roots
# Extract reporter arguments
reporter = install_args.pop("reporter", None)
report_file = install_args.pop("report_file", None)
# Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = {
*install_args.get("overwrite", ()),
@ -1918,7 +1922,12 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
*(s.dag_hash() for s in roots),
}
PackageInstaller([spec.package for spec in specs], **install_args).install()
try:
builder = PackageInstaller([spec.package for spec in specs], **install_args)
builder.install()
finally:
if reporter:
reporter.build_report(report_file, list(builder.reports.values()))
def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs"""

View File

@ -399,9 +399,10 @@ def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
"""
import spack.build_environment # avoid circular dependency
spack.build_environment.start_build_process(
ph = spack.build_environment.start_build_process(
self.pkg, test_process, kwargs, timeout=timeout
)
spack.build_environment.ProcessHandle.complete(ph)
def parts(self) -> int:
"""The total number of (checked) test parts."""

View File

@ -34,6 +34,7 @@
import os
import shutil
import sys
import tempfile
import time
from collections import defaultdict
from gzip import GzipFile
@ -59,6 +60,7 @@
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
import spack.report
import spack.rewiring
import spack.spec
import spack.store
@ -73,6 +75,8 @@
#: were added (see https://docs.python.org/2/library/heapq.html).
_counter = itertools.count(0)
_fail_fast_err = "Terminating after first install failure"
class BuildStatus(enum.Enum):
"""Different build (task) states."""
@ -118,6 +122,9 @@ class ExecuteResult(enum.Enum):
FAILED = enum.auto()
# Task is missing build spec and will be requeued
MISSING_BUILD_SPEC = enum.auto()
# Task is installed upstream/external or
# task is not ready for installation (locked by another process)
NO_OP = enum.auto()
class InstallAction(enum.Enum):
@ -867,13 +874,17 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
class Task:
"""Base class for representing a task for a package."""
success_result: Optional[ExecuteResult] = None
error_result: Optional[BaseException] = None
no_op: bool = False
def __init__(
self,
pkg: "spack.package_base.PackageBase",
request: BuildRequest,
*,
compiler: bool = False,
start: float = 0.0,
start_time: float = 0.0,
attempts: int = 0,
status: BuildStatus = BuildStatus.QUEUED,
installed: Set[str] = set(),
@ -884,7 +895,7 @@ def __init__(
Args:
pkg: the package to be built and installed
request: the associated install request
start: the initial start time for the package, in seconds
start_time: the initial start time for the package, in seconds
attempts: the number of attempts to install the package, which
should be 0 when the task is initially instantiated
status: the installation status
@ -913,6 +924,9 @@ def __init__(
raise TypeError(f"{request} is not a valid build request")
self.request = request
# Report for tracking install success/failure
self.record = spack.report.InstallRecord(self.pkg.spec)
# Initialize the status to an active state. The status is used to
# ensure priority queue invariants when tasks are "removed" from the
# queue.
@ -930,7 +944,7 @@ def __init__(
self.pid = os.getpid()
# The initial start time for processing the spec
self.start = start
self.start_time = start_time
if not isinstance(installed, set):
raise TypeError(
@ -967,11 +981,19 @@ def __init__(
self.attempts = attempts
self._update()
def execute(self, install_status: InstallStatus) -> ExecuteResult:
"""Execute the work of this task.
# initialize cache variables
self._install_action = None
The ``install_status`` is an ``InstallStatus`` object used to format progress reporting for
this task in the context of the full ``BuildRequest``."""
def start(self):
"""Start the work of this task."""
raise NotImplementedError
def poll(self) -> bool:
"""Check if child process has information ready to receive."""
raise NotImplementedError
def complete(self) -> ExecuteResult:
"""Complete the work of this task."""
raise NotImplementedError
def __eq__(self, other):
@ -1002,8 +1024,8 @@ def __repr__(self) -> str:
def __str__(self) -> str:
"""Returns a printable version of the task."""
dependencies = f"#dependencies={len(self.dependencies)}"
return "priority={0}, status={1}, start={2}, {3}".format(
self.priority, self.status, self.start, dependencies
return "priority={0}, status={1}, start_time={2}, {3}".format(
self.priority, self.status, self.start_time, dependencies
)
def _update(self) -> None:
@ -1089,6 +1111,49 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
# Always write host environment - we assume this can change
spack.store.STORE.layout.write_host_environment(pkg.spec)
@property
def install_action(self):
if not self._install_action:
self._install_action = self.get_install_action()
return self._install_action
def get_install_action(self: "Task") -> InstallAction:
    """
    Determine whether the installation should be overwritten (if it already
    exists) or skipped (if has been handled by another process).

    If the package has not been installed yet, this will indicate that the
    installation should proceed as normal (i.e. no need to transactionally
    preserve the old prefix).

    Returns:
        InstallAction.INSTALL, InstallAction.NONE, or InstallAction.OVERWRITE.
    """
    # If we don't have to overwrite, do a normal install
    if self.pkg.spec.dag_hash() not in self.request.overwrite:
        return InstallAction.INSTALL

    # If it's not installed, do a normal install as well
    rec, installed = check_db(self.pkg.spec)
    if not installed:
        return InstallAction.INSTALL

    # Ensure install_tree projections have not changed.
    assert rec and self.pkg.prefix == rec.path

    # If another process has overwritten this, we shouldn't install at all.
    # NOTE(review): assumes rec.installation_time and request.overwrite_time
    # use the same clock/epoch — confirm against the database record writer.
    if rec.installation_time >= self.request.overwrite_time:
        return InstallAction.NONE

    # If the install prefix is missing, warn about it, and proceed with
    # normal install.
    if not os.path.exists(self.pkg.prefix):
        tty.debug("Missing installation to overwrite")
        return InstallAction.INSTALL

    # Otherwise, do an actual overwrite install. We backup the original
    # install directory, put the old prefix
    # back on failure
    return InstallAction.OVERWRITE
@property
def explicit(self) -> bool:
return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])
@ -1123,7 +1188,7 @@ def next_attempt(self, installed) -> "Task":
"""Create a new, updated task for the next installation attempt."""
task = copy.copy(self)
task._update()
task.start = self.start or time.time()
task.start_time = self.start_time or time.time()
task.flag_installed(installed)
return task
@ -1132,56 +1197,170 @@ def priority(self):
"""The priority is based on the remaining uninstalled dependencies."""
return len(self.uninstalled_deps)
def terminate(self) -> None:
"""End any processes and clean up any resources allocated by this Task.
By default this is a no-op.
"""
def check_db(spec: "spack.spec.Spec") -> Tuple[Optional[spack.database.InstallRecord], bool]:
    """Determine if the spec is flagged as installed in the database

    Args:
        spec: spec whose database install status is being checked

    Return:
        Tuple of optional database record, and a boolean installed_in_db
        that's ``True`` iff the spec is considered installed
    """
    try:
        record = spack.store.STORE.db.get_record(spec)
    except KeyError:
        # KeyError means no spec matched at all (as opposed to matching
        # specs that merely are not installed).
        return None, False
    installed_in_db = record.installed if record else False
    return record, installed_in_db
class BuildTask(Task):
"""Class for representing a build task for a package."""
def execute(self, install_status):
"""
Perform the installation of the requested spec and/or dependency
represented by the build task.
"""
process_handle: Optional["spack.build_environment.ProcessHandle"] = None
started: bool = False
no_op: bool = False
tmpdir = None
backup_dir = None
def start(self):
"""Attempt to use the binary cache to install
requested spec and/or dependency if requested.
Otherwise, start a process for of the requested spec and/or
dependency represented by the BuildTask."""
self.record.start()
if self.install_action == InstallAction.OVERWRITE:
self.tmpdir = tempfile.mkdtemp(dir=os.path.dirname(self.pkg.prefix), prefix=".backup")
self.backup_dir = os.path.join(self.tmpdir, "backup")
os.rename(self.pkg.prefix, self.backup_dir)
assert not self.started, "Cannot start a task that has already been started."
self.started = True
install_args = self.request.install_args
tests = install_args.get("tests")
unsigned = install_args.get("unsigned")
pkg, pkg_id = self.pkg, self.pkg_id
self.start_time = self.start_time or time.time()
tty.msg(install_msg(pkg_id, self.pid, install_status))
self.start = self.start or time.time()
self.status = BuildStatus.INSTALLING
tests = install_args.get("tests")
pkg.run_tests = tests is True or tests and pkg.name in tests
# Use the binary cache if requested
# Use the binary cache to install if requested,
# save result to be handled in BuildTask.complete()
if self.use_cache:
if _install_from_cache(pkg, self.explicit, unsigned):
return ExecuteResult.SUCCESS
self.success_result = ExecuteResult.SUCCESS
return
elif self.cache_only:
raise spack.error.InstallError(
self.error_result = spack.error.InstallError(
"No binary found when cache-only was specified", pkg=pkg
)
return
else:
tty.msg(f"No binary for {pkg_id} found: installing from source")
pkg.run_tests = tests is True or tests and pkg.name in tests
# if there's an error result, don't start a new process, and leave
if self.error_result is not None:
return
# Create stage object now and let it be serialized for the child process. That
# way monkeypatch in tests works correctly.
pkg.stage
self._setup_install_dir(pkg)
# Create a child process to do the actual installation.
self.process_handle = spack.build_environment.start_build_process(
self.pkg, build_process, self.request.install_args
)
# Identify the child process
self.child_pid = self.process_handle.process.pid
def poll(self):
"""Check if task has successfully executed, caused an InstallError,
or the child process has information ready to receive."""
assert (
self.started or self.no_op
), "Can't call `poll()` before `start()` or identified no-operation task"
return self.no_op or self.success_result or self.error_result or self.process_handle.poll()
def succeed(self):
self.record.succeed()
# delete the temporary backup for an overwrite
# see llnl.util.filesystem.restore_directory_transaction
if self.install_action == InstallAction.OVERWRITE:
shutil.rmtree(self.tmpdir, ignore_errors=True)
def fail(self, inner_exception):
self.record.fail(inner_exception)
if self.install_action != InstallAction.OVERWRITE:
raise inner_exception
# restore the overwrite directory from backup
# see llnl.util.filesystem.restore_directory_transaction
try:
if os.path.exists(self.pkg.prefix):
shutil.rmtree(self.pkg.prefix)
os.rename(self.backup_dir, self.pkg.prefix)
except Exception as outer_exception:
raise fs.CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
raise inner_exception
def complete(self):
"""
Complete the installation of the requested spec and/or dependency
represented by the build task.
"""
assert (
self.started or self.no_op
), "Can't call `complete()` before `start()` or identified no-operation task"
install_args = self.request.install_args
pkg = self.pkg
self.status = BuildStatus.INSTALLING
# If task has been identified as a no operation,
# return ExecuteResult.NOOP
if self.no_op:
self.succeed()
return ExecuteResult.NO_OP
# If installing a package from binary cache is successful,
# return ExecuteResult.SUCCESS
if self.success_result is not None:
self.succeed()
return self.success_result
# If an error arises from installing a package,
# raise spack.error.InstallError
if self.error_result is not None:
self.fail(self.error_result)
# hook that allows tests to inspect the Package before installation
# see unit_test_check() docs.
if not pkg.unit_test_check():
self.succeed()
return ExecuteResult.FAILED
try:
# Create stage object now and let it be serialized for the child process. That
# way monkeypatch in tests works correctly.
pkg.stage
self._setup_install_dir(pkg)
# Create a child process to do the actual installation.
# Preserve verbosity settings across installs.
spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
pkg, build_process, install_args
)
# Check if the task's child process has completed
spack.package_base.PackageBase._verbose = self.process_handle.complete()
# Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.STORE.db.add(pkg.spec, explicit=self.explicit)
@ -1191,13 +1370,28 @@ def execute(self, install_status):
pid = f"{self.pid}: " if tty.show_pid() else ""
tty.debug(f"{pid}{str(e)}")
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
except (Exception, KeyboardInterrupt, SystemExit) as e:
self.fail(e)
self.succeed()
return ExecuteResult.SUCCESS
def terminate(self) -> None:
"""Terminate any processes this task still has running."""
if self.process_handle:
self.process_handle.terminate_processes()
class RewireTask(Task):
"""Class for representing a rewire task for a package."""
def execute(self, install_status):
def start(self):
self.record.start()
def poll(self):
return True
def complete(self):
"""Execute rewire task
Rewire tasks are executed by either rewiring self.package.spec.build_spec that is already
@ -1209,22 +1403,25 @@ def execute(self, install_status):
"""
oldstatus = self.status
self.status = BuildStatus.INSTALLING
tty.msg(install_msg(self.pkg_id, self.pid, install_status))
self.start = self.start or time.time()
if not self.pkg.spec.build_spec.installed:
try:
install_args = self.request.install_args
unsigned = install_args.get("unsigned")
_process_binary_cache_tarball(self.pkg, explicit=self.explicit, unsigned=unsigned)
_print_installed_pkg(self.pkg.prefix)
self.record.succeed()
return ExecuteResult.SUCCESS
except BaseException as e:
tty.error(f"Failed to rewire {self.pkg.spec} from binary. {e}")
self.status = oldstatus
return ExecuteResult.MISSING_BUILD_SPEC
try:
spack.rewiring.rewire_node(self.pkg.spec, self.explicit)
_print_installed_pkg(self.pkg.prefix)
self.record.succeed()
return ExecuteResult.SUCCESS
except BaseException as e:
self.record.fail(e)
class PackageInstaller:
@ -1263,6 +1460,7 @@ def __init__(
unsigned: Optional[bool] = None,
use_cache: bool = False,
verbose: bool = False,
concurrent_packages: int = 4,
) -> None:
"""
Arguments:
@ -1286,6 +1484,7 @@ def __init__(
run tests for some
use_cache: Install from binary package, if available.
verbose: Display verbose build output (by default, suppresses it)
concurrent_packages: Max packages to be built concurrently
"""
if isinstance(explicit, bool):
explicit = {pkg.spec.dag_hash() for pkg in packages} if explicit else set()
@ -1315,6 +1514,7 @@ def __init__(
"unsigned": unsigned,
"use_cache": use_cache,
"verbose": verbose,
"concurrent_packages": concurrent_packages,
}
# List of build requests
@ -1348,6 +1548,17 @@ def __init__(
# Initializing all_dependencies to empty. This will be set later in _init_queue.
self.all_dependencies: Dict[str, Set[str]] = {}
# Maximum number of concurrent packages to build
self.max_active_tasks = concurrent_packages
# Reports on install success/failure
self.reports: Dict[str, dict] = {}
for build_request in self.build_requests:
# Skip reporting for already installed specs
request_record = spack.report.RequestRecord(build_request.pkg.spec)
request_record.skip_installed()
self.reports[build_request.pkg_id] = request_record
def __repr__(self) -> str:
"""Returns a formal representation of the package installer."""
rep = f"{self.__class__.__name__}("
@ -1384,28 +1595,6 @@ def _add_init_task(
self._push_task(task)
def _check_db(
    self, spec: "spack.spec.Spec"
) -> Tuple[Optional[spack.database.InstallRecord], bool]:
    """Look up *spec* in the database and report its install status.

    Args:
        spec: spec whose database install status is being checked

    Returns:
        Tuple of the database record (or ``None`` when the spec is not in
        the database) and a boolean that is ``True`` iff the spec is
        considered installed.
    """
    try:
        rec = spack.store.STORE.db.get_record(spec)
    except KeyError:
        # No matching spec in the database at all (as opposed to a
        # matching spec that is simply not installed).
        return None, False
    return rec, rec.installed if rec else False
def _check_deps_status(self, request: BuildRequest) -> None:
"""Check the install status of the requested package
@ -1438,7 +1627,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
# Check the database to see if the dependency has been installed
# and flag as such if appropriate
rec, installed_in_db = self._check_db(dep)
rec, installed_in_db = check_db(dep)
if (
rec
and installed_in_db
@ -1476,22 +1665,23 @@ def _prepare_for_install(self, task: Task) -> None:
return
# Determine if the spec is flagged as installed in the database
rec, installed_in_db = self._check_db(task.pkg.spec)
rec, installed_in_db = check_db(task.pkg.spec)
if not installed_in_db:
# Ensure there is no other installed spec with the same prefix dir
if spack.store.STORE.db.is_occupied_install_prefix(task.pkg.spec.prefix):
raise spack.error.InstallError(
task.error_result = spack.error.InstallError(
f"Install prefix collision for {task.pkg_id}",
long_msg=f"Prefix directory {task.pkg.spec.prefix} already "
"used by another installed spec.",
pkg=task.pkg,
)
return
# Make sure the installation directory is in the desired state
# for uninstalled specs.
if os.path.isdir(task.pkg.spec.prefix):
if not keep_prefix:
if not keep_prefix and task.install_action != InstallAction.OVERWRITE:
task.pkg.remove_prefix()
else:
tty.debug(f"{task.pkg_id} is partially installed")
@ -1747,19 +1937,27 @@ def _add_tasks(self, request: BuildRequest, all_deps):
fail_fast = bool(request.install_args.get("fail_fast"))
self.fail_fast = self.fail_fast or fail_fast
def _install_task(self, task: Task, install_status: InstallStatus) -> None:
def _complete_task(self, task: Task, install_status: InstallStatus) -> None:
"""
Perform the installation of the requested spec and/or dependency
Complete the installation of the requested spec and/or dependency
represented by the task.
Args:
task: the installation task for a package
install_status: the installation status for the package"""
rc = task.execute(install_status)
install_status: the installation status for the package
"""
try:
rc = task.complete()
except BaseException:
self.reports[task.request.pkg_id].append_record(task.record)
raise
if rc == ExecuteResult.MISSING_BUILD_SPEC:
self._requeue_with_build_spec_tasks(task)
elif rc == ExecuteResult.NO_OP:
pass
else: # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED
self._update_installed(task)
self.reports[task.request.pkg_id].append_record(task.record)
def _next_is_pri0(self) -> bool:
"""
@ -1773,18 +1971,42 @@ def _next_is_pri0(self) -> bool:
task = self.build_pq[0][1]
return task.priority == 0
def _pop_task(self) -> Optional[Task]:
"""
Remove and return the lowest priority task.
def _clear_removed_tasks(self):
    """Drop tasks flagged ``BuildStatus.REMOVED`` from the head of the queue."""
    pq = self.build_pq
    while pq:
        if pq[0][1].status != BuildStatus.REMOVED:
            break
        heapq.heappop(pq)
Source: Variant of function at docs.python.org/2/library/heapq.html
def _peek_ready_task(self) -> Optional[Task]:
"""
while self.build_pq:
task = heapq.heappop(self.build_pq)[1]
if task.status != BuildStatus.REMOVED:
Return the first ready task in the queue, or None if there are no ready tasks.
"""
self._clear_removed_tasks()
if not self.build_pq:
return None
task = self.build_pq[0][1]
return task if task.priority == 0 else None
def _pop_task(self) -> Task:
    """Dequeue and return the first (highest-priority) live task.

    Raises:
        IndexError: if there is nothing left in the queue.
    """
    self._clear_removed_tasks()
    if not self.build_pq:
        raise IndexError("Attempt to pop empty queue")

    task = heapq.heappop(self.build_pq)[1]
    self.build_tasks.pop(task.pkg_id)
    task.status = BuildStatus.DEQUEUED
    return task
def _pop_ready_task(self) -> Optional[Task]:
    """Remove and return the first ready task, or ``None`` if nothing is ready."""
    return self._pop_task() if self._peek_ready_task() else None
def _push_task(self, task: Task) -> None:
@ -1976,93 +2198,17 @@ def _init_queue(self) -> None:
task.add_dependent(dependent_id)
self.all_dependencies = all_dependencies
def _install_action(self, task: Task) -> InstallAction:
    """Decide how the installation of *task* should proceed.

    Returns ``InstallAction.OVERWRITE`` when an existing install must be
    transactionally replaced, ``InstallAction.NONE`` when another process
    has already handled it, and ``InstallAction.INSTALL`` otherwise (a
    normal install with no old prefix to preserve).
    """
    request = task.request
    spec = task.pkg.spec

    # Not an overwrite request for this spec: plain install.
    if spec.dag_hash() not in request.overwrite:
        return InstallAction.INSTALL

    # Not installed yet: plain install as well.
    rec, installed = self._check_db(spec)
    if not installed:
        return InstallAction.INSTALL

    # Ensure install_tree projections have not changed.
    assert rec and task.pkg.prefix == rec.path

    # Another process has already overwritten it; nothing to do.
    if rec.installation_time >= request.overwrite_time:
        return InstallAction.NONE

    # The prefix to overwrite is missing: warn and fall back to a
    # normal install.
    if not os.path.exists(task.pkg.prefix):
        tty.debug("Missing installation to overwrite")
        return InstallAction.INSTALL

    # Do a real overwrite install: the original directory is backed up
    # and restored on failure.
    return InstallAction.OVERWRITE
def install(self) -> None:
"""Install the requested package(s) and or associated dependencies."""
self._init_queue()
fail_fast_err = "Terminating after first install failure"
single_requested_spec = len(self.build_requests) == 1
failed_build_requests = []
install_status = InstallStatus(len(self.build_pq))
# Only enable the terminal status line when we're in a tty without debug info
# enabled, so that the output does not get cluttered.
term_status = TermStatusLine(
enabled=sys.stdout.isatty() and tty.msg_enabled() and not tty.is_debug()
)
while self.build_pq:
task = self._pop_task()
if task is None:
continue
install_args = task.request.install_args
keep_prefix = install_args.get("keep_prefix")
def start_task(
self, task: Task, install_status: InstallStatus, term_status: TermStatusLine
) -> None:
"""Attempts to start a package installation."""
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
install_status.next_pkg(pkg)
install_status.set_term_title(f"Processing {pkg.name}")
# install_status.set_term_title(f"Processing {task.pkg.name}")
tty.debug(f"Processing {pkg_id}: task={task}")
# Ensure that the current spec has NO uninstalled dependencies,
# which is assumed to be reflected directly in its priority.
#
# If the spec has uninstalled dependencies, then there must be
# a bug in the code (e.g., priority queue or uninstalled
# dependencies handling). So terminate under the assumption that
# all subsequent tasks will have non-zero priorities or may be
# dependencies of this task.
if task.priority != 0:
term_status.clear()
tty.error(
f"Detected uninstalled dependencies for {pkg_id}: " f"{task.uninstalled_deps}"
)
left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed]
if not left:
tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left")
dep_str = "dependencies" if task.priority > 1 else "dependency"
raise spack.error.InstallError(
f"Cannot proceed with {pkg_id}: {task.priority} uninstalled "
f"{dep_str}: {','.join(task.uninstalled_deps)}",
pkg=pkg,
)
# Debug
task.record.start()
# Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since
@ -2070,7 +2216,8 @@ def install(self) -> None:
if _handle_external_and_upstream(pkg, task.explicit):
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue
task.no_op = True
return
# Flag a failed spec. Do not need an (install) prefix lock since
# assume using a separate (failed) prefix lock file.
@ -2080,15 +2227,13 @@ def install(self) -> None:
self._update_failed(task)
if self.fail_fast:
raise spack.error.InstallError(fail_fast_err, pkg=pkg)
continue
task.error_result = spack.error.InstallError(_fail_fast_err, pkg=pkg)
# Attempt to get a write lock. If we can't get the lock then
# another process is likely (un)installing the spec or has
# determined the spec has already been installed (though the
# other process may be hung).
install_status.set_term_title(f"Acquiring lock for {pkg.name}")
install_status.set_term_title(f"Acquiring lock for {task.pkg.name}")
term_status.add(pkg_id)
ltype, lock = self._ensure_locked("write", pkg)
if lock is None:
@ -2101,7 +2246,8 @@ def install(self) -> None:
# -- failed, installed, or uninstalled -- on the next pass.
if lock is None:
self._requeue_task(task, install_status)
continue
task.no_op = True
return
term_status.clear()
@ -2110,8 +2256,7 @@ def install(self) -> None:
if task.request.overwrite and task.explicit:
task.request.overwrite_time = time.time()
# Determine state of installation artifacts and adjust accordingly.
install_status.set_term_title(f"Preparing {pkg.name}")
# install_status.set_term_title(f"Preparing {task.pkg.name}")
self._prepare_for_install(task)
# Flag an already installed package
@ -2135,7 +2280,8 @@ def install(self) -> None:
# or uninstalled -- on the next pass.
self.installed.remove(pkg_id)
self._requeue_task(task, install_status)
continue
task.no_op = True
return
# Having a read lock on an uninstalled pkg may mean another
# process completed an uninstall of the software between the
@ -2148,20 +2294,27 @@ def install(self) -> None:
if ltype == "read":
lock.release_read()
self._requeue_task(task, install_status)
continue
task.no_op = True
return
# Proceed with the installation since we have an exclusive write
# lock on the package.
install_status.set_term_title(f"Installing {pkg.name}")
try:
action = self._install_action(task)
install_status.set_term_title(f"Installing {task.pkg.name}")
action = task.install_action
if action == InstallAction.INSTALL:
self._install_task(task, install_status)
elif action == InstallAction.OVERWRITE:
# spack.store.STORE.db is not really a Database object, but a small
# wrapper -- silence mypy
OverwriteInstall(self, spack.store.STORE.db, task, install_status).install() # type: ignore[arg-type] # noqa: E501
if action in (InstallAction.INSTALL, InstallAction.OVERWRITE):
# Start a child process for a task that's ready to be installed.
task.start()
tty.msg(install_msg(pkg_id, self.pid, install_status))
def complete_task(self, task: Task, install_status: InstallStatus) -> Optional[Tuple]:
"""Attempts to complete a package installation."""
pkg, pkg_id = task.pkg, task.pkg_id
install_args = task.request.install_args
keep_prefix = install_args.get("keep_prefix")
action = task.install_action
try:
self._complete_task(task, install_status)
# If we installed then we should keep the prefix
stop_before_phase = getattr(pkg, "stop_before_phase", None)
@ -2188,9 +2341,22 @@ def install(self) -> None:
# this overrides a full method, which is ugly.
task.use_cache = False # type: ignore[misc]
self._requeue_task(task, install_status)
continue
return None
# Overwrite install exception handling
except fs.CouldNotRestoreDirectoryBackup as e:
spack.store.STORE.db.remove(task.pkg.spec)
tty.error(
f"Recovery of install dir of {task.pkg.name} failed due to "
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
"The spec is now uninstalled."
)
# Unwrap the actual installation exception.
raise e.inner_exception
except (Exception, SystemExit) as exc:
# Overwrite process exception handling
self._update_failed(task, True, exc)
# Best effort installs suppress the exception and mark the
@ -2201,25 +2367,22 @@ def install(self) -> None:
# lower levels -- skip printing if already printed.
# TODO: sort out this and SpackError.print_context()
tty.error(
f"Failed to install {pkg.name} due to "
f"{exc.__class__.__name__}: {str(exc)}"
f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}"
)
# Terminate if requested to do so on the first failure.
if self.fail_fast:
raise spack.error.InstallError(
f"{fail_fast_err}: {str(exc)}", pkg=pkg
) from exc
raise spack.error.InstallError(f"{_fail_fast_err}: {str(exc)}", pkg=pkg) from exc
# Terminate when a single build request has failed, or summarize errors later.
if task.is_build_request:
if single_requested_spec:
if len(self.build_requests) == 1:
raise
failed_build_requests.append((pkg, pkg_id, str(exc)))
return (pkg, pkg_id, str(exc))
finally:
# Remove the install prefix if anything went wrong during
# install.
if not keep_prefix and not action == InstallAction.OVERWRITE:
if not keep_prefix and action != InstallAction.OVERWRITE:
pkg.remove_prefix()
# Perform basic task cleanup for the installed spec to
@ -2227,6 +2390,83 @@ def install(self) -> None:
if pkg.spec.installed:
self._cleanup_task(pkg)
return None
def install(self) -> None:
"""Install the requested package(s) and or associated dependencies."""
self._init_queue()
failed_build_requests = []
install_status = InstallStatus(len(self.build_pq))
active_tasks: List[Task] = []
# Only enable the terminal status line when we're in a tty without debug info
# enabled, so that the output does not get cluttered.
term_status = TermStatusLine(
enabled=sys.stdout.isatty() and tty.msg_enabled() and not tty.is_debug()
)
# While a task is ready or tasks are running
while self._peek_ready_task() or active_tasks:
# While there's space for more active tasks to start
while len(active_tasks) < self.max_active_tasks:
task = self._pop_ready_task()
if not task:
# no ready tasks
break
active_tasks.append(task)
try:
# Attempt to start the task's package installation
self.start_task(task, install_status, term_status)
except BaseException as e:
# Delegating any exception that happens in start_task() to be
# handled in complete_task()
task.error_result = e
time.sleep(0.1)
# Check if any tasks have completed and add to list
done = [task for task in active_tasks if task.poll()]
# Iterate through the done tasks and complete them
for task in done:
try:
# If complete_task does not return None, the build request failed
failure = self.complete_task(task, install_status)
if failure:
failed_build_requests.append(failure)
except Exception:
# Terminate any active child processes if there's an installation error
for task in active_tasks:
task.terminate()
raise
finally:
active_tasks.remove(task)
self._clear_removed_tasks()
if self.build_pq:
task = self._pop_task()
assert task.priority != 0, "Found ready task after _peek_ready_task returned None"
# If the spec has uninstalled dependencies
# and no active tasks running, then there must be
# a bug in the code (e.g., priority queue or uninstalled
# dependencies handling). So terminate under the assumption
# that all subsequent task will have non-zero priorities or may
# be dependencies of this task.
term_status.clear()
tty.error(
f"Detected uninstalled dependencies for {task.pkg_id}: " f"{task.uninstalled_deps}"
)
left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed]
if not left:
tty.warn(f"{task.pkg_id} does NOT actually have any uninstalled deps left")
dep_str = "dependencies" if task.priority > 1 else "dependency"
raise spack.error.InstallError(
f"Cannot proceed with {task.pkg_id}: {task.priority} uninstalled "
f"{dep_str}: {','.join(task.uninstalled_deps)}",
pkg=task.pkg,
)
# Cleanup, which includes releasing all of the read locks
self._cleanup_all_tasks()
@ -2318,7 +2558,6 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
self.timer = timer.Timer()
# If we are using a padded path, filter the output to compress padded paths
# The real log still has full-length paths.
padding = spack.config.get("config:install_tree:padded_length", None)
self.filter_fn = spack.util.path.padding_filter if padding else None
@ -2523,44 +2762,7 @@ def deprecate(spec: "spack.spec.Spec", deprecator: "spack.spec.Spec", link_fn) -
link_fn(deprecator.prefix, spec.prefix)
class OverwriteInstall:
    """Runs an install task on top of an existing install prefix, backing up
    the original prefix and restoring it if the new install fails."""

    def __init__(
        self,
        installer: PackageInstaller,
        database: spack.database.Database,
        task: Task,
        install_status: InstallStatus,
    ):
        self.installer = installer  # installer that executes the task
        self.database = database  # DB used to drop the spec if recovery fails
        self.task = task  # installation task to run
        self.install_status = install_status  # status tracker forwarded to the installer

    def install(self):
        """
        Try to run the install task overwriting the package prefix.
        If this fails, try to recover the original install prefix. If that fails
        too, mark the spec as uninstalled. This function always raises the original
        install error if installation fails.
        """
        try:
            # Back up the existing prefix; it is restored if the install raises.
            with fs.replace_directory_transaction(self.task.pkg.prefix):
                self.installer._install_task(self.task, self.install_status)
        except fs.CouldNotRestoreDirectoryBackup as e:
            # Restoration failed too: the prefix is in an unknown state, so
            # remove the spec from the database entirely.
            self.database.remove(self.task.pkg.spec)
            tty.error(
                f"Recovery of install dir of {self.task.pkg.name} failed due to "
                f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
                "The spec is now uninstalled."
            )

            # Unwrap the actual installation exception.
            raise e.inner_exception
class BadInstallPhase(spack.error.InstallError):
    """Raised when an install phase option is not allowed for a package."""

    def __init__(self, pkg_name, phase):
        super().__init__(f"'{phase}' is not a valid phase for package {pkg_name}")

View File

@ -1,276 +1,131 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tools to produce reports of spec installations"""
"""Hooks to produce reports of spec installations"""
import collections
import contextlib
import functools
import gzip
import os
import time
import traceback
from typing import Any, Callable, Dict, List, Type
import llnl.util.lang
import llnl.util.filesystem as fs
import spack.build_environment
import spack.install_test
import spack.installer
import spack.package_base
import spack.reporters
import spack.spec
import spack.util.spack_json as sjson
reporter = None
report_file = None
Property = collections.namedtuple("Property", ["name", "value"])
class InfoCollector:
"""Base class for context manager objects that collect information during the execution of
certain package functions.
class Record(dict):
    """Dictionary subclass that exposes its keys as attributes."""

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails; fall back to keys.
        if name in self:
            return self[name]
        # Use dict.get for the "name" key here: referencing self.name would
        # re-enter __getattr__ and recurse forever when "name" itself is absent.
        raise AttributeError(f"RequestRecord for {self.get('name')} has no attribute {name}")
The data collected is available through the ``specs`` attribute once exited, and it's
organized as a list where each item represents the installation of one spec.
"""
wrap_class: Type
do_fn: str
_backup_do_fn: Callable
input_specs: List[spack.spec.Spec]
specs: List[Dict[str, Any]]
def __init__(self, wrap_class: Type, do_fn: str, specs: List[spack.spec.Spec]):
#: Class for which to wrap a function
self.wrap_class = wrap_class
#: Action to be reported on
self.do_fn = do_fn
#: Backup of the wrapped class function
self._backup_do_fn = getattr(self.wrap_class, do_fn)
#: Specs that will be acted on
self.input_specs = specs
#: This is where we record the data that will be included in our report
self.specs: List[Dict[str, Any]] = []
def fetch_log(self, pkg: spack.package_base.PackageBase) -> str:
"""Return the stdout log associated with the function being monitored
Args:
pkg: package under consideration
"""
raise NotImplementedError("must be implemented by derived classes")
def extract_package_from_signature(self, instance, *args, **kwargs):
"""Return the package instance, given the signature of the wrapped function."""
raise NotImplementedError("must be implemented by derived classes")
def __enter__(self):
# Initialize the spec report with the data that is available upfront.
Property = collections.namedtuple("Property", ["name", "value"])
for input_spec in self.input_specs:
name_fmt = "{0}_{1}"
name = name_fmt.format(input_spec.name, input_spec.dag_hash(length=7))
spec_record = {
"name": name,
"nerrors": None,
"nfailures": None,
"npackages": None,
"time": None,
"timestamp": time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()),
"properties": [],
"packages": [],
}
spec_record["properties"].append(Property("architecture", input_spec.architecture))
self.init_spec_record(input_spec, spec_record)
self.specs.append(spec_record)
def gather_info(wrapped_fn):
"""Decorates a function to gather useful information for a CI report."""
@functools.wraps(wrapped_fn)
def wrapper(instance, *args, **kwargs):
pkg = self.extract_package_from_signature(instance, *args, **kwargs)
package = {
"name": pkg.name,
"id": pkg.spec.dag_hash(),
"elapsed_time": None,
"result": None,
"message": None,
"installed_from_binary_cache": False,
}
# Append the package to the correct spec report. In some
# cases it may happen that a spec that is asked to be
# installed explicitly will also be installed as a
# dependency of another spec. In this case append to both
# spec reports.
for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7))
try:
item = next((x for x in self.specs if x["name"] == name))
item["packages"].append(package)
except StopIteration:
pass
start_time = time.time()
try:
value = wrapped_fn(instance, *args, **kwargs)
package["stdout"] = self.fetch_log(pkg)
package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
self.on_success(pkg, kwargs, package)
return value
except spack.build_environment.InstallError as exc:
# An InstallError is considered a failure (the recipe
# didn't work correctly)
package["result"] = "failure"
package["message"] = exc.message or "Installation failure"
package["stdout"] = self.fetch_log(pkg)
package["stdout"] += package["message"]
package["exception"] = exc.traceback
raise
except (Exception, BaseException) as exc:
# Everything else is an error (the installation
# failed outside of the child process)
package["result"] = "error"
package["message"] = str(exc) or "Unknown error"
package["stdout"] = self.fetch_log(pkg)
package["stdout"] += package["message"]
package["exception"] = traceback.format_exc()
raise
finally:
package["elapsed_time"] = time.time() - start_time
return wrapper
setattr(self.wrap_class, self.do_fn, gather_info(getattr(self.wrap_class, self.do_fn)))
def on_success(self, pkg: spack.package_base.PackageBase, kwargs, package_record):
"""Add additional properties on function call success."""
raise NotImplementedError("must be implemented by derived classes")
def init_spec_record(self, input_spec: spack.spec.Spec, record):
"""Add additional entries to a spec record when entering the collection context."""
def __exit__(self, exc_type, exc_val, exc_tb):
# Restore the original method in PackageBase
setattr(self.wrap_class, self.do_fn, self._backup_do_fn)
for spec in self.specs:
spec["npackages"] = len(spec["packages"])
spec["nfailures"] = len([x for x in spec["packages"] if x["result"] == "failure"])
spec["nerrors"] = len([x for x in spec["packages"] if x["result"] == "error"])
spec["time"] = sum(float(x["elapsed_time"]) for x in spec["packages"])
class BuildInfoCollector(InfoCollector):
"""Collect information for the PackageInstaller._install_task method.
Args:
specs: specs whose install information will be recorded
"""
def __init__(self, specs: List[spack.spec.Spec]):
super().__init__(spack.installer.PackageInstaller, "_install_task", specs)
def init_spec_record(self, input_spec, record):
# Check which specs are already installed and mark them as skipped
for dep in filter(lambda x: x.installed, input_spec.traverse()):
package = {
"name": dep.name,
"id": dep.dag_hash(),
"elapsed_time": "0.0",
"result": "skipped",
"message": "Spec already installed",
}
record["packages"].append(package)
def on_success(self, pkg, kwargs, package_record):
package_record["result"] = "success"
def fetch_log(self, pkg):
try:
if os.path.exists(pkg.install_log_path):
stream = gzip.open(pkg.install_log_path, "rt", encoding="utf-8")
def __setattr__(self, name, value):
if name.startswith("_"):
super().__setattr__(name, value)
else:
stream = open(pkg.log_path, encoding="utf-8")
self[name] = value
class RequestRecord(Record):
    """Record of one install request (a root spec), aggregating the
    per-package install records produced while servicing the request."""

    def __init__(self, spec):
        super().__init__()
        self._spec = spec
        self.name = spec.name
        # Aggregates are filled in by summarize(); None until then.
        # NOTE: this must be "nerrors" (not "errors") to match summarize().
        self.nerrors = None
        self.nfailures = None
        self.npackages = None
        self.time = None
        # RFC-2822-style timestamp; "%d" (day of month) was previously
        # mistyped as a literal "d".
        self.timestamp = time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime())
        self.properties = [
            Property("architecture", spec.architecture),
            # Property("compiler", spec.compiler),
        ]
        self.packages = []

    def skip_installed(self):
        """Record every already-installed spec in the DAG as skipped."""
        for dep in filter(lambda x: x.installed, self._spec.traverse()):
            record = InstallRecord(dep)
            record.skip(msg="Spec already installed")
            self.packages.append(record)

    def append_record(self, record):
        """Append one per-package install record to this request."""
        self.packages.append(record)

    def summarize(self):
        """Compute aggregate counts and the total elapsed time."""
        self.npackages = len(self.packages)
        self.nfailures = len([r for r in self.packages if r.result == "failure"])
        self.nerrors = len([r for r in self.packages if r.result == "error"])
        self.time = sum(float(r.elapsed_time or 0.0) for r in self.packages)
class SpecRecord(Record):
    """Base record for a single spec: identity plus timing bookkeeping."""

    def __init__(self, spec):
        super().__init__()
        # Underscore-prefixed attributes stay off the reported dict entries.
        self._spec = spec
        self._package = spec.package
        self._start_time = None
        # Reported fields.
        self.name = spec.name
        self.id = spec.dag_hash()
        self.elapsed_time = None

    def start(self):
        """Begin timing this record."""
        self._start_time = time.time()

    def skip(self, msg):
        """Mark this spec as skipped, with *msg* and zero elapsed time."""
        self.result = "skipped"
        self.message = msg
        self.elapsed_time = 0.0
class InstallRecord(SpecRecord):
def __init__(self, spec):
super().__init__(spec)
self.result = None
self.message = None
self.installed_from_binary_cache = None
def fetch_log(self):
try:
if os.path.exists(self._package.install_log_path):
stream = gzip.open(self._package.install_log_path, "rt", encoding="utf-8")
else:
stream = open(self._package.log_path, encoding="utf-8")
with stream as f:
return f.read()
except OSError:
return f"Cannot open log for {pkg.spec.cshort_spec}"
return f"Cannot open log for {self._spec.cshort_spec}"
def extract_package_from_signature(self, instance, *args, **kwargs):
return args[0].pkg
class TestInfoCollector(InfoCollector):
"""Collect information for the PackageBase.do_test method.
Args:
specs: specs whose install information will be recorded
record_directory: record directory for test log paths
"""
dir: str
def __init__(self, specs: List[spack.spec.Spec], record_directory: str):
super().__init__(spack.package_base.PackageBase, "do_test", specs)
self.dir = record_directory
def on_success(self, pkg, kwargs, package_record):
externals = kwargs.get("externals", False)
skip_externals = pkg.spec.external and not externals
if skip_externals:
package_record["result"] = "skipped"
package_record["result"] = "success"
def fetch_log(self, pkg: spack.package_base.PackageBase):
log_file = os.path.join(self.dir, spack.install_test.TestSuite.test_log_name(pkg.spec))
def fetch_time(self):
try:
with open(log_file, "r", encoding="utf-8") as stream:
return "".join(stream.readlines())
with open(self._package.times_log_path, "r", encoding="utf-8") as f:
data = sjson.load(f.read())
return data["total"]
except Exception:
return f"Cannot open log for {pkg.spec.cshort_spec}"
return None
def extract_package_from_signature(self, instance, *args, **kwargs):
return instance
def succeed(self):
    """Mark this record as a successful install: capture the build log,
    binary-cache provenance, and the elapsed time since start()."""
    pkg = self._package
    self.result = "success"
    self.stdout = self.fetch_log()
    self.installed_from_binary_cache = pkg.installed_from_binary_cache
    assert self._start_time, "Start time is None"
    self.elapsed_time = time.time() - self._start_time
@contextlib.contextmanager
def build_context_manager(
reporter: spack.reporters.Reporter, filename: str, specs: List[spack.spec.Spec]
):
"""Decorate a package to generate a report after the installation function is executed.
Args:
reporter: object that generates the report
filename: filename for the report
specs: specs that need reporting
"""
collector = BuildInfoCollector(specs)
try:
with collector:
yield
finally:
reporter.build_report(filename, specs=collector.specs)
@contextlib.contextmanager
def test_context_manager(
reporter: spack.reporters.Reporter,
filename: str,
specs: List[spack.spec.Spec],
raw_logs_dir: str,
):
"""Decorate a package to generate a report after the test function is executed.
Args:
reporter: object that generates the report
filename: filename for the report
specs: specs that need reporting
raw_logs_dir: record directory for test log paths
"""
collector = TestInfoCollector(specs, raw_logs_dir)
try:
with collector:
yield
finally:
reporter.test_report(filename, specs=collector.specs)
def fail(self, exc):
    """Mark this record as failed, classifying *exc* and capturing
    the log output and elapsed time."""
    if isinstance(exc, spack.build_environment.InstallError):
        # A failure of the package's own install recipe.
        self.result = "failure"
        self.message = exc.message or "Installation failure"
        self.exception = exc.traceback
    else:
        # Anything else went wrong outside the recipe itself.
        self.result = "error"
        self.message = str(exc) or "Unknown error"
        self.exception = traceback.format_exc()
    self.stdout = self.fetch_log() + self.message
    assert self._start_time, "Start time is None"
    self.elapsed_time = time.time() - self._start_time

View File

@ -278,6 +278,8 @@ def build_report(self, report_dir, specs):
self.multiple_packages = False
num_packages = 0
for spec in specs:
spec.summarize()
# Do not generate reports for packages that were installed
# from the binary cache.
spec["packages"] = [
@ -362,6 +364,8 @@ def test_report(self, report_dir, specs):
"""Generate reports for each package in each spec."""
tty.debug("Processing test report")
for spec in specs:
spec.summarize()
duration = 0
if "time" in spec:
duration = int(spec["time"])

View File

@ -17,12 +17,16 @@ def concretization_report(self, filename, msg):
pass
def build_report(self, filename, specs):
for spec in specs:
spec.summarize()
if not (os.path.splitext(filename))[1]:
# Ensure the report name will end with the proper extension;
# otherwise, it currently defaults to the "directory" name.
filename = filename + ".xml"
report_data = {"specs": specs}
with open(filename, "w", encoding="utf-8") as f:
env = spack.tengine.make_environment()
t = env.get_template(self._jinja_template)

View File

@ -66,30 +66,33 @@ def test_install_package_and_dependency(
assert 'errors="0"' in content
@pytest.mark.disable_clean_stage_check
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
def _check_runtests_none(pkg):
    """unit_test_check stub: assert tests are disabled for every package."""
    assert not pkg.run_tests
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
def _check_runtests_dttop(pkg):
    """unit_test_check stub: only the "dttop" package has tests enabled."""
    assert pkg.run_tests == (pkg.name == "dttop")
def _check_runtests_all(pkg):
    """unit_test_check stub: assert tests are enabled for every package."""
    assert pkg.run_tests
@pytest.mark.disable_clean_stage_check
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_none)
install("-v", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_root(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests == (pkg.name == "dttop")
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_dttop)
install("--test=root", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_all(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_all)
install("--test=all", "pkg-a")
@ -377,6 +380,7 @@ def test_install_from_file(spec, concretize, error_code, tmpdir):
def test_junit_output_with_failures(tmpdir, exc_typename, msg):
with tmpdir.as_cwd():
install(
"--verbose",
"--log-format=junit",
"--log-file=test.xml",
"raiser",
@ -409,6 +413,21 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
assert msg in content
def _throw(task, exc_typename, exc_type, msg):
    """Fail ``task`` with a newly constructed builtin exception.

    NOTE(review): the ``exc_type`` parameter is ignored — it is immediately
    shadowed by the builtins lookup below. Callers still pass it, so the
    signature is kept; confirm whether it can be dropped file-wide.
    """
    # ``task`` is a spack.installer.Task
    exc_type = getattr(builtins, exc_typename)
    exc = exc_type(msg)
    task.fail(exc)
def _runtime_error(task, *args, **kwargs):
    """Monkeypatch stand-in for Task.complete that fails with a RuntimeError."""
    _throw(task, "RuntimeError", spack.error.InstallError, "something weird happened")
def _keyboard_error(task, *args, **kwargs):
    """Monkeypatch stand-in for Task.complete that fails with KeyboardInterrupt."""
    _throw(task, "KeyboardInterrupt", KeyboardInterrupt, "Ctrl-C strikes again")
@pytest.mark.disable_clean_stage_check
@pytest.mark.parametrize(
"exc_typename,expected_exc,msg",
@ -428,14 +447,17 @@ def test_junit_output_with_errors(
tmpdir,
monkeypatch,
):
def just_throw(*args, **kwargs):
exc_type = getattr(builtins, exc_typename)
raise exc_type(msg)
monkeypatch.setattr(spack.installer.PackageInstaller, "_install_task", just_throw)
throw = _keyboard_error if expected_exc == KeyboardInterrupt else _runtime_error
monkeypatch.setattr(spack.installer.BuildTask, "complete", throw)
with tmpdir.as_cwd():
install("--log-format=junit", "--log-file=test.xml", "libdwarf", fail_on_error=False)
install(
"--verbose",
"--log-format=junit",
"--log-file=test.xml",
"trivial-install-test-dependent",
fail_on_error=False,
)
assert isinstance(install.error, expected_exc)
@ -445,7 +467,7 @@ def just_throw(*args, **kwargs):
content = filename.open().read()
# Only libelf error is reported (through libdwarf root spec). libdwarf
# Only original error is reported, dependent
# install is skipped and it is not an error.
assert 'tests="0"' not in content
assert 'failures="0"' in content
@ -1079,7 +1101,10 @@ def install_use_buildcache(opt):
@pytest.mark.disable_clean_stage_check
def test_padded_install_runtests_root(install_mockery, mock_fetch):
spack.config.set("config:install_tree:padded_length", 255)
output = install("--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False)
output = install(
"--verbose", "--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False
)
print(output)
assert output.count("method not implemented") == 1

View File

@ -721,18 +721,40 @@ def test_install_splice_root_from_binary(
assert len(spack.store.STORE.db.query()) == len(list(out.traverse()))
def test_install_task_use_cache(install_mockery, monkeypatch):
class MockInstallStatus:
    """Do-nothing stand-in for the installer's install-status object in tests."""

    def next_pkg(self, *args, **kwargs):
        """Accept and ignore progression to the next package."""
        return None

    def set_term_title(self, *args, **kwargs):
        """Accept and ignore terminal-title updates."""
        return None

    def get_progress(self):
        """Report no progress information."""
        return None
class MockTermStatusLine:
    """Do-nothing stand-in for the terminal status line in tests."""

    def add(self, *args, **kwargs):
        """Accept and ignore a new status entry."""
        return None

    def clear(self):
        """Accept and ignore a request to clear the status line."""
        return None
def test_installing_task_use_cache(install_mockery, monkeypatch):
installer = create_installer(["trivial-install-test-package"], {})
request = installer.build_requests[0]
task = create_build_task(request.pkg)
install_status = MockInstallStatus()
term_status = MockTermStatusLine()
monkeypatch.setattr(inst, "_install_from_cache", _true)
installer._install_task(task, None)
installer.start_task(task, install_status, term_status)
installer.complete_task(task, install_status)
assert request.pkg_id in installer.installed
def test_install_task_requeue_build_specs(install_mockery, monkeypatch, capfd):
"""Check that a missing build_spec spec is added by _install_task."""
"""Check that a missing build_spec spec is added by _complete_task."""
# This test also ensures coverage of most of the new
# _requeue_with_build_spec_tasks method.
@ -746,12 +768,12 @@ def _missing(*args, **kwargs):
request = installer.build_requests[0]
task = create_build_task(request.pkg)
# Drop one of the specs so its task is missing before _install_task
popped_task = installer._pop_task()
# Drop one of the specs so its task is missing before _complete_task
popped_task = installer._pop_ready_task()
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
monkeypatch.setattr(task, "execute", _missing)
installer._install_task(task, None)
monkeypatch.setattr(task, "complete", _missing)
installer._complete_task(task, None)
# Ensure the dropped task/spec was added back by _complete_task
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
@ -907,14 +929,16 @@ def test_update_failed_no_dependent_task(install_mockery):
def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
"""Test install with uninstalled dependencies."""
installer = create_installer(["dependent-install"], {})
installer = create_installer(["parallel-package-a"], {})
# Skip the actual installation and any status updates
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _noop)
monkeypatch.setattr(inst.Task, "start", _noop)
monkeypatch.setattr(inst.Task, "poll", _noop)
monkeypatch.setattr(inst.Task, "complete", _noop)
monkeypatch.setattr(inst.PackageInstaller, "_update_installed", _noop)
monkeypatch.setattr(inst.PackageInstaller, "_update_failed", _noop)
msg = "Cannot proceed with dependent-install"
msg = "Cannot proceed with parallel-package-a"
with pytest.raises(spack.error.InstallError, match=msg):
installer.install()
@ -924,7 +948,7 @@ def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
def test_install_failed(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
installer = create_installer(["pkg-b"], {})
installer = create_installer(["parallel-package-a"], {})
# Make sure the package is identified as failed
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
@ -939,7 +963,7 @@ def test_install_failed(install_mockery, monkeypatch, capsys):
def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
installer = create_installer(["pkg-a"], {"fail_fast": False})
installer = create_installer(["parallel-package-a"], {"fail_fast": False})
# Make sure the package is identified as failed
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
@ -949,7 +973,7 @@ def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
out = str(capsys.readouterr())
assert "failed to install" in out
assert "Skipping build of pkg-a" in out
assert "Skipping build of parallel-package-a" in out
def _interrupt(installer, task, install_status, **kwargs):
@ -965,9 +989,9 @@ def test_install_fail_on_interrupt(install_mockery, mock_fetch, monkeypatch):
spec_name = "pkg-a"
err_msg = "mock keyboard interrupt for {0}".format(spec_name)
installer = create_installer([spec_name], {"fake": True})
setattr(inst.PackageInstaller, "_real_install_task", inst.PackageInstaller._install_task)
setattr(inst.PackageInstaller, "_real_install_task", inst.PackageInstaller._complete_task)
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _interrupt)
monkeypatch.setattr(inst.PackageInstaller, "_complete_task", _interrupt)
with pytest.raises(KeyboardInterrupt, match=err_msg):
installer.install()
@ -982,21 +1006,26 @@ class MyBuildException(Exception):
pass
_old_complete_task = None
def _install_fail_my_build_exception(installer, task, install_status, **kwargs):
if task.pkg.name == "pkg-a":
print("Raising MyBuildException for pkg-a")
raise MyBuildException("mock internal package build error for pkg-a")
else:
# No need for more complex logic here because no splices
task.execute(install_status)
installer._update_installed(task)
_old_complete_task(installer, task, install_status)
def test_install_fail_single(install_mockery, mock_fetch, monkeypatch):
"""Test expected results for failure of single package."""
global _old_complete_task
installer = create_installer(["pkg-a"], {"fake": True})
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_fail_my_build_exception)
_old_complete_task = inst.PackageInstaller._complete_task
monkeypatch.setattr(inst.PackageInstaller, "_complete_task", _install_fail_my_build_exception)
with pytest.raises(MyBuildException, match="mock internal package build error for pkg-a"):
installer.install()
@ -1008,10 +1037,12 @@ def test_install_fail_single(install_mockery, mock_fetch, monkeypatch):
def test_install_fail_multi(install_mockery, mock_fetch, monkeypatch):
"""Test expected results for failure of multiple packages."""
global _old_complete_task
installer = create_installer(["pkg-a", "pkg-c"], {"fake": True})
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_fail_my_build_exception)
_old_complete_task = inst.PackageInstaller._complete_task
monkeypatch.setattr(inst.PackageInstaller, "_complete_task", _install_fail_my_build_exception)
with pytest.raises(spack.error.InstallError, match="Installation request failed"):
installer.install()
@ -1023,22 +1054,30 @@ def test_install_fail_multi(install_mockery, mock_fetch, monkeypatch):
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
"""Test fail_fast install when an install failure is detected."""
# Note: this test depends on the order of the installations
b, c = spack.concretize.concretize_one("pkg-b"), spack.concretize.concretize_one("pkg-c")
b_id, c_id = inst.package_id(b), inst.package_id(c)
a = spack.concretize.concretize_one("parallel-package-a")
installer = create_installer([c, b], {"fail_fast": True})
a_id = inst.package_id(a)
b_id = inst.package_id(a["parallel-package-b"])
c_id = inst.package_id(a["parallel-package-c"])
installer = create_installer([a], {"fail_fast": True})
# Make sure all packages are identified as failed
# This will prevent b from installing, which will cause the build of c to be skipped.
# This will prevent a and b from installing, which will cause the build of c to be skipped
# and the active processes to be killed.
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
installer.max_active_tasks = 2
with pytest.raises(spack.error.InstallError, match="after first install failure"):
installer.install()
assert c_id in installer.failed
assert b_id not in installer.failed, "Expected no attempt to install pkg-c"
assert f"{c_id} failed to install" in capsys.readouterr().err
assert b_id in installer.failed, "Expected b to be marked as failed"
assert c_id in installer.failed, "Exepected c to be marked as failed"
assert (
a_id not in installer.installed
), "Package a cannot install due to its dependencies failing"
# check that b's active process got killed when c failed
assert f"{b_id} failed to install" in capsys.readouterr().err
def _test_install_fail_fast_on_except_patch(installer, **kwargs):
@ -1181,94 +1220,91 @@ def test_install_implicit(install_mockery, mock_fetch):
assert not create_build_task(pkg).explicit
def test_overwrite_install_backup_success(temporary_store, config, mock_packages, tmpdir):
# Install that wipes the prefix directory
def wipe_prefix(pkg, install_args):
    """Simulated fatal install: destroy and recreate the prefix, then raise.

    Monkeypatched in as ``inst.build_process`` to verify that an overwrite
    install restores the backed-up prefix after a failed build.
    """
    shutil.rmtree(pkg.prefix, ignore_errors=True)
    fs.mkdirp(pkg.prefix)
    raise Exception("Some fatal install error")
def fail(*args, **kwargs):
    """Monkeypatch target that must never be called by the code under test."""
    assert False
def test_overwrite_install_backup_success(
monkeypatch, temporary_store, config, mock_packages, tmpdir
):
"""
When doing an overwrite install that fails, Spack should restore the backup
of the original prefix, and leave the original spec marked installed.
"""
# Note: this test relies on installing a package with no dependencies
# Get a build task. TODO: refactor this to avoid calling internal methods
installer = create_installer(["pkg-c"])
# Get a build task. TODO: Refactor this to avoid calling internal methods.
installer = create_installer(["pkg-b"])
installer._init_queue()
task = installer._pop_task()
install_status = MockInstallStatus()
term_status = MockTermStatusLine()
# Make sure the install prefix exists with some trivial file
installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
class InstallerThatWipesThePrefixDir:
def _install_task(self, task, install_status):
shutil.rmtree(task.pkg.prefix, ignore_errors=True)
fs.mkdirp(task.pkg.prefix)
raise Exception("Some fatal install error")
monkeypatch.setattr(inst, "build_process", wipe_prefix)
class FakeDatabase:
called = False
def remove(self, spec):
self.called = True
fake_installer = InstallerThatWipesThePrefixDir()
fake_db = FakeDatabase()
overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)
# Make sure the package is not marked uninstalled
monkeypatch.setattr(spack.store.STORE.db, "remove", fail)
# Make sure that the installer does an overwrite install
monkeypatch.setattr(task, "_install_action", inst.InstallAction.OVERWRITE)
# Installation should throw the installation exception, not the backup
# failure.
installer.start_task(task, install_status, term_status)
with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
installer.complete_task(task, install_status)
# Make sure the package is not marked uninstalled and the original dir
# is back.
assert not fake_db.called
# Check that the original file is back.
assert os.path.exists(installed_file)
def test_overwrite_install_backup_failure(temporary_store, config, mock_packages, tmpdir):
# Install that removes the backup directory, which is at the same level as
# the prefix, starting with .backup
def remove_backup(pkg, install_args):
    """Fatal install that also destroys the prefix's ``.backup*`` siblings.

    Deleting the backup directory (created next to the prefix) makes it
    impossible to restore the original prefix after the failure raised below.
    """
    prefix_parent = os.path.dirname(os.path.normpath(pkg.prefix))
    for backup_dir in glob.iglob(os.path.join(prefix_parent, ".backup*")):
        shutil.rmtree(backup_dir)
    raise Exception("Some fatal install error")
def test_overwrite_install_backup_failure(
monkeypatch, temporary_store, config, mock_packages, tmpdir
):
"""
When doing an overwrite install that fails, Spack should try to recover the
original prefix. If that fails, the spec is lost, and it should be removed
from the database.
"""
# Note: this test relies on installing a package with no dependencies
class InstallerThatAccidentallyDeletesTheBackupDir:
def _install_task(self, task, install_status):
# Remove the backup directory, which is at the same level as the prefix,
# starting with .backup
backup_glob = os.path.join(
os.path.dirname(os.path.normpath(task.pkg.prefix)), ".backup*"
)
for backup in glob.iglob(backup_glob):
shutil.rmtree(backup)
raise Exception("Some fatal install error")
class FakeDatabase:
called = False
def remove(self, spec):
self.called = True
# Get a build task. TODO: refactor this to avoid calling internal methods
installer = create_installer(["pkg-c"])
installer._init_queue()
task = installer._pop_task()
install_status = MockInstallStatus()
term_status = MockTermStatusLine()
# Make sure the install prefix exists
installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
monkeypatch.setattr(inst, "build_process", remove_backup)
fake_installer = InstallerThatAccidentallyDeletesTheBackupDir()
fake_db = FakeDatabase()
overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)
# Installation should throw the installation exception, not the backup
# failure.
with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
# Make sure that the installer does an overwrite install
monkeypatch.setattr(task, "_install_action", inst.InstallAction.OVERWRITE)
# Make sure that `remove` was called on the database after an unsuccessful
# attempt to restore the backup.
assert fake_db.called
# This error is raised while handling the original install error
installer.start_task(task, install_status, term_status)
with pytest.raises(Exception, match="No such spec in database"):
installer.complete_task(task, install_status)
def test_term_status_line():

View File

@ -1335,7 +1335,7 @@ _spack_info() {
_spack_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --use-buildcache --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete --add --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse --fresh-roots --reuse-deps --deprecated"
SPACK_COMPREPLY="-h --help --only -u --until -p --concurrent-packages -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --use-buildcache --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete --add --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse --fresh-roots --reuse-deps --deprecated"
else
_all_packages
fi

View File

@ -2025,7 +2025,7 @@ complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -f -a
complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -d 'list variants in strict name order; don'"'"'t group by condition'
# spack install
set -g __fish_spack_optspecs_spack_install h/help only= u/until= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse fresh-roots deprecated
set -g __fish_spack_optspecs_spack_install h/help only= u/until= p/concurrent-packages= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse fresh-roots deprecated
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 install' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command install' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command install' -s h -l help -d 'show this help message and exit'
@ -2033,6 +2033,8 @@ complete -c spack -n '__fish_spack_using_command install' -l only -r -f -a 'pack
complete -c spack -n '__fish_spack_using_command install' -l only -r -d 'select the mode of installation'
complete -c spack -n '__fish_spack_using_command install' -s u -l until -r -f -a until
complete -c spack -n '__fish_spack_using_command install' -s u -l until -r -d 'phase to stop after when installing (default None)'
complete -c spack -n '__fish_spack_using_command install' -s p -l concurrent-packages -r -f -a concurrent_packages
complete -c spack -n '__fish_spack_using_command install' -s p -l concurrent-packages -r -d 'maximum number of packages to build concurrently'
complete -c spack -n '__fish_spack_using_command install' -s j -l jobs -r -f -a jobs
complete -c spack -n '__fish_spack_using_command install' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
complete -c spack -n '__fish_spack_using_command install' -l overwrite -f -a overwrite

View File

@ -0,0 +1,29 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import time
from llnl.util.filesystem import touch
from spack.package import *
class ParallelPackageA(Package):
    """Mock package with two dependencies (parallel-package-b/c) and a short
    sleeping ``install`` step, used to exercise concurrent package builds.
    """

    homepage = "http://www.example.com"
    # No sources to fetch; the install() method fabricates the prefix contents.
    has_code = False

    depends_on("parallel-package-b")
    depends_on("parallel-package-c")

    version("1.0")

    def install(self, spec, prefix):
        """Pretend to build for 2 seconds, then create a marker file."""
        print("I'm building!")
        time.sleep(2)
        print("I'm done!")
        touch(prefix.dummy_file)

View File

@ -0,0 +1,26 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import time
from llnl.util.filesystem import touch
from spack.package import *
class ParallelPackageB(Package):
    """Mock leaf package (no dependencies) with a slow sleeping ``install``
    step, used to exercise concurrent package builds.
    """

    homepage = "http://www.example.com"
    # No sources to fetch; the install() method fabricates the prefix contents.
    has_code = False

    version("1.0")

    def install(self, spec, prefix):
        """Pretend to build for 6 seconds, then create a marker file."""
        print("I'm building!")
        time.sleep(6)
        print("I'm done!")
        touch(prefix.dummy_file)

View File

@ -0,0 +1,26 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import time
from llnl.util.filesystem import touch
from spack.package import *
class ParallelPackageC(Package):
    """Mock leaf package (no dependencies) with a short sleeping ``install``
    step, used to exercise concurrent package builds.

    NOTE(review): the second print message suggests tests expect this install
    to be interrupted before completion — confirm against the fail-fast tests
    that use it.
    """

    homepage = "http://www.example.com"
    # No sources to fetch; the install() method fabricates the prefix contents.
    has_code = False

    version("1.0")

    def install(self, spec, prefix):
        """Pretend to build for 2 seconds, then create a marker file."""
        print("Package 3 building!")
        time.sleep(2)
        print("Ideally shouldnt get here and it should fail")
        touch(prefix.dummy_file)