Compare commits

...

27 Commits

Author SHA1 Message Date
Gregory Becker
559ace64e1
reporters wip: working for installs
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-04-11 15:47:21 -07:00
Gregory Becker
a2441f4656
fixup after rebase
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-04-09 10:19:29 -07:00
kshea21
dcdbbd235d
style 2025-04-09 09:49:41 -07:00
Gregory Becker
be03df1b86
fix overwrite test 2025-04-09 09:49:40 -07:00
kshea21
e3d3230c9b
new mock packages 2025-04-09 09:49:40 -07:00
Gregory Becker
e687bb902f
cleanup from debugging 2025-04-09 09:49:40 -07:00
Gregory Becker
f1638365a9
refactor transaction across start/complete 2025-04-09 09:49:40 -07:00
Gregory Becker
4e9547703c
overwrite tests: work on macos 2025-04-09 09:49:38 -07:00
kshea21
56df316cc2
overwrite install refactoring and tests 2025-04-09 09:48:10 -07:00
kshea21
0cfd514c0c
spack commands --update-completion 2025-04-09 09:47:22 -07:00
Todd Gamblin
92e1b1795b
fix style issues and refactor a bit
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:47:22 -07:00
kshea21
d8362d02c7
pushing to rebase 2025-04-09 09:47:22 -07:00
kshea21
57a5722ba7
some style things 2025-04-09 09:47:19 -07:00
Todd Gamblin
1e5e416df0
docs: add nitpick ignore for internal multiprocessing.context.Process class
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:46:21 -07:00
Todd Gamblin
8d0923f29e
add quotes to type annotation
Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-04-09 09:46:20 -07:00
kshea21
4a153a185b
refactors and test fixes 2025-04-09 09:46:18 -07:00
kshea21
2da51eaec7
failing overwrite build deps 2025-04-09 09:44:01 -07:00
kshea21
974d10f32b
added no_op state tracking 2025-04-09 09:41:56 -07:00
kshea21
1a19c09c55
style 2025-04-09 09:41:56 -07:00
kshea21
11572c0caf
multi and single tests 2025-04-09 09:41:56 -07:00
kshea21
5d7ba48b36
style 2025-04-09 09:41:53 -07:00
kshea21
fa088e4975
review changes 2025-04-09 09:40:38 -07:00
kshea21
9d0133a771
process handle back 2025-04-09 09:38:37 -07:00
kshea21
0b1e388a0f
remove process handle 2025-04-09 09:38:35 -07:00
kshea21
60380a4724
untest -> test 2025-04-09 09:37:38 -07:00
kshea21
03a3546f14
style 2025-04-09 09:37:38 -07:00
kshea21
0808fd1a44
Rebasing -p/--concurrent-packages on develop 2025-04-09 09:37:32 -07:00
21 changed files with 1102 additions and 736 deletions

View File

@ -206,6 +206,7 @@ def setup(sphinx):
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
("py:class", "concurrent.futures._base.Executor"),
("py:class", "multiprocessing.context.Process"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),

View File

@ -1030,6 +1030,9 @@ def replace_directory_transaction(directory_name):
Returns:
temporary directory where ``directory_name`` has been moved
"""
for a, b, c in os.walk(directory_name):
print("PRE", a, b, c)
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
directory_name = os.path.abspath(directory_name)
@ -1048,6 +1051,7 @@ def replace_directory_transaction(directory_name):
try:
yield backup_dir
except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
print("hitting the proper exception block")
# Try to recover the original directory, if this fails, raise a
# composite exception.
try:
@ -1056,11 +1060,15 @@ def replace_directory_transaction(directory_name):
shutil.rmtree(directory_name)
os.rename(backup_dir, directory_name)
except Exception as outer_exception:
print("CouldNOtRestoreDirectBackup")
raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
for a, b, c in os.walk(directory_name):
print("RESTORED", a, b, c)
tty.debug("Directory recovered [{0}]".format(directory_name))
raise
else:
print("NO FAILURE")
# Otherwise delete the temporary directory
shutil.rmtree(tmpdir, ignore_errors=True)
tty.debug("Temporary directory deleted [{0}]".format(tmpdir))

View File

@ -92,7 +92,7 @@ def update_installations(self) -> None:
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
self.write(regenerate=False)
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
self.install_all()
self.install_all(fail_fast=True)
self.write(regenerate=True)
def load(self) -> None:

View File

@ -31,12 +31,14 @@
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
import inspect
import io
import multiprocessing
import os
import re
import signal
import stat
import sys
import traceback
import types
@ -46,6 +48,7 @@
from itertools import chain
from multiprocessing.connection import Connection
from typing import (
Any,
Callable,
Dict,
List,
@ -452,6 +455,35 @@ def optimization_flags(compiler, target):
return result
class FilterDefaultDynamicLinkerSearchPaths:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        # Directories are identified by their (inode, device) pair so that
        # symlinked aliases of the same directory compare equal.
        self.default_path_identifiers: Set[Tuple[int, int]] = set()
        if not dynamic_linker:
            return
        for candidate in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
            try:
                info = os.stat(candidate)
            except OSError:
                continue
            if stat.S_ISDIR(info.st_mode):
                self.default_path_identifiers.add((info.st_ino, info.st_dev))

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        """True when ``p`` resolves to one of the linker's default directories."""
        try:
            info = os.stat(p)
        except OSError:
            return False
        return (info.st_ino, info.st_dev) in self.default_path_identifiers

    def __call__(self, dirs: List[str]) -> List[str]:
        """Filter default-search-path entries out of a list of rpath dirs."""
        if not self.default_path_identifiers:
            return dirs
        return [d for d in dirs if not self.is_dynamic_loader_default_path(d)]
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH.
@ -1093,6 +1125,52 @@ def load_external_modules(context: SetupContext) -> None:
load_module(external_module)
class ProcessHandle:
    """Tracks a child process that is building/installing a single package."""

    def __init__(
        self,
        pkg: "spack.package_base.PackageBase",
        process: multiprocessing.Process,
        read_pipe: multiprocessing.connection.Connection,
        timeout: int,
    ):
        """
        Parameters:
            pkg: The package to be built and installed by the child process.
            process: The child process instance being managed/monitored.
            read_pipe: The pipe used for receiving information from the child process.
            timeout: seconds to wait for the child when completing it.
        """
        self.pkg = pkg
        self.process = process
        self.read_pipe = read_pipe
        self.timeout = timeout

    def poll(self) -> bool:
        """Return True when the child has sent data not yet received."""
        return self.read_pipe.poll()

    def complete(self):
        """Wait (if needed) for the child process to complete and return its
        exit status.

        See ``complete_build_process()``.
        """
        return complete_build_process(self)

    def terminate_processes(self):
        """Stop the child process after an installation failure or error."""
        if not self.process.is_alive():
            return
        # First give the child an opportunity to terminate gracefully...
        self.process.terminate()
        self.process.join(timeout=1)
        # ...then escalate to SIGKILL if it is still running.
        if self.process.is_alive():
            os.kill(self.process.pid, signal.SIGKILL)
            self.process.join()
def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable,
@ -1106,7 +1184,7 @@ def _setup_pkg_and_run(
``_setup_pkg_and_run`` is called by the child process created in
``start_build_process()``, and its main job is to run ``function()`` on behalf of
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
some Spack installation (see :ref:`spack.installer.PackageInstaller._complete_task`).
The child process is passed a ``write_pipe``, on which it's expected to send one of
the following:
@ -1243,19 +1321,30 @@ def terminate(self):
os.kill(self.p.pid, signal.SIGKILL)
self.p.join()
@property
def pid(self):
return self.p.pid
@property
def exitcode(self):
return self.p.exitcode
def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
def start_build_process(
pkg: "spack.package_base.PackageBase",
function: Callable,
kwargs: Dict[str, Any],
*,
timeout: Optional[int] = None,
):
"""Create a child process to do part of a spack build.
Args:
pkg (spack.package_base.PackageBase): package whose environment we should set up the
pkg: package whose environment we should set up the
child process for.
function (typing.Callable): argless function to run in the child process.
function: argless function to run in the child
process.
kwargs: additional keyword arguments to pass to ``function()``
timeout: maximum time allowed to finish the execution of function
Usage::
@ -1269,9 +1358,6 @@ def child_fun():
control over the environment, etc. without affecting other builds
that might be executed in the same spack call.
If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError.
"""
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
input_fd = None
@ -1321,10 +1407,27 @@ def child_fun():
if input_fd is not None:
input_fd.close()
def exitcode_msg(p):
typ = "exit" if p.exitcode >= 0 else "signal"
return f"{typ} {abs(p.exitcode)}"
# Create a ProcessHandle that the caller can use to track
# and complete the process started by this function.
process_handle = ProcessHandle(pkg, p, read_pipe, timeout=timeout)
return process_handle
def complete_build_process(handle: ProcessHandle):
"""
Waits for the child process to complete and handles its exit status.
If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError.
"""
def exitcode_msg(process):
typ = "exit" if handle.process.exitcode >= 0 else "signal"
return f"{typ} {abs(handle.process.exitcode)}"
p = handle.process
timeout = handle.timeout
p.join(timeout=timeout)
if p.is_alive():
warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
@ -1332,18 +1435,23 @@ def exitcode_msg(p):
p.join()
try:
child_result = read_pipe.recv()
# Check if information from the read pipe has been received.
child_result = handle.read_pipe.recv()
except EOFError:
raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
handle.process.join()
raise InstallError(
f"The process has stopped unexpectedly ({exitcode_msg(handle.process)})"
)
handle.process.join()
# If returns a StopPhase, raise it
if isinstance(child_result, spack.error.StopPhase):
# do not print
raise child_result
# let the caller know which package went wrong.
if isinstance(child_result, InstallError):
child_result.pkg = pkg
child_result.pkg = handle.pkg
if isinstance(child_result, ChildError):
# If the child process raised an error, print its output here rather
@ -1354,13 +1462,13 @@ def exitcode_msg(p):
raise child_result
# Fallback. Usually caught beforehand in EOFError above.
if p.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
if handle.process.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(handle.process)})")
return child_result
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.BaseBuilder)
def get_package_context(traceback, context=3):

View File

@ -97,7 +97,7 @@ def _specs(self, **kwargs):
class SetParallelJobs(argparse.Action):
"""Sets the correct value for parallel build jobs.
The value is is set in the command line configuration scope so that
The value is set in the command line configuration scope so that
it can be retrieved using the spack.config API.
"""
@ -113,6 +113,23 @@ def __call__(self, parser, namespace, jobs, option_string):
setattr(namespace, "jobs", jobs)
class SetConcurrentPackages(argparse.Action):
    """Sets the value for maximum number of concurrent package builds

    The value is set in the command line configuration scope so that
    it can be retrieved using the spack.config API.
    """

    def __call__(self, parser, namespace, concurrent_packages, option_string):
        # argparse has already coerced the value to int (type=int); reject
        # zero/negative counts before touching the configuration.
        if concurrent_packages < 1:
            # Fixed typo in the user-facing message ("expectd" -> "expected")
            # and collapsed the accidental adjacent-string concatenation.
            msg = 'invalid value for argument "{0}" [expected a positive integer, got "{1}"]'
            raise ValueError(msg.format(option_string, concurrent_packages))
        spack.config.set("config:concurrent_packages", concurrent_packages, scope="command_line")
        setattr(namespace, "concurrent_packages", concurrent_packages)
class DeptypeAction(argparse.Action):
"""Creates a flag of valid dependency types from a deptype argument."""
@ -377,6 +394,18 @@ def jobs():
)
@arg
def concurrent_packages():
    # Common argument definition registered via the ``@arg`` decorator so
    # subcommands can pull it in with ``add_common_arguments``; validation
    # happens in the SetConcurrentPackages action.
    return Args(
        "-p",
        "--concurrent-packages",
        action=SetConcurrentPackages,
        type=int,
        default=4,
        help="maximum number of packages to build concurrently",
    )
@arg
def install_status():
return Args(

View File

@ -63,6 +63,7 @@ def install_kwargs_from_args(args):
"unsigned": args.unsigned,
"install_deps": ("dependencies" in args.things_to_install),
"install_package": ("package" in args.things_to_install),
"concurrent_packages": args.concurrent_packages,
}
@ -84,6 +85,7 @@ def setup_parser(subparser):
default=None,
help="phase to stop after when installing (default None)",
)
arguments.add_common_arguments(subparser, ["concurrent_packages"])
arguments.add_common_arguments(subparser, ["jobs"])
subparser.add_argument(
"--overwrite",
@ -329,16 +331,8 @@ def install(parser, args):
arguments.sanitize_reporter_options(args)
def reporter_factory(specs):
if args.log_format is None:
return lang.nullcontext()
return spack.report.build_context_manager(
reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
)
reporter = args.reporter() if args.log_format else None
install_kwargs = install_kwargs_from_args(args)
env = ev.active_environment()
if not env and not args.spec and not args.specfiles:
@ -346,9 +340,9 @@ def reporter_factory(specs):
try:
if env:
install_with_active_env(env, args, install_kwargs, reporter_factory)
install_with_active_env(env, args, install_kwargs, reporter)
else:
install_without_active_env(args, install_kwargs, reporter_factory)
install_without_active_env(args, install_kwargs, reporter)
except InstallError as e:
if args.show_log_on_error:
_dump_log_on_error(e)
@ -382,7 +376,7 @@ def _maybe_add_and_concretize(args, env, specs):
env.write(regenerate=False)
def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_factory):
def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter):
specs = spack.cmd.parse_specs(args.spec)
# The following two commands are equivalent:
@ -416,8 +410,10 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
install_kwargs["overwrite"] = [spec.dag_hash() for spec in specs_to_install]
try:
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
report_file = report_filename(args, specs_to_install)
install_kwargs["report_file"] = report_file
install_kwargs["reporter"] = reporter
env.install_specs(specs_to_install, **install_kwargs)
finally:
if env.views:
with env.write_transaction():
@ -461,18 +457,23 @@ def concrete_specs_from_file(args):
return result
def install_without_active_env(args, install_kwargs, reporter_factory):
def install_without_active_env(args, install_kwargs, reporter):
concrete_specs = concrete_specs_from_cli(args, install_kwargs) + concrete_specs_from_file(args)
if len(concrete_specs) == 0:
tty.die("The `spack install` command requires a spec to install.")
with reporter_factory(concrete_specs):
if args.overwrite:
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
if args.overwrite:
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
try:
builder = PackageInstaller(installs, **install_kwargs)
builder.install()
finally:
if reporter:
report_file = report_filename(args, concrete_specs)
reporter.build_report(report_file, list(builder.reports.values()))

View File

@ -1638,7 +1638,7 @@ def determine_number_of_jobs(
except ValueError:
pass
return min(max_cpus, cfg.get("config:build_jobs", 16))
return min(max_cpus, cfg.get("config:build_jobs", 4))
class ConfigSectionError(spack.error.ConfigError):

View File

@ -284,7 +284,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
Raised RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert path.startswith(self.root)
assert path.startswith(self.root), f"PATH: {path}, ROOT: {self.root}"
if deprecated:
if os.path.exists(path):

View File

@ -1906,6 +1906,10 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
roots = self.concrete_roots()
specs = specs if specs is not None else roots
# Extract reporter arguments
reporter = install_args.pop("reporter", None)
report_file = install_args.pop("report_file", None)
# Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = {
*install_args.get("overwrite", ()),
@ -1918,7 +1922,12 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
*(s.dag_hash() for s in roots),
}
PackageInstaller([spec.package for spec in specs], **install_args).install()
try:
builder = PackageInstaller([spec.package for spec in specs], **install_args)
builder.install()
finally:
if reporter:
reporter.build_report(report_file, list(builder.reports.values()))
def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs"""

View File

@ -399,9 +399,10 @@ def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
"""
import spack.build_environment # avoid circular dependency
spack.build_environment.start_build_process(
ph = spack.build_environment.start_build_process(
self.pkg, test_process, kwargs, timeout=timeout
)
spack.build_environment.ProcessHandle.complete(ph)
def parts(self) -> int:
"""The total number of (checked) test parts."""

File diff suppressed because it is too large Load Diff

View File

@ -1,276 +1,131 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tools to produce reports of spec installations"""
"""Hooks to produce reports of spec installations"""
import collections
import contextlib
import functools
import gzip
import os
import time
import traceback
from typing import Any, Callable, Dict, List, Type
import llnl.util.lang
import llnl.util.filesystem as fs
import spack.build_environment
import spack.install_test
import spack.installer
import spack.package_base
import spack.reporters
import spack.spec
import spack.util.spack_json as sjson
reporter = None
report_file = None
Property = collections.namedtuple("Property", ["name", "value"])
class InfoCollector:
"""Base class for context manager objects that collect information during the execution of
certain package functions.
class Record(dict):
    """Dict-backed record whose entries can also be read as attributes."""

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails; fall back to keys.
        if name in self:
            return self[name]
        # NOTE(review): the message hardcodes "RequestRecord", but this is the
        # shared Record base class, so other subclasses would report a
        # misleading class name — confirm whether type(self).__name__ was
        # intended here.
        raise AttributeError(f"RequestRecord for {self.name} has no attribute {name}")
The data collected is available through the ``specs`` attribute once exited, and it's
organized as a list where each item represents the installation of one spec.
"""
wrap_class: Type
do_fn: str
_backup_do_fn: Callable
input_specs: List[spack.spec.Spec]
specs: List[Dict[str, Any]]
def __init__(self, wrap_class: Type, do_fn: str, specs: List[spack.spec.Spec]):
#: Class for which to wrap a function
self.wrap_class = wrap_class
#: Action to be reported on
self.do_fn = do_fn
#: Backup of the wrapped class function
self._backup_do_fn = getattr(self.wrap_class, do_fn)
#: Specs that will be acted on
self.input_specs = specs
#: This is where we record the data that will be included in our report
self.specs: List[Dict[str, Any]] = []
def fetch_log(self, pkg: spack.package_base.PackageBase) -> str:
"""Return the stdout log associated with the function being monitored
Args:
pkg: package under consideration
"""
raise NotImplementedError("must be implemented by derived classes")
def extract_package_from_signature(self, instance, *args, **kwargs):
"""Return the package instance, given the signature of the wrapped function."""
raise NotImplementedError("must be implemented by derived classes")
def __enter__(self):
# Initialize the spec report with the data that is available upfront.
Property = collections.namedtuple("Property", ["name", "value"])
for input_spec in self.input_specs:
name_fmt = "{0}_{1}"
name = name_fmt.format(input_spec.name, input_spec.dag_hash(length=7))
spec_record = {
"name": name,
"nerrors": None,
"nfailures": None,
"npackages": None,
"time": None,
"timestamp": time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()),
"properties": [],
"packages": [],
}
spec_record["properties"].append(Property("architecture", input_spec.architecture))
self.init_spec_record(input_spec, spec_record)
self.specs.append(spec_record)
def gather_info(wrapped_fn):
"""Decorates a function to gather useful information for a CI report."""
@functools.wraps(wrapped_fn)
def wrapper(instance, *args, **kwargs):
pkg = self.extract_package_from_signature(instance, *args, **kwargs)
package = {
"name": pkg.name,
"id": pkg.spec.dag_hash(),
"elapsed_time": None,
"result": None,
"message": None,
"installed_from_binary_cache": False,
}
# Append the package to the correct spec report. In some
# cases it may happen that a spec that is asked to be
# installed explicitly will also be installed as a
# dependency of another spec. In this case append to both
# spec reports.
for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7))
try:
item = next((x for x in self.specs if x["name"] == name))
item["packages"].append(package)
except StopIteration:
pass
start_time = time.time()
try:
value = wrapped_fn(instance, *args, **kwargs)
package["stdout"] = self.fetch_log(pkg)
package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
self.on_success(pkg, kwargs, package)
return value
except spack.build_environment.InstallError as exc:
# An InstallError is considered a failure (the recipe
# didn't work correctly)
package["result"] = "failure"
package["message"] = exc.message or "Installation failure"
package["stdout"] = self.fetch_log(pkg)
package["stdout"] += package["message"]
package["exception"] = exc.traceback
raise
except (Exception, BaseException) as exc:
# Everything else is an error (the installation
# failed outside of the child process)
package["result"] = "error"
package["message"] = str(exc) or "Unknown error"
package["stdout"] = self.fetch_log(pkg)
package["stdout"] += package["message"]
package["exception"] = traceback.format_exc()
raise
finally:
package["elapsed_time"] = time.time() - start_time
return wrapper
setattr(self.wrap_class, self.do_fn, gather_info(getattr(self.wrap_class, self.do_fn)))
def on_success(self, pkg: spack.package_base.PackageBase, kwargs, package_record):
"""Add additional properties on function call success."""
raise NotImplementedError("must be implemented by derived classes")
def init_spec_record(self, input_spec: spack.spec.Spec, record):
"""Add additional entries to a spec record when entering the collection context."""
def __exit__(self, exc_type, exc_val, exc_tb):
# Restore the original method in PackageBase
setattr(self.wrap_class, self.do_fn, self._backup_do_fn)
for spec in self.specs:
spec["npackages"] = len(spec["packages"])
spec["nfailures"] = len([x for x in spec["packages"] if x["result"] == "failure"])
spec["nerrors"] = len([x for x in spec["packages"] if x["result"] == "error"])
spec["time"] = sum(float(x["elapsed_time"]) for x in spec["packages"])
def __setattr__(self, name, value):
    # Underscore-prefixed names remain real instance attributes (internal
    # state); every other assignment is stored as a dict entry so it is
    # part of the record's serializable content.
    if name.startswith("_"):
        super().__setattr__(name, value)
    else:
        self[name] = value
class BuildInfoCollector(InfoCollector):
"""Collect information for the PackageInstaller._install_task method.
class RequestRecord(Record):
    """Record of one explicit install request and its per-package results."""

    def __init__(self, spec):
        super().__init__()
        self._spec = spec
        self.name = spec.name
        # Renamed from ``errors``: summarize() fills ``nerrors``, and the
        # previous report format used the "nerrors" key as well.
        self.nerrors = None
        self.nfailures = None
        self.npackages = None
        self.time = None
        # Fixed format string: "d" was missing its "%" flag, which produced a
        # literal "d" instead of the day of month in the timestamp.
        self.timestamp = time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime())
        self.properties = [
            Property("architecture", spec.architecture),
            # Property("compiler", spec.compiler),
        ]
        self.packages = []
Args:
specs: specs whose install information will be recorded
"""
def skip_installed(self):
    # Pre-populate the report with records for dependencies that are already
    # installed, so they appear as "skipped" instead of being omitted.
    for dep in filter(lambda x: x.installed, self._spec.traverse()):
        record = InstallRecord(dep)
        record.skip(msg="Spec already installed")
        self.packages.append(record)
def __init__(self, specs: List[spack.spec.Spec]):
super().__init__(spack.installer.PackageInstaller, "_install_task", specs)
def append_record(self, record):
    # Attach a per-package record to this request's report.
    self.packages.append(record)
def init_spec_record(self, input_spec, record):
# Check which specs are already installed and mark them as skipped
for dep in filter(lambda x: x.installed, input_spec.traverse()):
package = {
"name": dep.name,
"id": dep.dag_hash(),
"elapsed_time": "0.0",
"result": "skipped",
"message": "Spec already installed",
}
record["packages"].append(package)
def summarize(self):
    # Aggregate per-package results into the request-level counters
    # (npackages/nfailures/nerrors/time) that report generators read.
    self.npackages = len(self.packages)
    self.nfailures = len([r for r in self.packages if r.result == "failure"])
    self.nerrors = len([r for r in self.packages if r.result == "error"])
    # elapsed_time may be None for records that never started.
    self.time = sum(float(r.elapsed_time or 0.0) for r in self.packages)
def on_success(self, pkg, kwargs, package_record):
package_record["result"] = "success"
def fetch_log(self, pkg):
class SpecRecord(Record):
    """Base record for a single spec acted on during a request."""

    def __init__(self, spec):
        super().__init__()
        # Underscore attributes stay out of the serialized record contents.
        self._spec = spec
        self._package = spec.package
        self._start_time = None
        self.name = spec.name
        self.id = spec.dag_hash()
        self.elapsed_time = None

    def start(self):
        # Mark the beginning of work on this spec for elapsed-time tracking.
        self._start_time = time.time()

    def skip(self, msg):
        # Record that no work was done for this spec (e.g. already installed).
        self.result = "skipped"
        self.elapsed_time = 0.0
        self.message = msg
class InstallRecord(SpecRecord):
def __init__(self, spec):
super().__init__(spec)
self.result = None
self.message = None
self.installed_from_binary_cache = None
def fetch_log(self):
try:
if os.path.exists(pkg.install_log_path):
stream = gzip.open(pkg.install_log_path, "rt", encoding="utf-8")
if os.path.exists(self._package.install_log_path):
stream = gzip.open(self._package.install_log_path, "rt", encoding="utf-8")
else:
stream = open(pkg.log_path, encoding="utf-8")
stream = open(self._package.log_path, encoding="utf-8")
with stream as f:
return f.read()
except OSError:
return f"Cannot open log for {pkg.spec.cshort_spec}"
return f"Cannot open log for {self._spec.cshort_spec}"
def extract_package_from_signature(self, instance, *args, **kwargs):
return args[0].pkg
class TestInfoCollector(InfoCollector):
"""Collect information for the PackageBase.do_test method.
Args:
specs: specs whose install information will be recorded
record_directory: record directory for test log paths
"""
dir: str
def __init__(self, specs: List[spack.spec.Spec], record_directory: str):
super().__init__(spack.package_base.PackageBase, "do_test", specs)
self.dir = record_directory
def on_success(self, pkg, kwargs, package_record):
externals = kwargs.get("externals", False)
skip_externals = pkg.spec.external and not externals
if skip_externals:
package_record["result"] = "skipped"
package_record["result"] = "success"
def fetch_log(self, pkg: spack.package_base.PackageBase):
log_file = os.path.join(self.dir, spack.install_test.TestSuite.test_log_name(pkg.spec))
def fetch_time(self):
try:
with open(log_file, "r", encoding="utf-8") as stream:
return "".join(stream.readlines())
with open(self._package.times_log_path, "r", encoding="utf-8") as f:
data = sjson.load(f.read())
return data["total"]
except Exception:
return f"Cannot open log for {pkg.spec.cshort_spec}"
return None
def extract_package_from_signature(self, instance, *args, **kwargs):
return instance
def succeed(self):
    # Mark this package as successfully installed and capture its build log.
    self.result = "success"
    self.stdout = self.fetch_log()
    self.installed_from_binary_cache = self._package.installed_from_binary_cache
    # start() must have been called before success can be recorded.
    assert self._start_time, "Start time is None"
    self.elapsed_time = time.time() - self._start_time
@contextlib.contextmanager
def build_context_manager(
    reporter: spack.reporters.Reporter, filename: str, specs: List[spack.spec.Spec]
):
    """Decorate a package to generate a report after the installation function is executed.

    Args:
        reporter: object that generates the report
        filename: filename for the report
        specs: specs that need reporting
    """
    collector = BuildInfoCollector(specs)
    try:
        with collector:
            yield
    finally:
        # Emit the report even when the installation raised.
        reporter.build_report(filename, specs=collector.specs)
@contextlib.contextmanager
def test_context_manager(
    reporter: spack.reporters.Reporter,
    filename: str,
    specs: List[spack.spec.Spec],
    raw_logs_dir: str,
):
    """Decorate a package to generate a report after the test function is executed.

    Args:
        reporter: object that generates the report
        filename: filename for the report
        specs: specs that need reporting
        raw_logs_dir: record directory for test log paths
    """
    collector = TestInfoCollector(specs, raw_logs_dir)
    try:
        with collector:
            yield
    finally:
        # Emit the report even when the tests raised.
        reporter.test_report(filename, specs=collector.specs)
def fail(self, exc):
    # Classify the exception: InstallError means the build recipe itself
    # failed; anything else is an error outside the build process.
    if isinstance(exc, spack.build_environment.InstallError):
        self.result = "failure"
        self.message = exc.message or "Installation failure"
        self.exception = exc.traceback
    else:
        self.result = "error"
        self.message = str(exc) or "Unknown error"
        self.exception = traceback.format_exc()
    # Append the failure message to the captured log for the report.
    self.stdout = self.fetch_log() + self.message
    # start() must have been called before a failure can be recorded.
    assert self._start_time, "Start time is None"
    self.elapsed_time = time.time() - self._start_time

View File

@ -278,6 +278,8 @@ def build_report(self, report_dir, specs):
self.multiple_packages = False
num_packages = 0
for spec in specs:
spec.summarize()
# Do not generate reports for packages that were installed
# from the binary cache.
spec["packages"] = [
@ -362,6 +364,8 @@ def test_report(self, report_dir, specs):
"""Generate reports for each package in each spec."""
tty.debug("Processing test report")
for spec in specs:
spec.summarize()
duration = 0
if "time" in spec:
duration = int(spec["time"])

View File

@ -17,12 +17,16 @@ def concretization_report(self, filename, msg):
pass
def build_report(self, filename, specs):
for spec in specs:
spec.summarize()
if not (os.path.splitext(filename))[1]:
# Ensure the report name will end with the proper extension;
# otherwise, it currently defaults to the "directory" name.
filename = filename + ".xml"
report_data = {"specs": specs}
with open(filename, "w", encoding="utf-8") as f:
env = spack.tengine.make_environment()
t = env.get_template(self._jinja_template)

View File

@ -66,30 +66,33 @@ def test_install_package_and_dependency(
assert 'errors="0"' in content
def _check_runtests_none(pkg):
assert not pkg.run_tests
def _check_runtests_dttop(pkg):
assert pkg.run_tests == (pkg.name == "dttop")
def _check_runtests_all(pkg):
assert pkg.run_tests
@pytest.mark.disable_clean_stage_check
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert not pkg.run_tests
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_none)
install("-v", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_root(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests == (pkg.name == "dttop")
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_dttop)
install("--test=root", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_all(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _check_runtests_all)
install("--test=all", "pkg-a")
@ -377,6 +380,7 @@ def test_install_from_file(spec, concretize, error_code, tmpdir):
def test_junit_output_with_failures(tmpdir, exc_typename, msg):
with tmpdir.as_cwd():
install(
"--verbose",
"--log-format=junit",
"--log-file=test.xml",
"raiser",
@ -409,6 +413,21 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
assert msg in content
def _throw(task, exc_typename, exc_type, msg):
# Self is a spack.installer.Task
exc_type = getattr(builtins, exc_typename)
exc = exc_type(msg)
task.fail(exc)
def _runtime_error(task, *args, **kwargs):
    """Monkeypatch target: fail ``task`` with a builtin RuntimeError."""
    _throw(task, "RuntimeError", spack.error.InstallError, "something weird happened")
def _keyboard_error(task, *args, **kwargs):
    """Monkeypatch target: fail ``task`` with a KeyboardInterrupt."""
    _throw(task, "KeyboardInterrupt", KeyboardInterrupt, "Ctrl-C strikes again")
@pytest.mark.disable_clean_stage_check
@pytest.mark.parametrize(
"exc_typename,expected_exc,msg",
@ -428,14 +447,17 @@ def test_junit_output_with_errors(
tmpdir,
monkeypatch,
):
def just_throw(*args, **kwargs):
exc_type = getattr(builtins, exc_typename)
raise exc_type(msg)
monkeypatch.setattr(spack.installer.PackageInstaller, "_install_task", just_throw)
throw = _keyboard_error if expected_exc == KeyboardInterrupt else _runtime_error
monkeypatch.setattr(spack.installer.BuildTask, "complete", throw)
with tmpdir.as_cwd():
install("--log-format=junit", "--log-file=test.xml", "libdwarf", fail_on_error=False)
install(
"--verbose",
"--log-format=junit",
"--log-file=test.xml",
"trivial-install-test-dependent",
fail_on_error=False,
)
assert isinstance(install.error, expected_exc)
@ -445,7 +467,7 @@ def just_throw(*args, **kwargs):
content = filename.open().read()
# Only libelf error is reported (through libdwarf root spec). libdwarf
# Only original error is reported, dependent
# install is skipped and it is not an error.
assert 'tests="0"' not in content
assert 'failures="0"' in content
@ -1079,7 +1101,10 @@ def install_use_buildcache(opt):
@pytest.mark.disable_clean_stage_check
def test_padded_install_runtests_root(install_mockery, mock_fetch):
spack.config.set("config:install_tree:padded_length", 255)
output = install("--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False)
output = install(
"--verbose", "--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False
)
print(output)
assert output.count("method not implemented") == 1

View File

@ -721,18 +721,40 @@ def test_install_splice_root_from_binary(
assert len(spack.store.STORE.db.query()) == len(list(out.traverse()))
def test_install_task_use_cache(install_mockery, monkeypatch):
class MockInstallStatus:
    """No-op stand-in for the installer's install-status tracker."""

    def next_pkg(self, *args, **kwargs):
        """Ignore advancing to the next package."""

    def set_term_title(self, *args, **kwargs):
        """Ignore terminal title updates."""

    def get_progress(self):
        """Progress is not tracked; implicitly returns None."""
class MockTermStatusLine:
    """No-op stand-in for the terminal status line shown during installs."""

    def add(self, *args, **kwargs):
        """Ignore entries added to the status line."""

    def clear(self):
        """Ignore requests to clear the status line."""
def test_installing_task_use_cache(install_mockery, monkeypatch):
    """A task satisfied from the binary cache is marked installed after
    the start_task/complete_task cycle."""
    monkeypatch.setattr(inst, "_install_from_cache", _true)

    installer = create_installer(["trivial-install-test-package"], {})
    build_request = installer.build_requests[0]
    task = create_build_task(build_request.pkg)

    status, term_line = MockInstallStatus(), MockTermStatusLine()
    installer.start_task(task, status, term_line)
    installer.complete_task(task, status)
    assert build_request.pkg_id in installer.installed
def test_install_task_requeue_build_specs(install_mockery, monkeypatch, capfd):
"""Check that a missing build_spec spec is added by _install_task."""
"""Check that a missing build_spec spec is added by _complete_task."""
# This test also ensures coverage of most of the new
# _requeue_with_build_spec_tasks method.
@ -746,12 +768,12 @@ def _missing(*args, **kwargs):
request = installer.build_requests[0]
task = create_build_task(request.pkg)
# Drop one of the specs so its task is missing before _install_task
popped_task = installer._pop_task()
# Drop one of the specs so its task is missing before _complete_task
popped_task = installer._pop_ready_task()
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
monkeypatch.setattr(task, "execute", _missing)
installer._install_task(task, None)
monkeypatch.setattr(task, "complete", _missing)
installer._complete_task(task, None)
# Ensure the dropped task/spec was added back by _complete_task
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
@ -907,14 +929,16 @@ def test_update_failed_no_dependent_task(install_mockery):
def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
"""Test install with uninstalled dependencies."""
installer = create_installer(["dependent-install"], {})
installer = create_installer(["parallel-package-a"], {})
# Skip the actual installation and any status updates
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _noop)
monkeypatch.setattr(inst.Task, "start", _noop)
monkeypatch.setattr(inst.Task, "poll", _noop)
monkeypatch.setattr(inst.Task, "complete", _noop)
monkeypatch.setattr(inst.PackageInstaller, "_update_installed", _noop)
monkeypatch.setattr(inst.PackageInstaller, "_update_failed", _noop)
msg = "Cannot proceed with dependent-install"
msg = "Cannot proceed with parallel-package-a"
with pytest.raises(spack.error.InstallError, match=msg):
installer.install()
@ -924,7 +948,7 @@ def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
def test_install_failed(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
installer = create_installer(["pkg-b"], {})
installer = create_installer(["parallel-package-a"], {})
# Make sure the package is identified as failed
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
@ -939,7 +963,7 @@ def test_install_failed(install_mockery, monkeypatch, capsys):
def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
installer = create_installer(["pkg-a"], {"fail_fast": False})
installer = create_installer(["parallel-package-a"], {"fail_fast": False})
# Make sure the package is identified as failed
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
@ -949,7 +973,7 @@ def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
out = str(capsys.readouterr())
assert "failed to install" in out
assert "Skipping build of pkg-a" in out
assert "Skipping build of parallel-package-a" in out
def _interrupt(installer, task, install_status, **kwargs):
@ -965,9 +989,9 @@ def test_install_fail_on_interrupt(install_mockery, mock_fetch, monkeypatch):
spec_name = "pkg-a"
err_msg = "mock keyboard interrupt for {0}".format(spec_name)
installer = create_installer([spec_name], {"fake": True})
setattr(inst.PackageInstaller, "_real_install_task", inst.PackageInstaller._install_task)
setattr(inst.PackageInstaller, "_real_install_task", inst.PackageInstaller._complete_task)
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _interrupt)
monkeypatch.setattr(inst.PackageInstaller, "_complete_task", _interrupt)
with pytest.raises(KeyboardInterrupt, match=err_msg):
installer.install()
@ -982,21 +1006,26 @@ class MyBuildException(Exception):
pass
_old_complete_task = None
def _install_fail_my_build_exception(installer, task, install_status, **kwargs):
if task.pkg.name == "pkg-a":
print("Raising MyBuildException for pkg-a")
raise MyBuildException("mock internal package build error for pkg-a")
else:
# No need for more complex logic here because no splices
task.execute(install_status)
installer._update_installed(task)
_old_complete_task(installer, task, install_status)
def test_install_fail_single(install_mockery, mock_fetch, monkeypatch):
"""Test expected results for failure of single package."""
global _old_complete_task
installer = create_installer(["pkg-a"], {"fake": True})
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_fail_my_build_exception)
_old_complete_task = inst.PackageInstaller._complete_task
monkeypatch.setattr(inst.PackageInstaller, "_complete_task", _install_fail_my_build_exception)
with pytest.raises(MyBuildException, match="mock internal package build error for pkg-a"):
installer.install()
@ -1008,10 +1037,12 @@ def test_install_fail_single(install_mockery, mock_fetch, monkeypatch):
def test_install_fail_multi(install_mockery, mock_fetch, monkeypatch):
"""Test expected results for failure of multiple packages."""
global _old_complete_task
installer = create_installer(["pkg-a", "pkg-c"], {"fake": True})
# Raise a KeyboardInterrupt error to trigger early termination
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_fail_my_build_exception)
_old_complete_task = inst.PackageInstaller._complete_task
monkeypatch.setattr(inst.PackageInstaller, "_complete_task", _install_fail_my_build_exception)
with pytest.raises(spack.error.InstallError, match="Installation request failed"):
installer.install()
@ -1023,22 +1054,30 @@ def test_install_fail_multi(install_mockery, mock_fetch, monkeypatch):
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
"""Test fail_fast install when an install failure is detected."""
# Note: this test depends on the order of the installations
b, c = spack.concretize.concretize_one("pkg-b"), spack.concretize.concretize_one("pkg-c")
b_id, c_id = inst.package_id(b), inst.package_id(c)
a = spack.concretize.concretize_one("parallel-package-a")
installer = create_installer([c, b], {"fail_fast": True})
a_id = inst.package_id(a)
b_id = inst.package_id(a["parallel-package-b"])
c_id = inst.package_id(a["parallel-package-c"])
installer = create_installer([a], {"fail_fast": True})
# Make sure all packages are identified as failed
# This will prevent b from installing, which will cause the build of c to be skipped.
# This will prevent a and b from installing, which will cause the build of c to be skipped
# and the active processes to be killed.
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
installer.max_active_tasks = 2
with pytest.raises(spack.error.InstallError, match="after first install failure"):
installer.install()
assert c_id in installer.failed
assert b_id not in installer.failed, "Expected no attempt to install pkg-c"
assert f"{c_id} failed to install" in capsys.readouterr().err
assert b_id in installer.failed, "Expected b to be marked as failed"
assert c_id in installer.failed, "Exepected c to be marked as failed"
assert (
a_id not in installer.installed
), "Package a cannot install due to its dependencies failing"
# check that b's active process got killed when c failed
assert f"{b_id} failed to install" in capsys.readouterr().err
def _test_install_fail_fast_on_except_patch(installer, **kwargs):
@ -1181,94 +1220,91 @@ def test_install_implicit(install_mockery, mock_fetch):
assert not create_build_task(pkg).explicit
def test_overwrite_install_backup_success(temporary_store, config, mock_packages, tmpdir):
# Install that wipes the prefix directory
def wipe_prefix(pkg, install_args):
    """Simulated build_process that destroys the install prefix, then fails.

    Leaves an empty prefix behind so the overwrite-install backup/restore
    path is exercised by the surrounding test.
    """
    # Fixed: removed leftover debugging output (print("AAAAAAAAA")).
    shutil.rmtree(pkg.prefix, ignore_errors=True)
    fs.mkdirp(pkg.prefix)
    raise Exception("Some fatal install error")
def fail(*args, **kwargs):
    # Canary callback: the test fails outright if this is ever invoked.
    assert False
def test_overwrite_install_backup_success(
monkeypatch, temporary_store, config, mock_packages, tmpdir
):
"""
When doing an overwrite install that fails, Spack should restore the backup
of the original prefix, and leave the original spec marked installed.
"""
# Note: this test relies on installing a package with no dependencies
# Get a build task. TODO: refactor this to avoid calling internal methods
installer = create_installer(["pkg-c"])
# Get a build task. TODO: Refactor this to avoid calling internal methods.
installer = create_installer(["pkg-b"])
installer._init_queue()
task = installer._pop_task()
install_status = MockInstallStatus()
term_status = MockTermStatusLine()
# Make sure the install prefix exists with some trivial file
installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
class InstallerThatWipesThePrefixDir:
def _install_task(self, task, install_status):
shutil.rmtree(task.pkg.prefix, ignore_errors=True)
fs.mkdirp(task.pkg.prefix)
raise Exception("Some fatal install error")
monkeypatch.setattr(inst, "build_process", wipe_prefix)
class FakeDatabase:
called = False
def remove(self, spec):
self.called = True
fake_installer = InstallerThatWipesThePrefixDir()
fake_db = FakeDatabase()
overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)
# Make sure the package is not marked uninstalled
monkeypatch.setattr(spack.store.STORE.db, "remove", fail)
# Make sure that the installer does an overwrite install
monkeypatch.setattr(task, "_install_action", inst.InstallAction.OVERWRITE)
# Installation should throw the installation exception, not the backup
# failure.
installer.start_task(task, install_status, term_status)
with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
installer.complete_task(task, install_status)
# Make sure the package is not marked uninstalled and the original dir
# is back.
assert not fake_db.called
# Check that the original file is back.
assert os.path.exists(installed_file)
def test_overwrite_install_backup_failure(temporary_store, config, mock_packages, tmpdir):
# Install that removes the backup directory, which is at the same level as
# the prefix, starting with .backup
def remove_backup(pkg, install_args):
    """Simulated build_process that deletes the prefix backup, then fails.

    Removes every sibling ``.backup*`` directory next to the install prefix
    before raising, so the overwrite-install recovery path has nothing left
    to restore.
    """
    prefix_parent = os.path.dirname(os.path.normpath(pkg.prefix))
    for backup_dir in glob.iglob(os.path.join(prefix_parent, ".backup*")):
        shutil.rmtree(backup_dir)
    raise Exception("Some fatal install error")
def test_overwrite_install_backup_failure(
monkeypatch, temporary_store, config, mock_packages, tmpdir
):
"""
When doing an overwrite install that fails, Spack should try to recover the
original prefix. If that fails, the spec is lost, and it should be removed
from the database.
"""
# Note: this test relies on installing a package with no dependencies
class InstallerThatAccidentallyDeletesTheBackupDir:
def _install_task(self, task, install_status):
# Remove the backup directory, which is at the same level as the prefix,
# starting with .backup
backup_glob = os.path.join(
os.path.dirname(os.path.normpath(task.pkg.prefix)), ".backup*"
)
for backup in glob.iglob(backup_glob):
shutil.rmtree(backup)
raise Exception("Some fatal install error")
class FakeDatabase:
called = False
def remove(self, spec):
self.called = True
# Get a build task. TODO: refactor this to avoid calling internal methods
installer = create_installer(["pkg-c"])
installer._init_queue()
task = installer._pop_task()
install_status = MockInstallStatus()
term_status = MockTermStatusLine()
# Make sure the install prefix exists
installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
monkeypatch.setattr(inst, "build_process", remove_backup)
fake_installer = InstallerThatAccidentallyDeletesTheBackupDir()
fake_db = FakeDatabase()
overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)
# Installation should throw the installation exception, not the backup
# failure.
with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
# Make sure that the installer does an overwrite install
monkeypatch.setattr(task, "_install_action", inst.InstallAction.OVERWRITE)
# Make sure that `remove` was called on the database after an unsuccessful
# attempt to restore the backup.
assert fake_db.called
# This error is raised while handling the original install error
installer.start_task(task, install_status, term_status)
with pytest.raises(Exception, match="No such spec in database"):
installer.complete_task(task, install_status)
def test_term_status_line():

View File

@ -1335,7 +1335,7 @@ _spack_info() {
_spack_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --use-buildcache --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete --add --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse --fresh-roots --reuse-deps --deprecated"
SPACK_COMPREPLY="-h --help --only -u --until -p --concurrent-packages -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --use-buildcache --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete --add --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse --fresh-roots --reuse-deps --deprecated"
else
_all_packages
fi

View File

@ -2025,7 +2025,7 @@ complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -f -a
complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -d 'list variants in strict name order; don'"'"'t group by condition'
# spack install
set -g __fish_spack_optspecs_spack_install h/help only= u/until= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse fresh-roots deprecated
set -g __fish_spack_optspecs_spack_install h/help only= u/until= p/concurrent-packages= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse fresh-roots deprecated
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 install' -f -k -a '(__fish_spack_specs)'
complete -c spack -n '__fish_spack_using_command install' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command install' -s h -l help -d 'show this help message and exit'
@ -2033,6 +2033,8 @@ complete -c spack -n '__fish_spack_using_command install' -l only -r -f -a 'pack
complete -c spack -n '__fish_spack_using_command install' -l only -r -d 'select the mode of installation'
complete -c spack -n '__fish_spack_using_command install' -s u -l until -r -f -a until
complete -c spack -n '__fish_spack_using_command install' -s u -l until -r -d 'phase to stop after when installing (default None)'
complete -c spack -n '__fish_spack_using_command install' -s p -l concurrent-packages -r -f -a concurrent_packages
complete -c spack -n '__fish_spack_using_command install' -s p -l concurrent-packages -r -d 'maximum number of packages to build concurrently'
complete -c spack -n '__fish_spack_using_command install' -s j -l jobs -r -f -a jobs
complete -c spack -n '__fish_spack_using_command install' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
complete -c spack -n '__fish_spack_using_command install' -l overwrite -f -a overwrite

View File

@ -0,0 +1,29 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import time
from llnl.util.filesystem import touch
from spack.package import *
class ParallelPackageA(Package):
    """Mock package with two dependencies, used to exercise concurrent
    (parallel) package builds in the installer tests."""

    homepage = "http://www.example.com"
    # No sources to fetch; install() fabricates the prefix contents.
    has_code = False

    depends_on("parallel-package-b")
    depends_on("parallel-package-c")

    version("1.0")

    def install(self, spec, prefix):
        print("I'm building!")
        # Sleep so concurrent builds overlap long enough to be observed.
        time.sleep(2)
        print("I'm done!")
        touch(prefix.dummy_file)

View File

@ -0,0 +1,26 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import time
from llnl.util.filesystem import touch
from spack.package import *
class ParallelPackageB(Package):
    """Mock dependency package used to exercise concurrent (parallel)
    package builds in the installer tests."""

    homepage = "http://www.example.com"
    # No sources to fetch; install() fabricates the prefix contents.
    has_code = False

    version("1.0")

    def install(self, spec, prefix):
        print("I'm building!")
        # Longer sleep than its siblings so this build is still active
        # while other tasks start or fail.
        time.sleep(6)
        print("I'm done!")
        touch(prefix.dummy_file)

View File

@ -0,0 +1,26 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import time
from llnl.util.filesystem import touch
from spack.package import *
class ParallelPackageC(Package):
    """Mock dependency package used to exercise concurrent (parallel)
    package builds in the installer tests."""

    homepage = "http://www.example.com"
    # No sources to fetch; install() fabricates the prefix contents.
    has_code = False

    version("1.0")

    def install(self, spec, prefix):
        print("Package 3 building!")
        time.sleep(2)
        # NOTE(review): the message below suggests tests expect this build
        # to be interrupted before completion — confirm against the
        # fail-fast installer tests.
        print("Ideally shouldnt get here and it should fail")
        touch(prefix.dummy_file)