Compare commits

..

11 Commits

Author SHA1 Message Date
Gregory Becker
d87158a80f more robust buildcache tests for concretization to non-default arch
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-28 13:42:40 -05:00
Gregory Becker
441efad2d5 fix count test now that builtin.mock.mpileaks has build deps
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-28 13:42:14 -05:00
Gregory Becker
b6f394ed00 fixup after rebase
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-26 12:12:29 -07:00
Tamara Dahlgren
a8a62e8f5a Installer: update installation progress tracking
- test/installer: use existing inst for spack.installer
- remove install status from Installing message
- Add specs count visitor
- Report status on installed plus minor refactor
- Add the "+" to the tracker; include one experimental dynamic calculation
- tweak status reporting to include ensuring numerator unique across installed packages
- _print_installed_pkg -> InstallStatus.print_installed()
- move set_term_title outside of InstallStatus
- InstallStatus: remove unnecessary next_pkg
- InstallStatus: class and method name changes
  * changed InstallStatus to InstallerStatus since already have former in
    database.py and spec.py
  * changed print_installed to set_installed since does more than print now
- InstallerStatus -> InstallerProgress, install_status -> progress
- InstallerProgress: cache config:install_status
- InstallerProgress: restore get_progress and set_term_title methods (w/ tweaks)
- Task execute(): added returns to docstrings
- Don't pass progress to build_process or Installer.run, but set installed on successful return
- fix mypy issue with pkg.run_tests assignment
2025-03-26 09:29:07 -07:00
Gregory Becker
a3b6b873da rebase fixup 2025-03-26 09:22:56 -07:00
Gregory Becker
04f8ebd1eb PackageInstaller._install_task: fix type annotation 2025-03-26 09:22:56 -07:00
Gregory Becker
a3344c5672 refactor overwrite installs into main installer class 2025-03-26 09:22:52 -07:00
Gregory Becker
4f2f253bc3 update edges for existing tasks for build deps 2025-03-26 09:20:24 -07:00
Gregory Becker
78e39f2207 use db lookup that cannot return None 2025-03-26 09:20:24 -07:00
Tamara Dahlgren
eaf332c03e Resolve mypy issues 2025-03-26 09:20:24 -07:00
Gregory Becker
c74d6117e5 Installer: queue only link/run deps and requeue with build deps as needed
Refactors BuildTask into separate classes BuildTask and InstallTask
Queues all packages as InstallTask, with link/run deps only
If an InstallTask fails to install from binary, a BuildTask is generated
The BuildTask is queued with dependencies on the new InstallTasks for its
build deps and their link/run dependencies.
The Tasks telescope open to include all build deps of build deps ad-hoc
2025-03-26 09:20:15 -07:00
16 changed files with 673 additions and 521 deletions

View File

@@ -4,6 +4,7 @@
import json
import os
import re
import shutil
import sys
from typing import Dict
@@ -25,10 +26,12 @@
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.executable
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
@@ -42,6 +45,7 @@
SPACK_COMMAND = "spack"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
def deindent(desc):
@@ -779,15 +783,18 @@ def ci_verify_versions(args):
then parses the git diff between the two to determine which packages
have been modified and verifies the new checksums inside of them.
"""
# Get a list of all packages that have been changed or added
# between from_ref and to_ref
pkgs = spack.repo.get_all_package_diffs("AC", args.from_ref, args.to_ref)
with fs.working_dir(spack.paths.prefix):
# We use HEAD^1 explicitly on the merge commit created by
# GitHub Actions. However HEAD~1 is a safer default for the helper function.
files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
# Get a list of package names from the modified files.
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
failed_version = False
for pkg_name in pkgs:
for pkg_name, path in pkgs:
spec = spack.spec.Spec(pkg_name)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
path = spack.repo.PATH.package_path(pkg_name)
# Skip checking manual download packages and trust the maintainers
if pkg.manual_download:
@@ -811,7 +818,7 @@ def ci_verify_versions(args):
# TODO: enforce every version have a commit or a sha256 defined if not
# an infinite version (there are a lot of packages where this doesn't work yet.)
with fs.working_dir(os.path.dirname(path)):
with fs.working_dir(spack.paths.prefix):
added_checksums = spack_ci.get_added_versions(
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)

View File

@@ -65,6 +65,7 @@
import spack.util.executable
import spack.util.path
import spack.util.timer as timer
from spack.traverse import CoverNodesVisitor, traverse_breadth_first_with_visitor
from spack.util.environment import EnvironmentModifications, dump_environment
from spack.util.executable import which
@@ -118,6 +119,11 @@ class ExecuteResult(enum.Enum):
FAILED = enum.auto()
# Task is missing build spec and will be requeued
MISSING_BUILD_SPEC = enum.auto()
# Task is queued to install from binary but no binary found
MISSING_BINARY = enum.auto()
requeue_results = [ExecuteResult.MISSING_BUILD_SPEC, ExecuteResult.MISSING_BINARY]
class InstallAction(enum.Enum):
@@ -129,22 +135,46 @@ class InstallAction(enum.Enum):
OVERWRITE = enum.auto()
class InstallStatus:
def __init__(self, pkg_count: int):
# Counters used for showing status information
self.pkg_num: int = 0
self.pkg_count: int = pkg_count
class InstallerProgress:
"""Installation progress tracker"""
def __init__(self, packages: List["spack.package_base.PackageBase"]):
self.counter = SpecsCount(dt.BUILD | dt.LINK | dt.RUN)
self.pkg_count: int = self.counter.total([pkg.spec for pkg in packages])
self.pkg_ids: Set[str] = set()
self.pkg_num: int = 0
self.add_progress: bool = spack.config.get("config:install_status", True)
def next_pkg(self, pkg: "spack.package_base.PackageBase"):
def set_installed(self, pkg: "spack.package_base.PackageBase", message: str) -> None:
"""
Flag package as installed and output the installation status if
enabled by config:install_status.
Args:
pkg: installed package
message: message to be output
"""
pkg_id = package_id(pkg.spec)
if pkg_id not in self.pkg_ids:
self.pkg_num += 1
self.pkg_ids.add(pkg_id)
visited = max(len(self.pkg_ids), self.counter.total([pkg.spec]), self.pkg_num + 1)
self.pkg_num = visited
if tty.msg_enabled():
post = self.get_progress() if self.add_progress else ""
print(
colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message) + f" {post}"
)
self.set_term_title("Installed")
def set_term_title(self, text: str):
if not spack.config.get("config:install_status", True):
"""Update the terminal title bar.
Args:
text: message to output in the terminal title bar
"""
if not self.add_progress:
return
if not sys.stdout.isatty():
@@ -155,7 +185,11 @@ def set_term_title(self, text: str):
sys.stdout.flush()
def get_progress(self) -> str:
return f"[{self.pkg_num}/{self.pkg_count}]"
"""Current installation progress
Returns: string showing the current installation progress
"""
return f"[{self.pkg_num}/{self.pkg_count} completed]"
class TermStatusLine:
@@ -224,7 +258,9 @@ def _check_last_phase(pkg: "spack.package_base.PackageBase") -> None:
pkg.last_phase = None # type: ignore[attr-defined]
def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explicit: bool) -> bool:
def _handle_external_and_upstream(
pkg: "spack.package_base.PackageBase", explicit: bool, progress: InstallerProgress
) -> bool:
"""
Determine if the package is external or upstream and register it in the
database if it is external package.
@@ -232,6 +268,8 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
Args:
pkg: the package whose installation is under consideration
explicit: the package was explicitly requested by the user
progress: installation progress tracker
Return:
``True`` if the package is not to be installed locally, otherwise ``False``
"""
@@ -239,7 +277,7 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg.spec)})")
progress.set_installed(pkg, f"{pkg.prefix} (external {package_id(pkg.spec)})")
return True
if pkg.spec.installed_upstream:
@@ -247,7 +285,7 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
f"{package_id(pkg.spec)} is installed in an upstream Spack instance at "
f"{pkg.spec.prefix}"
)
_print_installed_pkg(pkg.prefix)
progress.set_installed(pkg, pkg.prefix)
# This will result in skipping all post-install hooks. In the case
# of modules this is considered correct because we want to retrieve
@@ -323,17 +361,6 @@ def _log_prefix(pkg_name) -> str:
return f"{pid}{pkg_name}:"
def _print_installed_pkg(message: str) -> None:
"""
Output a message with a package icon.
Args:
message (str): message to be output
"""
if tty.msg_enabled():
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None:
"""Output install test log file path but only if have test failures.
@@ -354,13 +381,17 @@ def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None:
def _install_from_cache(
pkg: "spack.package_base.PackageBase", explicit: bool, unsigned: Optional[bool] = False
pkg: "spack.package_base.PackageBase",
progress: InstallerProgress,
explicit: bool,
unsigned: Optional[bool] = False,
) -> bool:
"""
Install the package from binary cache
Args:
pkg: package to install from the binary cache
progress: installation status tracker
explicit: ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False``
unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
@@ -380,7 +411,7 @@ def _install_from_cache(
_write_timer_json(pkg, t, True)
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
_print_installed_pkg(pkg.spec.prefix)
progress.set_installed(pkg, pkg.spec.prefix)
spack.hooks.post_install(pkg.spec, explicit)
return True
@@ -591,7 +622,7 @@ def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
return [package_id(d) for d in spec.dependents()]
def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
def install_msg(name: str, pid: int) -> str:
"""
Colorize the name/id of the package being installed
@@ -602,12 +633,7 @@ def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
Return: Colorized installing message
"""
pre = f"{pid}: " if tty.show_pid() else ""
post = (
" @*{%s}" % install_status.get_progress()
if install_status and spack.config.get("config:install_status", True)
else ""
)
return pre + colorize("@*{Installing} @*g{%s}%s" % (name, post))
return pre + colorize("@*{Installing} @*g{%s}" % (name))
def archive_install_logs(pkg: "spack.package_base.PackageBase", phase_log_dir: str) -> None:
@@ -716,6 +742,18 @@ def package_id(spec: "spack.spec.Spec") -> str:
return f"{spec.name}-{spec.version}-{spec.dag_hash()}"
class SpecsCount:
def __init__(self, depflag: int):
self.depflag = depflag
def total(self, specs: List["spack.spec.Spec"]):
visitor = CoverNodesVisitor(
spack.spec.DagCountVisitor(self.depflag), key=lambda s: package_id(s)
)
traverse_breadth_first_with_visitor(specs, visitor)
return visitor.visitor.number
class BuildRequest:
"""Class for representing an installation request."""
@@ -806,16 +844,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
depflag = dt.LINK | dt.RUN
include_build_deps = self.install_args.get("include_build_deps")
if self.pkg_id == package_id(pkg.spec):
cache_only = self.install_args.get("package_cache_only")
else:
cache_only = self.install_args.get("dependencies_cache_only")
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
):
if include_build_deps:
depflag |= dt.BUILD
if self.run_tests(pkg):
depflag |= dt.TEST
@@ -872,7 +901,6 @@ def __init__(
pkg: "spack.package_base.PackageBase",
request: BuildRequest,
*,
compiler: bool = False,
start: float = 0.0,
attempts: int = 0,
status: BuildStatus = BuildStatus.QUEUED,
@@ -967,11 +995,14 @@ def __init__(
self.attempts = attempts
self._update()
def execute(self, install_status: InstallStatus) -> ExecuteResult:
def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""Execute the work of this task.
The ``install_status`` is an ``InstallStatus`` object used to format progress reporting for
this task in the context of the full ``BuildRequest``."""
Args:
progress: installation progress tracker
Returns: execution result
"""
raise NotImplementedError
def __eq__(self, other):
@@ -1136,33 +1167,26 @@ def priority(self):
class BuildTask(Task):
"""Class for representing a build task for a package."""
def execute(self, install_status):
def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""
Perform the installation of the requested spec and/or dependency
represented by the build task.
Args:
progress: installation progress tracker
Returns: execution result
"""
install_args = self.request.install_args
tests = install_args.get("tests")
unsigned = install_args.get("unsigned")
tests = install_args.get("tests", False)
pkg, pkg_id = self.pkg, self.pkg_id
tty.msg(install_msg(pkg_id, self.pid, install_status))
tty.msg(install_msg(pkg_id, self.pid))
self.start = self.start or time.time()
self.status = BuildStatus.INSTALLING
# Use the binary cache if requested
if self.use_cache:
if _install_from_cache(pkg, self.explicit, unsigned):
return ExecuteResult.SUCCESS
elif self.cache_only:
raise spack.error.InstallError(
"No binary found when cache-only was specified", pkg=pkg
)
else:
tty.msg(f"No binary for {pkg_id} found: installing from source")
pkg.run_tests = tests is True or tests and pkg.name in tests
pkg.run_tests = tests is True or (tests and pkg.name in tests)
# hook that allows tests to inspect the Package before installation
# see unit_test_check() docs.
@@ -1185,6 +1209,8 @@ def execute(self, install_status):
# Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.STORE.db.add(pkg.spec, explicit=self.explicit)
progress.set_installed(self.pkg, self.pkg.prefix)
except spack.error.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
@@ -1194,10 +1220,77 @@ def execute(self, install_status):
return ExecuteResult.SUCCESS
class InstallTask(Task):
"""Class for representing a build task for a package."""
def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""
Perform the installation of the requested spec and/or dependency
represented by the install task.
Args:
progress: installation progress tracker
Returns: execution result
"""
# no-op and requeue to build if not allowed to use cache
if not self.use_cache:
return ExecuteResult.MISSING_BINARY
install_args = self.request.install_args
unsigned = install_args.get("unsigned")
pkg, pkg_id = self.pkg, self.pkg_id
tty.msg(install_msg(pkg_id, self.pid))
self.start = self.start or time.time()
self.status = BuildStatus.INSTALLING
try:
if _install_from_cache(pkg, progress, self.explicit, unsigned):
return ExecuteResult.SUCCESS
elif self.cache_only:
raise spack.error.InstallError(
"No binary found when cache-only was specified", pkg=pkg
)
else:
tty.msg(f"No binary for {pkg_id} found: installing from source")
return ExecuteResult.MISSING_BINARY
except binary_distribution.NoChecksumException as exc:
if self.cache_only:
raise
tty.error(
f"Failed to install {self.pkg.name} from binary cache due "
f"to {str(exc)}: Requeueing to install from source."
)
return ExecuteResult.MISSING_BINARY
def build_task(self, installed):
build_task = BuildTask(
pkg=self.pkg,
request=self.request,
start=0,
attempts=self.attempts,
status=BuildStatus.QUEUED,
installed=installed,
)
# Fixup dependents in case it was changed by `add_dependent`
# This would be the case of a `build_spec` for a spliced spec
build_task.dependents = self.dependents
# Same for dependencies
build_task.dependencies = self.dependencies
build_task.uninstalled_deps = self.uninstalled_deps - installed
return build_task
class RewireTask(Task):
"""Class for representing a rewire task for a package."""
def execute(self, install_status):
def execute(self, progress: InstallerProgress) -> ExecuteResult:
"""Execute rewire task
Rewire tasks are executed by either rewiring self.package.spec.build_spec that is already
@@ -1206,24 +1299,30 @@ def execute(self, install_status):
If not available installed or as binary, return ExecuteResult.MISSING_BUILD_SPEC.
This will prompt the Installer to requeue the task with a dependency on the BuildTask
to install self.pkg.spec.build_spec
Args:
progress: installation progress tracker
Returns: execution result
"""
oldstatus = self.status
self.status = BuildStatus.INSTALLING
tty.msg(install_msg(self.pkg_id, self.pid, install_status))
tty.msg(install_msg(self.pkg_id, self.pid))
self.start = self.start or time.time()
if not self.pkg.spec.build_spec.installed:
try:
install_args = self.request.install_args
unsigned = install_args.get("unsigned")
_process_binary_cache_tarball(self.pkg, explicit=self.explicit, unsigned=unsigned)
_print_installed_pkg(self.pkg.prefix)
progress.set_installed(self.pkg, self.pkg.prefix)
return ExecuteResult.SUCCESS
except BaseException as e:
tty.error(f"Failed to rewire {self.pkg.spec} from binary. {e}")
self.status = oldstatus
return ExecuteResult.MISSING_BUILD_SPEC
spack.rewiring.rewire_node(self.pkg.spec, self.explicit)
_print_installed_pkg(self.pkg.prefix)
progress.set_installed(self.pkg, self.pkg.prefix)
return ExecuteResult.SUCCESS
@@ -1323,6 +1422,9 @@ def __init__(
# Priority queue of tasks
self.build_pq: List[Tuple[Tuple[int, int], Task]] = []
# Installation status tracker
self.progress: InstallerProgress = InstallerProgress(packages)
# Mapping of unique package ids to task
self.build_tasks: Dict[str, Task] = {}
@@ -1377,8 +1479,9 @@ def _add_init_task(
request: the associated install request
all_deps: dictionary of all dependencies and associated dependents
"""
cls = RewireTask if pkg.spec.spliced else BuildTask
task = cls(pkg, request=request, status=BuildStatus.QUEUED, installed=self.installed)
cls = RewireTask if pkg.spec.spliced else InstallTask
task: Task = cls(pkg, request=request, status=BuildStatus.QUEUED, installed=self.installed)
for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg.spec))
@@ -1671,7 +1774,7 @@ def _requeue_with_build_spec_tasks(self, task):
"""Requeue the task and its missing build spec dependencies"""
# Full install of the build_spec is necessary because it didn't already exist somewhere
spec = task.pkg.spec
for dep in spec.build_spec.traverse():
for dep in spec.build_spec.traverse(deptype=task.request.get_depflags(task.pkg)):
dep_pkg = dep.package
dep_id = package_id(dep)
@@ -1694,6 +1797,48 @@ def _requeue_with_build_spec_tasks(self, task):
spec_task.add_dependency(build_pkg_id)
self._push_task(spec_task)
def _requeue_as_build_task(self, task):
# TODO: handle the compile bootstrapping stuff?
spec = task.pkg.spec
build_dep_ids = []
for builddep in spec.dependencies(deptype=dt.BUILD):
# track which package ids are the direct build deps
build_dep_ids.append(package_id(builddep))
for dep in builddep.traverse(deptype=task.request.get_depflags(task.pkg)):
dep_pkg = dep.package
dep_id = package_id(dep)
# Add a new task if we need one
if dep_id not in self.build_tasks and dep_id not in self.installed:
self._add_init_task(dep_pkg, task.request, self.all_dependencies)
# Add edges for an existing task if it exists
elif dep_id in self.build_tasks:
for parent in dep.dependents():
parent_id = package_id(parent)
self.build_tasks[dep_id].add_dependent(parent_id)
# Clear any persistent failure markings _unless_ they
# are associated with another process in this parallel build
spack.store.STORE.failure_tracker.clear(dep, force=False)
# Remove InstallTask
self._remove_task(task.pkg_id)
# New task to build this spec from source
build_task = task.build_task(self.installed)
build_task_id = package_id(spec)
# Attach dependency relationships between spec and build deps
for build_dep_id in build_dep_ids:
if build_dep_id not in self.installed:
build_dep_task = self.build_tasks[build_dep_id]
build_dep_task.add_dependent(build_task_id)
build_task.add_dependency(build_dep_id)
# Add new Task -- this removes the old task as well
self._push_task(build_task)
def _add_tasks(self, request: BuildRequest, all_deps):
"""Add tasks to the priority queue for the given build request.
@@ -1747,19 +1892,55 @@ def _add_tasks(self, request: BuildRequest, all_deps):
fail_fast = bool(request.install_args.get("fail_fast"))
self.fail_fast = self.fail_fast or fail_fast
def _install_task(self, task: Task, install_status: InstallStatus) -> None:
def _install_task(self, task: Task) -> ExecuteResult:
"""
Perform the installation of the requested spec and/or dependency
represented by the task.
Args:
task: the installation task for a package
install_status: the installation status for the package"""
rc = task.execute(install_status)
"""
rc = task.execute(self.progress)
if rc == ExecuteResult.MISSING_BUILD_SPEC:
self._requeue_with_build_spec_tasks(task)
elif rc == ExecuteResult.MISSING_BINARY:
self._requeue_as_build_task(task)
else: # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED
self._update_installed(task)
return rc
def _overwrite_install_task(self, task: Task):
"""
Try to run the install task overwriting the package prefix.
If this fails, try to recover the original install prefix. If that fails
too, mark the spec as uninstalled.
"""
try:
with fs.replace_directory_transaction(task.pkg.prefix):
rc = self._install_task(task)
if rc in requeue_results:
raise Requeue # raise to trigger transactional replacement of directory
except Requeue:
pass # This task is requeueing, not failing
except fs.CouldNotRestoreDirectoryBackup as e:
spack.store.STORE.db.remove(task.pkg.spec)
if isinstance(e.inner_exception, Requeue):
message_fn = tty.warn
else:
message_fn = tty.error
message_fn(
f"Recovery of install dir of {task.pkg.name} failed due to "
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
"The spec is now uninstalled."
)
# Unwrap the actual installation exception
if isinstance(e.inner_exception, Requeue):
tty.warn("Task will be requeued to build from source")
else:
raise e.inner_exception
def _next_is_pri0(self) -> bool:
"""
@@ -1863,7 +2044,7 @@ def _remove_task(self, pkg_id: str) -> Optional[Task]:
else:
return None
def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
def _requeue_task(self, task: Task) -> None:
"""
Requeues a task that appears to be in progress by another process.
@@ -1871,10 +2052,7 @@ def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
task (Task): the installation task for a package
"""
if task.status not in [BuildStatus.INSTALLED, BuildStatus.INSTALLING]:
tty.debug(
f"{install_msg(task.pkg_id, self.pid, install_status)} "
"in progress by another process"
)
tty.debug(f"{install_msg(task.pkg_id, self.pid)} in progress by another process")
new_task = task.next_attempt(self.installed)
new_task.status = BuildStatus.INSTALLING
@@ -2020,8 +2198,6 @@ def install(self) -> None:
single_requested_spec = len(self.build_requests) == 1
failed_build_requests = []
install_status = InstallStatus(len(self.build_pq))
# Only enable the terminal status line when we're in a tty without debug info
# enabled, so that the output does not get cluttered.
term_status = TermStatusLine(
@@ -2037,8 +2213,7 @@ def install(self) -> None:
keep_prefix = install_args.get("keep_prefix")
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
install_status.next_pkg(pkg)
install_status.set_term_title(f"Processing {pkg.name}")
self.progress.set_term_title(f"Processing {pkg.name}")
tty.debug(f"Processing {pkg_id}: task={task}")
# Ensure that the current spec has NO uninstalled dependencies,
# which is assumed to be reflected directly in its priority.
@@ -2067,7 +2242,7 @@ def install(self) -> None:
# Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since
# some package likely depends on it.
if _handle_external_and_upstream(pkg, task.explicit):
if _handle_external_and_upstream(pkg, task.explicit, self.progress):
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue
@@ -2088,7 +2263,7 @@ def install(self) -> None:
# another process is likely (un)installing the spec or has
# determined the spec has already been installed (though the
# other process may be hung).
install_status.set_term_title(f"Acquiring lock for {pkg.name}")
self.progress.set_term_title(f"Acquiring lock for {pkg.name}")
term_status.add(pkg_id)
ltype, lock = self._ensure_locked("write", pkg)
if lock is None:
@@ -2100,7 +2275,7 @@ def install(self) -> None:
# can check the status presumably established by another process
# -- failed, installed, or uninstalled -- on the next pass.
if lock is None:
self._requeue_task(task, install_status)
self._requeue_task(task)
continue
term_status.clear()
@@ -2111,7 +2286,7 @@ def install(self) -> None:
task.request.overwrite_time = time.time()
# Determine state of installation artifacts and adjust accordingly.
install_status.set_term_title(f"Preparing {pkg.name}")
self.progress.set_term_title(f"Preparing {pkg.name}")
self._prepare_for_install(task)
# Flag an already installed package
@@ -2123,7 +2298,7 @@ def install(self) -> None:
if lock is not None:
self._update_installed(task)
path = spack.util.path.debug_padded_filter(pkg.prefix)
_print_installed_pkg(path)
self.progress.set_installed(pkg, path)
else:
# At this point we've failed to get a write or a read
# lock, which means another process has taken a write
@@ -2134,7 +2309,7 @@ def install(self) -> None:
# established by the other process -- failed, installed,
# or uninstalled -- on the next pass.
self.installed.remove(pkg_id)
self._requeue_task(task, install_status)
self._requeue_task(task)
continue
# Having a read lock on an uninstalled pkg may mean another
@@ -2147,21 +2322,19 @@ def install(self) -> None:
# uninstalled -- on the next pass.
if ltype == "read":
lock.release_read()
self._requeue_task(task, install_status)
self._requeue_task(task)
continue
# Proceed with the installation since we have an exclusive write
# lock on the package.
install_status.set_term_title(f"Installing {pkg.name}")
self.progress.set_term_title(f"Installing {pkg.name}")
try:
action = self._install_action(task)
if action == InstallAction.INSTALL:
self._install_task(task, install_status)
self._install_task(task)
elif action == InstallAction.OVERWRITE:
# spack.store.STORE.db is not really a Database object, but a small
# wrapper -- silence mypy
OverwriteInstall(self, spack.store.STORE.db, task, install_status).install() # type: ignore[arg-type] # noqa: E501
self._overwrite_install_task(task)
# If we installed then we should keep the prefix
stop_before_phase = getattr(pkg, "stop_before_phase", None)
@@ -2176,20 +2349,6 @@ def install(self) -> None:
)
raise
except binary_distribution.NoChecksumException as exc:
if task.cache_only:
raise
# Checking hash on downloaded binary failed.
tty.error(
f"Failed to install {pkg.name} from binary cache due "
f"to {str(exc)}: Requeueing to install from source."
)
# this overrides a full method, which is ugly.
task.use_cache = False # type: ignore[misc]
self._requeue_task(task, install_status)
continue
except (Exception, SystemExit) as exc:
self._update_failed(task, True, exc)
@@ -2225,7 +2384,12 @@ def install(self) -> None:
# Perform basic task cleanup for the installed spec to
# include downgrading the write to a read lock
if pkg.spec.installed:
self._cleanup_task(pkg)
# Do not clean up if this was an overwrite that wasn't completed
overwrite = spec.dag_hash() in task.request.overwrite
rec = spack.store.STORE.db.get_record(pkg.spec)
incomplete = task.request.overwrite_time > rec.installation_time
if not (overwrite and incomplete):
self._cleanup_task(pkg)
# Cleanup, which includes releasing all of the read locks
self._cleanup_all_tasks()
@@ -2377,7 +2541,6 @@ def run(self) -> bool:
print_install_test_log(self.pkg)
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
_print_installed_pkg(self.pkg.prefix)
# preserve verbosity across runs
return self.echo
@@ -2523,39 +2686,22 @@ def deprecate(spec: "spack.spec.Spec", deprecator: "spack.spec.Spec", link_fn) -
link_fn(deprecator.prefix, spec.prefix)
class OverwriteInstall:
def __init__(
self,
installer: PackageInstaller,
database: spack.database.Database,
task: Task,
install_status: InstallStatus,
):
self.installer = installer
self.database = database
self.task = task
self.install_status = install_status
class Requeue(Exception):
"""Raised when we need an error to indicate a requeueing situation.
def install(self):
"""
Try to run the install task overwriting the package prefix.
If this fails, try to recover the original install prefix. If that fails
too, mark the spec as uninstalled. This function always raises the original
install error if installation fails.
"""
try:
with fs.replace_directory_transaction(self.task.pkg.prefix):
self.installer._install_task(self.task, self.install_status)
except fs.CouldNotRestoreDirectoryBackup as e:
self.database.remove(self.task.pkg.spec)
tty.error(
f"Recovery of install dir of {self.task.pkg.name} failed due to "
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
"The spec is now uninstalled."
)
While this is raised and caught, it does not represent an Error."""
# Unwrap the actual installation exception.
raise e.inner_exception
class InstallError(spack.error.SpackError):
"""Raised when something goes wrong during install or uninstall.
The error can be annotated with a ``pkg`` attribute to allow the
caller to get the package for which the exception was raised.
"""
def __init__(self, message, long_msg=None, pkg=None):
super().__init__(message, long_msg)
self.pkg = pkg
class BadInstallPhase(spack.error.InstallError):

View File

@@ -22,22 +22,9 @@
import textwrap
import time
import traceback
from typing import (
Any,
Callable,
Dict,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal, final
from typing_extensions import Literal
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
@@ -1394,75 +1381,6 @@ def command(self) -> spack.util.executable.Executable:
return spack.util.executable.Executable(path)
raise RuntimeError(f"Unable to locate {self.spec.name} command in {self.home.bin}")
def find_headers(
self, *, features: Sequence[str] = (), virtual: Optional[str] = None
) -> fsys.HeaderList:
"""Return the header list for this package based on the query. This method can be
overridden by individual packages to return package specific headers.
Args:
features: query argument to filter or extend the header list.
virtual: when set, return headers relevant for the virtual provided by this package.
Raises:
spack.error.NoHeadersError: if there was an error locating the headers.
"""
spec = self.spec
home = self.home
headers = fsys.find_headers("*", root=home.include, recursive=True)
if headers:
return headers
raise spack.error.NoHeadersError(f"Unable to locate {spec.name} headers in {home}")
def find_libs(
    self, *, features: Sequence[str] = (), virtual: Optional[str] = None
) -> fsys.LibraryList:
    """Return the library list for this package based on the query.

    Individual packages may override this method to return package
    specific libraries.

    Args:
        features: query argument to filter or extend the library list.
        virtual: when set, return libraries relevant for the virtual
            provided by this package.

    Raises:
        spack.error.NoLibrariesError: if there was an error locating the libraries.
    """
    home = self.home
    # 'find_libraries' supports glob characters: a package named 'abc-abc'
    # may install either 'libabc-abc.so' or 'libabc_abc.so', and the
    # pattern 'abc?abc' matches both spellings.
    pattern = self.spec.name.replace("-", "?")

    # Avoid double 'lib' for packages whose names already start with lib
    if not pattern.startswith("lib") and not self.spec.satisfies("platform=windows"):
        pattern = "lib" + pattern

    # '+shared' restricts the search to shared libraries and '~shared' to
    # static ones; with no explicit value, prefer shared then fall back to
    # static.
    if "+shared" in self.spec:
        shared_options = [True]
    elif "~shared" in self.spec:
        shared_options = [False]
    else:
        shared_options = [True, False]

    for shared in shared_options:
        # Link libraries only; on Windows this restricts the search to
        # ".Lib" import libraries used for implicit linking.
        found = fsys.find_libraries(pattern, home, shared=shared, recursive=True, runtime=False)
        if found:
            return found

    raise spack.error.NoLibrariesError(
        f"Unable to recursively locate {self.spec.name} libraries in {home}"
    )
@final
def query_headers(self, name: str, *, features: Sequence[str] = ()) -> fsys.HeaderList:
    """Returns the header list for a dependency ``name``."""
    dep, dep_is_virtual = self.spec._get_dependency_by_name(name)
    # Forward the virtual name only when the dependency was reached via a
    # virtual, so the provider can specialize its answer.
    return dep.package.find_headers(features=features, virtual=name if dep_is_virtual else None)
@final
def query_libs(self, name: str, *, features: Sequence[str] = ()) -> fsys.LibraryList:
    """Returns the library list for a dependency ``name``."""
    dep, dep_is_virtual = self.spec._get_dependency_by_name(name)
    # Forward the virtual name only when the dependency was reached via a
    # virtual, so the provider can specialize its answer.
    return dep.package.find_libs(features=features, virtual=name if dep_is_virtual else None)
def url_version(self, version):
"""
Given a version, this returns a string that should be substituted

View File

@@ -101,17 +101,26 @@ def wrapper(instance, *args, **kwargs):
# installed explicitly will also be installed as a
# dependency of another spec. In this case append to both
# spec reports.
added = []
for current_spec in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
name = name_fmt.format(current_spec.name, current_spec.dag_hash(length=7))
try:
item = next((x for x in self.specs if x["name"] == name))
item["packages"].append(package)
added.append(item)
except StopIteration:
pass
start_time = time.time()
try:
value = wrapped_fn(instance, *args, **kwargs)
# If we are requeuing the task, it neither succeeded nor failed
# remove the package so we don't count it (yet) in either category
if value in spack.installer.requeue_results:
for item in added:
item["packages"].remove(package)
package["stdout"] = self.fetch_log(pkg)
package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
self.on_success(pkg, kwargs, package)

View File

@@ -3005,10 +3005,6 @@ def setup(
# Fail if we already know an unreachable node is requested
for spec in specs:
# concrete roots don't need their dependencies verified
if spec.concrete:
continue
missing_deps = [
str(d)
for d in spec.traverse()

View File

@@ -153,8 +153,7 @@
r"(})?" # finish format string with non-escaped close brace }, or missing if not present
r"|"
# OPTION 3: mismatched close brace (option 2 would consume a matched open brace)
r"(})" # brace
r")",
r"(})" r")", # brace
re.IGNORECASE,
)
@@ -663,9 +662,11 @@ def versions(self):
def display_str(self):
"""Equivalent to {compiler.name}{@compiler.version} for Specs, without extra
@= for readability."""
if self.versions != vn.any_version:
return self.spec.format("{name}{@version}")
return self.spec.format("{name}")
if self.spec.concrete:
return f"{self.name}@{self.version}"
elif self.versions != vn.any_version:
return f"{self.name}@{self.versions}"
return self.name
def __lt__(self, other):
if not isinstance(other, CompilerSpec):
@@ -1070,26 +1071,123 @@ def clear(self):
self.edges.clear()
def _headers_default_handler(spec: "Spec"):
    """Fallback handler used when a package defines no 'headers' attribute.

    Recursively searches for ``*.h`` files starting from
    ``spec.package.home.include``.

    Parameters:
        spec: spec that is being queried

    Returns:
        HeaderList: the headers found in ``prefix.include``

    Raises:
        NoHeadersError: if no headers are found
    """
    home = spec.package.home
    found = fs.find_headers("*", root=home.include, recursive=True)
    if not found:
        raise spack.error.NoHeadersError(f"Unable to locate {spec.name} headers in {home}")
    return found
def _libs_default_handler(spec: "Spec"):
    """Fallback handler used when a package defines no 'libs' attribute.

    Recursively searches for ``lib{spec.name}`` starting from
    ``spec.package.home``; if ``spec.name`` already starts with ``lib``, the
    search uses ``{spec.name}`` as-is.

    Parameters:
        spec: spec that is being queried

    Returns:
        LibraryList: the libraries that were found

    Raises:
        NoLibrariesError: if no libraries are found
    """
    # 'find_libraries' supports glob characters. For a package named
    # 'abc-abc' we cannot tell whether it installs 'libabc_abc.so' or
    # 'libabc-abc.so', so the pattern 'libabc?abc' matches whichever exists.
    # (Something like 'libabcXabc.so' could match too, but that is considered
    # unlikely.)
    pattern = spec.name.replace("-", "?")
    home = spec.package.home

    # Avoid double 'lib' for packages whose names already start with lib
    if not pattern.startswith("lib") and not spec.satisfies("platform=windows"):
        pattern = "lib" + pattern

    # '+shared' restricts the search to shared libraries and '~shared' to
    # static ones; with no explicit value, prefer shared then fall back to
    # static.
    if "+shared" in spec:
        shared_options = [True]
    elif "~shared" in spec:
        shared_options = [False]
    else:
        shared_options = [True, False]

    for shared in shared_options:
        # Link libraries only; on Windows this restricts the search to
        # ".Lib" import libraries used for implicit linking.
        found = fs.find_libraries(pattern, home, shared=shared, recursive=True, runtime=False)
        if found:
            return found

    raise spack.error.NoLibrariesError(
        f"Unable to recursively locate {spec.name} libraries in {home}"
    )
class ForwardQueryToPackage:
"""Descriptor used to forward queries from Spec to Package"""
def __init__(self, attribute_name: str, _indirect: bool = False) -> None:
def __init__(
self,
attribute_name: str,
default_handler: Optional[Callable[["Spec"], Any]] = None,
_indirect: bool = False,
) -> None:
"""Create a new descriptor.
Parameters:
attribute_name: name of the attribute to be searched for in the Package instance
default_handler: default function to be called if the attribute was not found in the
Package instance
_indirect: temporarily added to redirect a query to another package.
"""
self.attribute_name = attribute_name
self.default = default_handler
self.indirect = _indirect
def __get__(self, instance: "SpecBuildInterface", cls):
"""Retrieves the property from Package using a well defined chain of responsibility.
"""Retrieves the property from Package using a well defined chain
of responsibility.
The call order is:
The order of call is:
1. `pkg.{virtual_name}_{attribute_name}` if the query is for a virtual package
2. `pkg.{attribute_name}` otherwise
1. if the query was through the name of a virtual package try to
search for the attribute `{virtual_name}_{attribute_name}`
in Package
2. try to search for attribute `{attribute_name}` in Package
3. try to call the default handler
The first call that produces a value will stop the chain.
If no call can handle the request then AttributeError is raised with a
message indicating that no relevant attribute exists.
If a call returns None, an AttributeError is raised with a message
indicating a query failure, e.g. that library files were not found in a
'libs' query.
"""
# TODO: this indirection exist solely for `spec["python"].command` to actually return
# spec["python-venv"].command. It should be removed when `python` is a virtual.
@@ -1105,36 +1203,61 @@ def __get__(self, instance: "SpecBuildInterface", cls):
_ = instance.wrapped_obj[instance.wrapped_obj.name] # NOQA: ignore=F841
query = instance.last_query
# First try the deprecated attributes (e.g. `<virtual>_libs` and `libs`)
callbacks_chain = []
# First in the chain : specialized attribute for virtual packages
if query.isvirtual:
deprecated_attrs = [f"{query.name}_{self.attribute_name}", self.attribute_name]
else:
deprecated_attrs = [self.attribute_name]
specialized_name = "{0}_{1}".format(query.name, self.attribute_name)
callbacks_chain.append(lambda: getattr(pkg, specialized_name))
# Try to get the generic method from Package
callbacks_chain.append(lambda: getattr(pkg, self.attribute_name))
# Final resort : default callback
if self.default is not None:
_default = self.default # make mypy happy
callbacks_chain.append(lambda: _default(instance.wrapped_obj))
for attr in deprecated_attrs:
if not hasattr(pkg, attr):
continue
value = getattr(pkg, attr)
# Deprecated properties can return None to indicate the query failed.
if value is None:
raise AttributeError(
f"Query of package '{pkg.name}' for '{self.attribute_name}' failed\n"
f"\tprefix : {instance.prefix}\n" # type: ignore[attr-defined]
f"\tspec : {instance}\n"
f"\tqueried as : {query.name}\n"
f"\textra parameters : {query.extra_parameters}"
)
return value
# Then try the new functions (e.g. `find_libs`).
features = query.extra_parameters
virtual = query.name if query.isvirtual else None
if self.attribute_name == "libs":
return pkg.find_libs(features=features, virtual=virtual)
elif self.attribute_name == "headers":
return pkg.find_headers(features=features, virtual=virtual)
raise AttributeError(f"Package {pkg.name} has no attribute {self.attribute_name}")
# Trigger the callbacks in order, the first one producing a
# value wins
value = None
message = None
for f in callbacks_chain:
try:
value = f()
# A callback can return None to trigger an error indicating
# that the query failed.
if value is None:
msg = "Query of package '{name}' for '{attrib}' failed\n"
msg += "\tprefix : {spec.prefix}\n"
msg += "\tspec : {spec}\n"
msg += "\tqueried as : {query.name}\n"
msg += "\textra parameters : {query.extra_parameters}"
message = msg.format(
name=pkg.name,
attrib=self.attribute_name,
spec=instance,
query=instance.last_query,
)
else:
return value
break
except AttributeError:
pass
# value is 'None'
if message is not None:
# Here we can use another type of exception. If we do that, the
# unit test 'test_getitem_exceptional_paths' in the file
# lib/spack/spack/test/spec_dag.py will need to be updated to match
# the type.
raise AttributeError(message)
# 'None' value at this point means that there are no appropriate
# properties defined and no default handler, or that all callbacks
# raised AttributeError. In this case, we raise AttributeError with an
# appropriate message.
fmt = "'{name}' package has no relevant attribute '{query}'\n"
fmt += "\tspec : '{spec}'\n"
fmt += "\tqueried as : '{spec.last_query.name}'\n"
fmt += "\textra parameters : '{spec.last_query.extra_parameters}'\n"
message = fmt.format(name=pkg.name, query=self.attribute_name, spec=instance)
raise AttributeError(message)
def __set__(self, instance, value):
cls_name = type(instance).__name__
@@ -1148,10 +1271,10 @@ def __set__(self, instance, value):
class SpecBuildInterface(lang.ObjectWrapper):
# home is available in the base Package so no default is needed
home = ForwardQueryToPackage("home")
headers = ForwardQueryToPackage("headers")
libs = ForwardQueryToPackage("libs")
command = ForwardQueryToPackage("command", _indirect=True)
home = ForwardQueryToPackage("home", default_handler=None)
headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
command = ForwardQueryToPackage("command", default_handler=None, _indirect=True)
def __init__(
self,
@@ -2556,7 +2679,7 @@ def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
return name, depflag
def spec_and_dependency_types(
s: Union[Spec, Tuple[Spec, str]],
s: Union[Spec, Tuple[Spec, str]]
) -> Tuple[Spec, dt.DepFlag]:
"""Given a non-string key in the literal, extracts the spec
and its dependency types.
@@ -3520,21 +3643,6 @@ def version(self):
raise spack.error.SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
def _get_dependency_by_name(self, name: str) -> Tuple["Spec", bool]:
    """Get a dependency by package name or virtual.

    Returns a tuple with the matching spec and a boolean indicating whether
    the spec was reached through a virtual. Raises a KeyError if the
    dependency is not found.
    """
    # Candidate edges: all direct dependencies first, then transitive
    # runtime (link/run) dependencies in breadth-first order.
    candidates = itertools.chain(
        self.edges_to_dependencies(depflag=dt.BUILD | dt.TEST),
        self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
    )
    for edge in candidates:
        # Match either by concrete package name or by a provided virtual.
        if edge.spec.name == name or name in edge.virtuals:
            return edge.spec, name in edge.virtuals
    raise KeyError(f"No spec with name {name} in {self}")
def __getitem__(self, name: str):
"""Get a dependency from the spec by its name. This call implicitly
sets a query state in the package being retrieved. The behavior of
@@ -3555,14 +3663,23 @@ def __getitem__(self, name: str):
csv = query_parameters.pop().strip()
query_parameters = re.split(r"\s*,\s*", csv)
spec, is_virtual = self._get_dependency_by_name(name)
# Consider all direct dependencies and transitive runtime dependencies
order = itertools.chain(
self.edges_to_dependencies(depflag=dt.BUILD | dt.TEST),
self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
)
try:
edge = next((e for e in order if e.spec.name == name or name in e.virtuals))
except StopIteration as e:
raise KeyError(f"No spec with name {name} in {self}") from e
if self._concrete:
return SpecBuildInterface(
spec, name, query_parameters, _parent=self, is_virtual=is_virtual
edge.spec, name, query_parameters, _parent=self, is_virtual=name in edge.virtuals
)
return spec
return edge.spec
def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec.
@@ -5033,6 +5150,21 @@ def eval_conditional(string):
return eval(string, valid_variables)
class DagCountVisitor:
    """Class for counting the number of specs encountered during traversal.

    Implements the traversal visitor protocol (``accept``/``neighbors``):
    every visited item is counted, and only edges matching the configured
    dependency types are expanded.
    """

    def __init__(self, depflag: int):
        # Dependency-type flags used to select which edges to follow.
        self.depflag: int = depflag
        # Running tally of items accepted so far.
        self.number: int = 0

    def accept(self, item: spack.traverse.EdgeAndDepth) -> bool:
        # Count every item and always continue the traversal.
        self.number += 1
        return True

    def neighbors(self, item: spack.traverse.EdgeAndDepth):
        # Expand only the dependency edges matching the configured flags.
        return item.edge.spec.edges_to_dependencies(depflag=self.depflag)
class SpecParseError(spack.error.SpecError):
"""Wrapper for ParseError for when we're parsing specs."""

View File

@@ -231,13 +231,13 @@ def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
uninstall_cmd("-y", "--dependents", gspec.name)
# Test installing from build caches
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)
# This gives warning that spec is already installed
buildcache_cmd("install", "-u", cspec.name)
buildcache_cmd("install", "-uo", cspec.name)
# Test overwrite install
buildcache_cmd("install", "-fu", cspec.name)
buildcache_cmd("install", "-fuo", cspec.name)
buildcache_cmd("keys", "-f")
buildcache_cmd("list")
@@ -263,10 +263,10 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
# Install some packages with dependent packages
# test install in non-default install path scheme
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)
# Test force install in non-default install path scheme
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
@pytest.mark.requires_executables(*required_executables)
@@ -288,19 +288,19 @@ def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
cspec = spack.concretize.concretize_one("corge")
# Install buildcache created with relativized rpaths
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
# This gives warning that spec is already installed
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)
# Install build cache
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
# Test overwrite install
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
@pytest.mark.requires_executables(*required_executables)
@@ -317,7 +317,7 @@ def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
cspec = spack.concretize.concretize_one("corge")
# Test install in non-default install path scheme and relative path
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
def test_push_and_fetch_keys(mock_gnupghome, tmp_path):

View File

@@ -56,33 +56,14 @@ def test_build_request_strings(install_mockery):
@pytest.mark.parametrize(
"package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes",
[
(False, False, dt.BUILD | dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
(True, False, dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
(False, True, dt.BUILD | dt.LINK | dt.RUN, dt.LINK | dt.RUN),
(True, True, dt.LINK | dt.RUN, dt.LINK | dt.RUN),
],
"include_build_deps,deptypes", [(True, dt.BUILD | dt.LINK | dt.RUN), (False, dt.LINK | dt.RUN)]
)
def test_build_request_deptypes(
install_mockery,
package_cache_only,
dependencies_cache_only,
package_deptypes,
dependencies_deptypes,
):
def test_build_request_deptypes(install_mockery, include_build_deps, deptypes):
s = spack.concretize.concretize_one("dependent-install")
build_request = inst.BuildRequest(
s.package,
{
"package_cache_only": package_cache_only,
"dependencies_cache_only": dependencies_cache_only,
},
)
build_request = inst.BuildRequest(s.package, {"include_build_deps": include_build_deps})
actual_package_deptypes = build_request.get_depflags(s.package)
actual_dependency_deptypes = build_request.get_depflags(s["dependency-install"].package)
package_deptypes = build_request.get_depflags(s.package)
dependency_deptypes = build_request.get_depflags(s["dependency-install"].package)
assert actual_package_deptypes == package_deptypes
assert actual_dependency_deptypes == dependencies_deptypes
assert package_deptypes == dependency_deptypes == deptypes

View File

@@ -32,7 +32,7 @@ def repro_dir(tmp_path):
def test_get_added_versions_new_checksum(mock_git_package_changes):
repo, filename, commits = mock_git_package_changes
repo_path, filename, commits = mock_git_package_changes
checksum_versions = {
"3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
@@ -41,7 +41,7 @@ def test_get_added_versions_new_checksum(mock_git_package_changes):
"86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
}
with fs.working_dir(repo.packages_path):
with fs.working_dir(str(repo_path)):
added_versions = ci.get_added_versions(
checksum_versions, filename, from_ref=commits[-1], to_ref=commits[-2]
)
@@ -50,7 +50,7 @@ def test_get_added_versions_new_checksum(mock_git_package_changes):
def test_get_added_versions_new_commit(mock_git_package_changes):
repo, filename, commits = mock_git_package_changes
repo_path, filename, commits = mock_git_package_changes
checksum_versions = {
"74253725f884e2424a0dd8ae3f69896d5377f325": Version("2.1.6"),
@@ -60,9 +60,9 @@ def test_get_added_versions_new_commit(mock_git_package_changes):
"86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
}
with fs.working_dir(repo.packages_path):
with fs.working_dir(str(repo_path)):
added_versions = ci.get_added_versions(
checksum_versions, filename, from_ref=commits[-2], to_ref=commits[-3]
checksum_versions, filename, from_ref=commits[2], to_ref=commits[1]
)
assert len(added_versions) == 1
assert added_versions[0] == Version("2.1.6")

View File

@@ -1978,13 +1978,6 @@ def test_ci_validate_git_versions_invalid(
assert f"Invalid commit for diff-test@{version}" in err
def mock_packages_path(path):
def packages_path():
return path
return packages_path
@pytest.fixture
def verify_standard_versions_valid(monkeypatch):
def validate_standard_versions(pkg, versions):
@@ -2031,12 +2024,9 @@ def test_ci_verify_versions_valid(
mock_git_package_changes,
verify_standard_versions_valid,
verify_git_versions_valid,
tmpdir,
):
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)
monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
repo_path, _, commits = mock_git_package_changes
monkeypatch.setattr(spack.paths, "prefix", repo_path)
out = ci_cmd("verify-versions", commits[-1], commits[-3])
assert "Validated diff-test@2.1.5" in out
@@ -2050,10 +2040,9 @@ def test_ci_verify_versions_standard_invalid(
verify_standard_versions_invalid,
verify_git_versions_invalid,
):
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)
repo_path, _, commits = mock_git_package_changes
monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
monkeypatch.setattr(spack.paths, "prefix", repo_path)
out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
assert "Invalid checksum found diff-test@2.1.5" in out
@@ -2061,10 +2050,8 @@ def test_ci_verify_versions_standard_invalid(
def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes):
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)
monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
repo_path, _, commits = mock_git_package_changes
monkeypatch.setattr(spack.paths, "prefix", repo_path)
pkg_class = spack.spec.Spec("diff-test").package_class
monkeypatch.setattr(pkg_class, "manual_download", True)

View File

@@ -243,11 +243,13 @@ def latest_commit():
@pytest.fixture
def mock_git_package_changes(git, tmpdir, override_git_repos_cache_path, monkeypatch):
def mock_git_package_changes(git, tmpdir, override_git_repos_cache_path):
"""Create a mock git repo with known structure of package edits
The structure of commits in this repo is as follows::
o diff-test: modification to make manual download package
|
o diff-test: add v1.2 (from a git ref)
|
o diff-test: add v1.1 (from source tarball)
@@ -259,12 +261,8 @@ def mock_git_package_changes(git, tmpdir, override_git_repos_cache_path, monkeyp
Important attributes of the repo for test coverage are: multiple package
versions are added with some coming from a tarball and some from git refs.
"""
filename = "diff-test/package.py"
repo_path, _ = spack.repo.create_repo(str(tmpdir.mkdir("myrepo")))
repo_cache = spack.util.file_cache.FileCache(str(tmpdir.mkdir("cache")))
repo = spack.repo.Repo(repo_path, cache=repo_cache)
repo_path = str(tmpdir.mkdir("git_package_changes_repo"))
filename = "var/spack/repos/builtin/packages/diff-test/package.py"
def commit(message):
global commit_counter
@@ -278,7 +276,7 @@ def commit(message):
)
commit_counter += 1
with working_dir(repo.packages_path):
with working_dir(repo_path):
git("init")
git("config", "user.name", "Spack")
@@ -309,11 +307,17 @@ def latest_commit():
commit("diff-test: add v2.1.6")
commits.append(latest_commit())
# convert pkg-a to a manual download package
shutil.copy2(f"{spack.paths.test_path}/data/conftest/diff-test/package-3.txt", filename)
git("add", filename)
commit("diff-test: modification to make manual download package")
commits.append(latest_commit())
# The commits are ordered with the last commit first in the list
commits = list(reversed(commits))
# Return the git directory to install, the filename used, and the commits
yield repo, filename, commits
yield repo_path, filename, commits
@pytest.fixture(autouse=True)

View File

@@ -0,0 +1,23 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class DiffTest(AutotoolsPackage):
    """zlib replacement with optimizations for next generation systems."""

    homepage = "https://github.com/zlib-ng/zlib-ng"
    url = "https://github.com/zlib-ng/zlib-ng/archive/2.0.0.tar.gz"
    git = "https://github.com/zlib-ng/zlib-ng.git"

    license("Zlib")

    # The archive must be fetched by hand; Spack will not download it.
    manual_download = True

    # NOTE(review): 2.1.6 is pinned to a git tag/commit while the other
    # versions come from checksummed source tarballs — presumably to cover
    # both version styles in the diff-test fixtures; confirm against the
    # mock_git_package_changes fixture.
    version("2.1.6", tag="2.1.6", commit="74253725f884e2424a0dd8ae3f69896d5377f325")
    version("2.1.5", sha256="3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04")
    version("2.1.4", sha256="a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a")
    version("2.0.7", sha256="6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200")
    version("2.0.0", sha256="86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8")

View File

@@ -28,7 +28,7 @@
import spack.spec
import spack.store
import spack.util.lock as lk
from spack.installer import PackageInstaller
import spack.util.spack_json as sjson
from spack.main import SpackCommand
@@ -77,6 +77,13 @@ def create_build_task(
return inst.BuildTask(pkg, request=request, status=inst.BuildStatus.QUEUED)
def create_install_task(
pkg: spack.package_base.PackageBase, install_args: Optional[dict] = None
) -> inst.InstallTask:
request = inst.BuildRequest(pkg, {} if install_args is None else install_args)
return inst.InstallTask(pkg, request=request, status=inst.BuildStatus.QUEUED)
def create_installer(
specs: Union[List[str], List[spack.spec.Spec]], install_args: Optional[dict] = None
) -> inst.PackageInstaller:
@@ -116,19 +123,15 @@ def test_install_msg(monkeypatch):
install_msg = "Installing {0}".format(name)
monkeypatch.setattr(tty, "_debug", 0)
assert inst.install_msg(name, pid, None) == install_msg
install_status = inst.InstallStatus(1)
expected = "{0} [0/1]".format(install_msg)
assert inst.install_msg(name, pid, install_status) == expected
assert inst.install_msg(name, pid) == install_msg
monkeypatch.setattr(tty, "_debug", 1)
assert inst.install_msg(name, pid, None) == install_msg
assert inst.install_msg(name, pid) == install_msg
# Expect the PID to be added at debug level 2
monkeypatch.setattr(tty, "_debug", 2)
expected = "{0}: {1}".format(pid, install_msg)
assert inst.install_msg(name, pid, None) == expected
assert inst.install_msg(name, pid) == expected
def test_install_from_cache_errors(install_mockery):
@@ -140,13 +143,15 @@ def test_install_from_cache_errors(install_mockery):
with pytest.raises(
spack.error.InstallError, match="No binary found when cache-only was specified"
):
PackageInstaller(
inst.PackageInstaller(
[spec.package], package_cache_only=True, dependencies_cache_only=True
).install()
assert not spec.package.installed_from_binary_cache
# Check when don't expect to install only from binary cache
assert not inst._install_from_cache(spec.package, explicit=True, unsigned=False)
assert not inst._install_from_cache(
spec.package, inst.InstallerProgress([spec.package]), explicit=True, unsigned=False
)
assert not spec.package.installed_from_binary_cache
@@ -156,7 +161,9 @@ def test_install_from_cache_ok(install_mockery, monkeypatch):
monkeypatch.setattr(inst, "_try_install_from_binary_cache", _true)
monkeypatch.setattr(spack.hooks, "post_install", _noop)
assert inst._install_from_cache(spec.package, explicit=True, unsigned=False)
assert inst._install_from_cache(
spec.package, inst.InstallerProgress([spec.package]), explicit=True, unsigned=False
)
def test_process_external_package_module(install_mockery, monkeypatch, capfd):
@@ -221,54 +228,6 @@ def test_installer_str(install_mockery):
assert "failed (0)" in istr
def test_installer_prune_built_build_deps(install_mockery, monkeypatch, tmpdir):
r"""
Ensure that build dependencies of installed deps are pruned
from installer package queues.
(a)
/ \
/ \
(b) (c) <--- is installed already so we should
\ / | \ prune (f) from this install since
\ / | \ it is *only* needed to build (b)
(d) (e) (f)
Thus since (c) is already installed our build_pq dag should
only include four packages. [(a), (b), (c), (d), (e)]
"""
@property
def _mock_installed(self):
return self.name == "pkg-c"
# Mock the installed property to say that (b) is installed
monkeypatch.setattr(spack.spec.Spec, "installed", _mock_installed)
# Create mock repository with packages (a), (b), (c), (d), and (e)
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock-repo"))
builder.add_package("pkg-a", dependencies=[("pkg-b", "build", None), ("pkg-c", "build", None)])
builder.add_package("pkg-b", dependencies=[("pkg-d", "build", None)])
builder.add_package(
"pkg-c",
dependencies=[("pkg-d", "build", None), ("pkg-e", "all", None), ("pkg-f", "build", None)],
)
builder.add_package("pkg-d")
builder.add_package("pkg-e")
builder.add_package("pkg-f")
with spack.repo.use_repositories(builder.root):
installer = create_installer(["pkg-a"])
installer._init_queue()
# Assert that (c) is not in the build_pq
result = {task.pkg_id[:5] for _, task in installer.build_pq}
expected = {"pkg-a", "pkg-b", "pkg-c", "pkg-d", "pkg-e"}
assert result == expected
def test_check_before_phase_error(install_mockery):
s = spack.concretize.concretize_one("trivial-install-test-package")
s.package.stop_before_phase = "beforephase"
@@ -605,7 +564,7 @@ def test_check_deps_status_external(install_mockery, monkeypatch):
monkeypatch.setattr(spack.spec.Spec, "external", True)
installer._check_deps_status(request)
for dep in request.spec.traverse(root=False):
for dep in request.spec.traverse(root=False, deptype=request.get_depflags(request.spec)):
assert inst.package_id(dep) in installer.installed
@@ -617,7 +576,7 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True)
installer._check_deps_status(request)
for dep in request.spec.traverse(root=False):
for dep in request.spec.traverse(root=False, deptype=request.get_depflags(request.spec)):
assert inst.package_id(dep) in installer.installed
@@ -668,12 +627,13 @@ def test_install_spliced_build_spec_installed(install_mockery, capfd, mock_fetch
# Do the splice.
out = spec.splice(dep, transitive)
PackageInstaller([out.build_spec.package]).install()
inst.PackageInstaller([out.build_spec.package]).install()
installer = create_installer([out], {"verbose": True, "fail_fast": True})
installer._init_queue()
for _, task in installer.build_pq:
assert isinstance(task, inst.RewireTask if task.pkg.spec.spliced else inst.BuildTask)
assert isinstance(task, inst.RewireTask if task.pkg.spec.spliced else inst.InstallTask)
installer.install()
for node in out.traverse():
assert node.installed
@@ -699,7 +659,7 @@ def test_install_splice_root_from_binary(
original_spec = spack.concretize.concretize_one(root_str)
spec_to_splice = spack.concretize.concretize_one("splice-h+foo")
PackageInstaller([original_spec.package, spec_to_splice.package]).install()
inst.PackageInstaller([original_spec.package, spec_to_splice.package]).install()
out = original_spec.splice(spec_to_splice, transitive)
@@ -716,7 +676,7 @@ def test_install_splice_root_from_binary(
uninstall = SpackCommand("uninstall")
uninstall("-ay")
PackageInstaller([out.package], unsigned=True).install()
inst.PackageInstaller([out.package], unsigned=True).install()
assert len(spack.store.STORE.db.query()) == len(list(out.traverse()))
@@ -724,10 +684,10 @@ def test_install_splice_root_from_binary(
def test_install_task_use_cache(install_mockery, monkeypatch):
installer = create_installer(["trivial-install-test-package"], {})
request = installer.build_requests[0]
task = create_build_task(request.pkg)
task = create_install_task(request.pkg)
monkeypatch.setattr(inst, "_install_from_cache", _true)
installer._install_task(task, None)
installer._install_task(task)
assert request.pkg_id in installer.installed
@@ -751,7 +711,7 @@ def _missing(*args, **kwargs):
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
monkeypatch.setattr(task, "execute", _missing)
installer._install_task(task, None)
installer._install_task(task)
# Ensure the dropped task/spec was added back by _install_task
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
@@ -799,7 +759,7 @@ def test_requeue_task(install_mockery, capfd):
# temporarily set tty debug messages on so we can test output
current_debug_level = tty.debug_level()
tty.set_debug(1)
installer._requeue_task(task, None)
installer._requeue_task(task)
tty.set_debug(current_debug_level)
ids = list(installer.build_tasks)
@@ -952,11 +912,11 @@ def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
assert "Skipping build of pkg-a" in out
def _interrupt(installer, task, install_status, **kwargs):
def _interrupt(installer, task, **kwargs):
if task.pkg.name == "pkg-a":
raise KeyboardInterrupt("mock keyboard interrupt for pkg-a")
else:
return installer._real_install_task(task, None)
return installer._real_install_task(task)
# installer.installed.add(task.pkg.name)
@@ -982,12 +942,12 @@ class MyBuildException(Exception):
pass
def _install_fail_my_build_exception(installer, task, install_status, **kwargs):
def _install_fail_my_build_exception(installer, task, **kwargs):
if task.pkg.name == "pkg-a":
raise MyBuildException("mock internal package build error for pkg-a")
else:
# No need for more complex logic here because no splices
task.execute(install_status)
task.execute(installer.progress)
installer._update_installed(task)
@@ -1072,8 +1032,8 @@ def test_install_lock_failures(install_mockery, monkeypatch, capfd):
"""Cover basic install lock failure handling in a single pass."""
# Note: this test relies on installing a package with no dependencies
def _requeued(installer, task, install_status):
tty.msg("requeued {0}".format(task.pkg.spec.name))
def _requeued(installer, task):
tty.msg(f"requeued {task.pkg.spec.name}")
installer = create_installer(["pkg-c"], {})
@@ -1106,7 +1066,7 @@ def _prep(installer, task):
# also do not allow the package to be locked again
monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
def _requeued(installer, task, install_status):
def _requeued(installer, task):
tty.msg(f"requeued {inst.package_id(task.pkg.spec)}")
# Flag the package as installed
@@ -1138,8 +1098,8 @@ def _prep(installer, task):
tty.msg("preparing {0}".format(task.pkg.spec.name))
assert task.pkg.spec.name not in installer.installed
def _requeued(installer, task, install_status):
tty.msg("requeued {0}".format(task.pkg.spec.name))
def _requeued(installer, task):
tty.msg(f"requeued {task.pkg.spec.name}")
# Force a read lock
monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _read)
@@ -1181,7 +1141,7 @@ def test_install_implicit(install_mockery, mock_fetch):
assert not create_build_task(pkg).explicit
def test_overwrite_install_backup_success(temporary_store, config, mock_packages, tmpdir):
def test_overwrite_install_backup_success(temporary_store, config, mock_packages, monkeypatch):
"""
When doing an overwrite install that fails, Spack should restore the backup
of the original prefix, and leave the original spec marked installed.
@@ -1196,11 +1156,12 @@ def test_overwrite_install_backup_success(temporary_store, config, mock_packages
installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
class InstallerThatWipesThePrefixDir:
def _install_task(self, task, install_status):
shutil.rmtree(task.pkg.prefix, ignore_errors=True)
fs.mkdirp(task.pkg.prefix)
raise Exception("Some fatal install error")
def _install_task(self, task):
shutil.rmtree(task.pkg.prefix, ignore_errors=True)
fs.mkdirp(task.pkg.prefix)
raise Exception("Some fatal install error")
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_task)
class FakeDatabase:
called = False
@@ -1208,46 +1169,25 @@ class FakeDatabase:
def remove(self, spec):
self.called = True
fake_installer = InstallerThatWipesThePrefixDir()
fake_db = FakeDatabase()
overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)
monkeypatch.setattr(spack.store.STORE, "db", FakeDatabase())
# Installation should throw the installation exception, not the backup
# failure.
with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
installer._overwrite_install_task(task)
# Make sure the package is not marked uninstalled and the original dir
# is back.
assert not fake_db.called
assert not spack.store.STORE.db.called
assert os.path.exists(installed_file)
def test_overwrite_install_backup_failure(temporary_store, config, mock_packages, tmpdir):
def test_overwrite_install_backup_failure(temporary_store, config, mock_packages, monkeypatch):
"""
When doing an overwrite install that fails, Spack should try to recover the
original prefix. If that fails, the spec is lost, and it should be removed
from the database.
"""
# Note: this test relies on installing a package with no dependencies
class InstallerThatAccidentallyDeletesTheBackupDir:
def _install_task(self, task, install_status):
# Remove the backup directory, which is at the same level as the prefix,
# starting with .backup
backup_glob = os.path.join(
os.path.dirname(os.path.normpath(task.pkg.prefix)), ".backup*"
)
for backup in glob.iglob(backup_glob):
shutil.rmtree(backup)
raise Exception("Some fatal install error")
class FakeDatabase:
called = False
def remove(self, spec):
self.called = True
# Get a build task. TODO: refactor this to avoid calling internal methods
installer = create_installer(["pkg-c"])
installer._init_queue()
@@ -1257,18 +1197,32 @@ def remove(self, spec):
installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
fake_installer = InstallerThatAccidentallyDeletesTheBackupDir()
fake_db = FakeDatabase()
overwrite_install = inst.OverwriteInstall(fake_installer, fake_db, task, None)
def _install_task(self, task):
# Remove the backup directory, which is at the same level as the prefix,
# starting with .backup
backup_glob = os.path.join(os.path.dirname(os.path.normpath(task.pkg.prefix)), ".backup*")
for backup in glob.iglob(backup_glob):
shutil.rmtree(backup)
raise Exception("Some fatal install error")
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install_task)
class FakeDatabase:
called = False
def remove(self, spec):
self.called = True
monkeypatch.setattr(spack.store.STORE, "db", FakeDatabase())
# Installation should throw the installation exception, not the backup
# failure.
with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
installer._overwrite_install_task(task)
# Make sure that `remove` was called on the database after an unsuccessful
# attempt to restore the backup.
assert fake_db.called
assert spack.store.STORE.db.called
def test_term_status_line():
@@ -1320,7 +1274,7 @@ def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, r
pkg = s.package
pkg.run_tests = run_tests
spack.installer.print_install_test_log(pkg)
inst.print_install_test_log(pkg)
out = capfd.readouterr()[0]
assert out == ""
@@ -1337,12 +1291,23 @@ def test_print_install_test_log_failures(
pkg.run_tests = True
pkg.tester.test_log_file = str(tmpdir.join("test-log.txt"))
pkg.tester.add_failure(AssertionError("test"), "test-failure")
spack.installer.print_install_test_log(pkg)
inst.print_install_test_log(pkg)
err = capfd.readouterr()[1]
assert "no test log file" in err
# Having test log results in path being output
fs.touch(pkg.tester.test_log_file)
spack.installer.print_install_test_log(pkg)
inst.print_install_test_log(pkg)
out = capfd.readouterr()[0]
assert "See test results at" in out
def test_specs_count(install_mockery, mock_packages):
"""Check SpecCounts DAG visitor total matches expected."""
spec = spack.spec.Spec("mpileaks^mpich").concretized()
counter = inst.SpecsCount(dt.LINK | dt.RUN | dt.BUILD)
number_specs = counter.total([spec])
json = sjson.load(spec.to_json())
number_spec_nodes = len(json["spec"]["nodes"])
assert number_specs == number_spec_nodes

View File

@@ -7,9 +7,9 @@
def test_modified_files(mock_git_package_changes):
repo, filename, commits = mock_git_package_changes
repo_path, filename, commits = mock_git_package_changes
with working_dir(repo.packages_path):
with working_dir(repo_path):
files = get_modified_files(from_ref="HEAD~1", to_ref="HEAD")
assert len(files) == 1
assert files[0] == filename

View File

@@ -15,8 +15,6 @@ class Glab(GoPackage):
license("MIT")
version("1.55.0", sha256="21f58698b92035461e8e8ba9040429f4b5a0f6d528d8333834ef522a973384c8")
version("1.54.0", sha256="99f5dd785041ad26c8463ae8630e98a657aa542a2bb02333d50243dd5cfdf9cb")
version("1.53.0", sha256="2930aa5dd76030cc6edcc33483bb49dd6a328eb531d0685733ca7be7b906e915")
version("1.52.0", sha256="585495e53d3994172fb927218627b7470678bc766320cb52f4b4204238677dde")
version("1.51.0", sha256="6a95d827004fee258aacb49a427875e3b505b063cc578933d965cd56481f5a19")
@@ -36,38 +34,20 @@ class Glab(GoPackage):
version("1.21.1", sha256="8bb35c5cf6b011ff14d1eaa9ab70ec052d296978792984250e9063b006ee4d50")
version("1.20.0", sha256="6beb0186fa50d0dea3b05fcfe6e4bc1f9be0c07aa5fa15b37ca2047b16980412")
with default_args(type="build"):
depends_on("go@1.24.1:", when="@1.54:")
depends_on("go@1.23.4:", when="@1.52:")
depends_on("go@1.23.2:", when="@1.48:")
depends_on("go@1.23.0:", when="@1.46:")
depends_on("go@1.22.5:", when="@1.44:")
depends_on("go@1.22.4:", when="@1.42:")
depends_on("go@1.22.3:", when="@1.41:")
depends_on("go@1.21.0:", when="@1.37:")
depends_on("go@1.19.0:", when="@1.35:")
depends_on("go@1.18.0:", when="@1.23:")
depends_on("go@1.17.0:", when="@1.22:")
depends_on("go@1.13.0:")
depends_on("go@1.13:", type="build")
depends_on("go@1.17:", type="build", when="@1.22:")
depends_on("go@1.18:", type="build", when="@1.23:")
depends_on("go@1.19:", type="build", when="@1.35:")
depends_on("go@1.21:", type="build", when="@1.37:")
depends_on("go@1.22.3:", type="build", when="@1.41:")
depends_on("go@1.22.4:", type="build", when="@1.42:")
depends_on("go@1.22.5:", type="build", when="@1.44:")
depends_on("go@1.23:", type="build", when="@1.46:")
depends_on("go@1.23.2:", type="build", when="@1.48:")
depends_on("go@1.23.4:", type="build", when="@1.52:")
build_directory = "cmd/glab"
# Required to correctly set the version
# https://gitlab.com/gitlab-org/cli/-/blob/v1.55.0/Makefile?ref_type=tags#L44
@property
def build_args(self):
extra_ldflags = [f"-X 'main.version=v{self.version}'"]
args = super().build_args
if "-ldflags" in args:
ldflags_index = args.index("-ldflags") + 1
args[ldflags_index] = args[ldflags_index] + " " + " ".join(extra_ldflags)
else:
args.extend(["-ldflags", " ".join(extra_ldflags)])
return args
@run_after("install")
def install_completions(self):
glab = Executable(self.prefix.bin.glab)

View File

@@ -14,14 +14,12 @@ class GtkDoc(AutotoolsPackage):
pdf/man-pages with some extra work."""
homepage = "https://wiki.gnome.org/DocumentationProject/GtkDoc"
url = "https://download.gnome.org/sources/gtk-doc/1.33/gtk-doc-1.33.2.tar.xz"
list_url = "https://download.gnome.org/sources/gtk-doc/"
list_depth = 1
url = "https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/1.33.2/gtk-doc-1.33.2.tar.gz"
license("GPL-2.0-or-later AND GFDL-1.1-or-later")
version("1.33.2", sha256="cc1b709a20eb030a278a1f9842a362e00402b7f834ae1df4c1998a723152bf43")
version("1.32", sha256="de0ef034fb17cb21ab0c635ec730d19746bce52984a6706e7bbec6fb5e0b907c")
version("1.33.2", sha256="2d1b0cbd26edfcb54694b2339106a02a81d630a7dedc357461aeb186874cc7c0")
version("1.32", sha256="0890c1f00d4817279be51602e67c4805daf264092adc58f9c04338566e8225ba")
depends_on("c", type="build") # generated
@@ -62,8 +60,14 @@ def installcheck(self):
pass
def url_for_version(self, version):
url = "https://download.gnome.org/sources/gtk-doc/{0}/gtk-doc-{1}.tar.xz"
return url.format(version.up_to(2), version)
"""Handle gnome's version-based custom URLs."""
if version <= Version("1.32"):
url = "https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/GTK_DOC_{0}/gtk-doc-GTK_DOC_{0}.tar.gz"
return url.format(version.underscored)
url = "https://gitlab.gnome.org/GNOME/gtk-doc/-/archive/{0}/gtk-doc-{0}.tar.gz"
return url.format(version)
def configure_args(self):
args = ["--with-xml-catalog={0}".format(self["docbook-xml"].catalog)]