Lock, database and store don't need global configuration on construction (#33495)

Lock objects can now be instantiated independently,
without being tied to the global configuration. The
same is true for database and store objects.

The database __init__ method has been simplified to
take a single lock configuration object. Some common
lock configurations (e.g. NO_LOCK or NO_TIMEOUT) have
been named and are provided as globals.
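
As a sketch, the new constructor looks like this (the root
path is a placeholder; the names are the ones introduced in
the diff below):

    import spack.database

    # a database that performs no locking at all, e.g. for read-only tooling
    db = spack.database.Database("/tmp/store", lock_cfg=spack.database.NO_LOCK)

    # or an explicit configuration via the LockConfiguration named tuple
    cfg = spack.database.LockConfiguration(
        enable=True, database_timeout=120, package_timeout=None
    )
    db = spack.database.Database("/tmp/store", lock_cfg=cfg)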

The use_store context manager keeps the configuration
consistent by pushing and popping an internal scope.
It can also be tuned by passing extra data, e.g. to
set up upstreams or anything else related to the store.
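
A sketch of the tuned context manager (the path and the
extra key are illustrative; ``extra_data`` is merged under
``config:install_tree``):

    import spack.store

    with spack.store.use_store("/tmp/store", extra_data={"padded_length": 128}) as s:
        # inside the block the global store, db and layout point at /tmp/store,
        # configured through a temporary internal configuration scope
        print(s.root)
    # on exit the original store and the temporary scope are restored
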
Massimiliano Culpo 2023-07-17 16:51:04 +02:00 committed by GitHub
parent 2b5a7bb4d7
commit 53ae969aa0
24 changed files with 429 additions and 335 deletions


@ -214,6 +214,7 @@ def setup(sphinx):
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
]


@ -821,7 +821,7 @@ def __getattr__(self, name):
# 'instance'/'_instance' to be defined or it will enter an infinite
# loop, so protect against that here.
if name in ["_instance", "instance"]:
raise AttributeError()
raise AttributeError(f"cannot create {name}")
return getattr(self.instance, name)
def __getitem__(self, name):


@ -61,6 +61,22 @@
_build_cache_keys_relative_path = "_pgp"
class BuildCacheDatabase(spack_db.Database):
"""A database for binary buildcaches.
This database supports writing buildcache index files, in which case certain fields are
not needed in each install record, and no locking is required. To use this feature, it
passes ``lock_cfg=NO_LOCK`` and overrides the list of ``record_fields``.
"""
record_fields = ("spec", "ref_count", "in_buildcache")
def __init__(self, root):
super().__init__(root, lock_cfg=spack_db.NO_LOCK)
self._write_transaction_impl = llnl.util.lang.nullcontext
self._read_transaction_impl = llnl.util.lang.nullcontext
class FetchCacheError(Exception):
"""Error thrown when fetching the cache failed, usually a composite error list."""
@ -190,8 +206,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
tmpdir = tempfile.mkdtemp()
try:
db_root_dir = os.path.join(tmpdir, "db_root")
db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)
db = BuildCacheDatabase(tmpdir)
try:
self._index_file_cache.init_entry(cache_key)
@ -1059,13 +1074,10 @@ def generate_package_index(cache_prefix, concurrency=32):
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
tmpdir = tempfile.mkdtemp()
db_root_dir = os.path.join(tmpdir, "db_root")
db = spack_db.Database(
None,
db_dir=db_root_dir,
enable_transaction_locking=False,
record_fields=["spec", "ref_count", "in_buildcache"],
)
db = BuildCacheDatabase(tmpdir)
db.root = None
db_root_dir = db.database_directory
try:
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)


@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Function and classes needed to bootstrap Spack itself."""
from .config import ensure_bootstrap_configuration, is_bootstrapping
from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message
@ -18,4 +18,5 @@
"ensure_environment_dependencies",
"BootstrapEnvironment",
"status_message",
"store_path",
]


@ -368,7 +368,9 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
constraint_qualifiers = {
"refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
}
query_args = constraint_qualifiers.get(args.subparser_name, {})
# Get the specs that match the query from the DB
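
The ``known`` qualifier is now a callable over package names rather than a boolean
(see the matching ``_query`` change further down); a toy model of the filtering,
with made-up names:

    # a record is skipped when known(name) is truthy
    def known(name: str) -> bool:
        return name not in {"zlib", "cmake"}  # hypothetical repo contents

    records = ["zlib", "cmake", "removed-package"]
    kept = [name for name in records if not known(name)]
    assert kept == ["zlib", "cmake"]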


@ -767,7 +767,7 @@ def _add_command_line_scopes(cfg, command_line_scopes):
_add_platform_scope(cfg, ImmutableConfigScope, name, path)
def _config():
def create():
"""Singleton Configuration instance.
This constructs one instance associated with this module and returns
@ -825,7 +825,7 @@ def _config():
#: This is the singleton configuration instance for Spack.
config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_config)
config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
def add_from_file(filename, scope=None):


@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Spack's installation tracking database.
The database serves two purposes:
@ -19,14 +18,13 @@
provides a cache and a sanity checking mechanism for what is in the
filesystem.
"""
import contextlib
import datetime
import os
import socket
import sys
import time
from typing import Dict
from typing import Dict, List, NamedTuple, Set, Type, Union
try:
import uuid
@ -39,13 +37,10 @@
from typing import Optional, Tuple
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
import spack.hash_types as ht
import spack.repo
import spack.spec
import spack.store
import spack.util.lock as lk
import spack.util.spack_json as sjson
import spack.version as vn
@ -56,17 +51,17 @@
# TODO: Provide an API for automatically retrying a build after detecting and
# TODO: clearing a failure.
# DB goes in this directory underneath the root
_db_dirname = ".spack-db"
#: DB goes in this directory underneath the root
_DB_DIRNAME = ".spack-db"
# DB version. This is stuck in the DB file to track changes in format.
# Increment by one when the database format changes.
# Versions before 5 were not integers.
_db_version = vn.Version("7")
#: DB version. This is stuck in the DB file to track changes in format.
#: Increment by one when the database format changes.
#: Versions before 5 were not integers.
_DB_VERSION = vn.Version("7")
# For any version combinations here, skip reindex when upgrading.
# Reindexing can take considerable time and is not always necessary.
_skip_reindex = [
#: For any version combinations here, skip reindex when upgrading.
#: Reindexing can take considerable time and is not always necessary.
_SKIP_REINDEX = [
# reindexing takes a significant amount of time, and there's
# no reason to do it from DB version 0.9.3 to version 5. The
# only difference is that v5 can contain "deprecated_for"
@ -77,26 +72,26 @@
(vn.Version("6"), vn.Version("7")),
]
# Default timeout for spack database locks in seconds or None (no timeout).
# A balance needs to be struck between quick turnaround for parallel installs
# (to avoid excess delays) and waiting long enough when the system is busy
# (to ensure the database is updated).
_db_lock_timeout = 120
#: Default timeout for spack database locks in seconds or None (no timeout).
#: A balance needs to be struck between quick turnaround for parallel installs
#: (to avoid excess delays) and waiting long enough when the system is busy
#: (to ensure the database is updated).
_DEFAULT_DB_LOCK_TIMEOUT = 120
# Default timeout for spack package locks in seconds or None (no timeout).
# A balance needs to be struck between quick turnaround for parallel installs
# (to avoid excess delays when performing a parallel installation) and waiting
# long enough for the next possible spec to install (to avoid excessive
# checking of the last high priority package) or holding on to a lock (to
# ensure a failed install is properly tracked).
_pkg_lock_timeout = None
#: Default timeout for spack package locks in seconds or None (no timeout).
#: A balance needs to be struck between quick turnaround for parallel installs
#: (to avoid excess delays when performing a parallel installation) and waiting
#: long enough for the next possible spec to install (to avoid excessive
#: checking of the last high priority package) or holding on to a lock (to
#: ensure a failed install is properly tracked).
_DEFAULT_PKG_LOCK_TIMEOUT = None
# Types of dependencies tracked by the database
# We store by DAG hash, so we track the dependencies that the DAG hash includes.
_tracked_deps = ht.dag_hash.deptype
#: Types of dependencies tracked by the database
#: We store by DAG hash, so we track the dependencies that the DAG hash includes.
_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
# Default list of fields written for each install record
default_install_record_fields = [
#: Default list of fields written for each install record
DEFAULT_INSTALL_RECORD_FIELDS = (
"spec",
"ref_count",
"path",
@ -104,10 +99,10 @@
"explicit",
"installation_time",
"deprecated_for",
]
)
def reader(version):
def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
reader_cls = {
vn.Version("5"): spack.spec.SpecfileV1,
vn.Version("6"): spack.spec.SpecfileV3,
@ -116,7 +111,7 @@ def reader(version):
return reader_cls[version]
def _now():
def _now() -> float:
"""Returns the time since the epoch"""
return time.time()
@ -220,7 +215,7 @@ def install_type_matches(self, installed):
else:
return InstallStatuses.MISSING in installed
def to_dict(self, include_fields=default_install_record_fields):
def to_dict(self, include_fields=DEFAULT_INSTALL_RECORD_FIELDS):
rec_dict = {}
for field_name in include_fields:
@ -256,11 +251,14 @@ class ForbiddenLockError(SpackError):
class ForbiddenLock:
def __getattribute__(self, name):
def __getattr__(self, name):
raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
def __reduce__(self):
return ForbiddenLock, tuple()
_query_docstring = """
_QUERY_DOCSTRING = """
Args:
query_spec: queries iterate through specs in the database and
@ -308,73 +306,106 @@ def __getattribute__(self, name):
"""
#: Data class to configure locks in Database objects
#:
#: Args:
#: enable (bool): whether to enable locks or not.
#: database_timeout (int or None): timeout for the database lock
#: package_timeout (int or None): timeout for the package lock
class LockConfiguration(NamedTuple):
enable: bool
database_timeout: Optional[int]
package_timeout: Optional[int]
#: Configure a database to avoid using locks
NO_LOCK: LockConfiguration = LockConfiguration(
enable=False, database_timeout=None, package_timeout=None
)
#: Configure the database to use locks without a timeout
NO_TIMEOUT: LockConfiguration = LockConfiguration(
enable=True, database_timeout=None, package_timeout=None
)
#: Default configuration for database locks
DEFAULT_LOCK_CFG: LockConfiguration = LockConfiguration(
enable=True,
database_timeout=_DEFAULT_DB_LOCK_TIMEOUT,
package_timeout=_DEFAULT_PKG_LOCK_TIMEOUT,
)
def lock_configuration(configuration):
"""Return a LockConfiguration from a spack.config.Configuration object."""
return LockConfiguration(
enable=configuration.get("config:locks", True),
database_timeout=configuration.get("config:db_lock_timeout"),
package_timeout=configuration.get("config:package_lock_timeout"),
)
class Database:
"""Per-process lock objects for each install prefix."""
#: Per-process lock objects for each install prefix
_prefix_locks: Dict[str, lk.Lock] = {}
"""Per-process failure (lock) objects for each install prefix."""
#: Per-process failure (lock) objects for each install prefix
_prefix_failures: Dict[str, lk.Lock] = {}
#: Fields written for each install record
record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS
def __init__(
self,
root,
db_dir=None,
upstream_dbs=None,
is_upstream=False,
enable_transaction_locking=True,
record_fields=default_install_record_fields,
):
"""Create a Database for Spack installations under ``root``.
root: str,
upstream_dbs: Optional[List["Database"]] = None,
is_upstream: bool = False,
lock_cfg: LockConfiguration = DEFAULT_LOCK_CFG,
) -> None:
"""Database for Spack installations.
A Database is a cache of Specs data from ``$prefix/spec.yaml``
files in Spack installation directories.
A Database is a cache of Specs data from ``$prefix/spec.yaml`` files
in Spack installation directories.
By default, Database files (data and lock files) are stored
under ``root/.spack-db``, which is created if it does not
exist. This is the ``db_dir``.
Database files (data and lock files) are stored under ``root/.spack-db``, which is
created if it does not exist. This is the "database directory".
The Database will attempt to read an ``index.json`` file in
``db_dir``. If that does not exist, it will create a database
when needed by scanning the entire Database root for ``spec.yaml``
files according to Spack's ``DirectoryLayout``.
The database will attempt to read an ``index.json`` file in the database directory.
If that does not exist, it will create a database when needed by scanning the entire
store root for ``spec.json`` files according to Spack's directory layout.
Caller may optionally provide a custom ``db_dir`` parameter
where data will be stored. This is intended to be used for
testing the Database class.
This class supports writing buildcache index files, in which case
certain fields are not needed in each install record, and no
transaction locking is required. To use this feature, provide
``enable_transaction_locking=False``, and specify a list of needed
fields in ``record_fields``.
Args:
root: root directory where to create the database directory.
upstream_dbs: upstream databases for this repository.
is_upstream: whether this repository is an upstream.
lock_cfg: configuration for the locks to be used by this repository.
Relevant only if the repository is not an upstream.
"""
self.root = root
# If the db_dir is not provided, default to within the db root.
self._db_dir = db_dir or os.path.join(self.root, _db_dirname)
self.database_directory = os.path.join(self.root, _DB_DIRNAME)
# Set up layout of database files within the db dir
self._index_path = os.path.join(self._db_dir, "index.json")
self._verifier_path = os.path.join(self._db_dir, "index_verifier")
self._lock_path = os.path.join(self._db_dir, "lock")
self._index_path = os.path.join(self.database_directory, "index.json")
self._verifier_path = os.path.join(self.database_directory, "index_verifier")
self._lock_path = os.path.join(self.database_directory, "lock")
# This is for other classes to use to lock prefix directories.
self.prefix_lock_path = os.path.join(self._db_dir, "prefix_lock")
self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")
# Ensure a persistent location for dealing with parallel installation
# failures (e.g., across near-concurrent processes).
self._failure_dir = os.path.join(self._db_dir, "failures")
self._failure_dir = os.path.join(self.database_directory, "failures")
# Support special locks for handling parallel installation failures
# of a spec.
self.prefix_fail_path = os.path.join(self._db_dir, "prefix_failures")
self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")
# Create needed directories and files
if not is_upstream and not os.path.exists(self._db_dir):
fs.mkdirp(self._db_dir)
if not is_upstream and not os.path.exists(self.database_directory):
fs.mkdirp(self.database_directory)
if not is_upstream and not os.path.exists(self._failure_dir):
fs.mkdirp(self._failure_dir)
@ -391,10 +422,9 @@ def __init__(
self._state_is_inconsistent = False
# initialize rest of state.
self.db_lock_timeout = spack.config.get("config:db_lock_timeout") or _db_lock_timeout
self.package_lock_timeout = (
spack.config.get("config:package_lock_timeout") or _pkg_lock_timeout
)
self.db_lock_timeout = lock_cfg.database_timeout
self.package_lock_timeout = lock_cfg.package_timeout
tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
timeout_format_str = (
"{0}s".format(str(self.package_lock_timeout))
@ -403,18 +433,22 @@ def __init__(
)
tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))
self.lock: Union[ForbiddenLock, lk.Lock]
if self.is_upstream:
self.lock = ForbiddenLock()
else:
self.lock = lk.Lock(
self._lock_path, default_timeout=self.db_lock_timeout, desc="database"
self._lock_path,
default_timeout=self.db_lock_timeout,
desc="database",
enable=lock_cfg.enable,
)
self._data: Dict[str, InstallRecord] = {}
# For every installed spec we keep track of its install prefix, so that
# we can answer the simple query whether a given path is already taken
# before installing a different spec.
self._installed_prefixes = set()
self._installed_prefixes: Set[str] = set()
self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []
@ -426,14 +460,8 @@ def __init__(
# message)
self._fail_when_missing_deps = False
if enable_transaction_locking:
self._write_transaction_impl = lk.WriteTransaction
self._read_transaction_impl = lk.ReadTransaction
else:
self._write_transaction_impl = lang.nullcontext
self._read_transaction_impl = lang.nullcontext
self._record_fields = record_fields
self._write_transaction_impl = lk.WriteTransaction
self._read_transaction_impl = lk.ReadTransaction
def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
@ -450,7 +478,7 @@ def _failed_spec_path(self, spec):
return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
def clear_all_failures(self):
def clear_all_failures(self) -> None:
"""Force remove install failure tracking files."""
tty.debug("Releasing prefix failure locks")
for pkg_id in list(self._prefix_failures.keys()):
@ -468,19 +496,17 @@ def clear_all_failures(self):
"Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
)
def clear_failure(self, spec, force=False):
def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
"""
Remove any persistent and cached failure tracking for the spec.
see `mark_failed()`.
Args:
spec (spack.spec.Spec): the spec whose failure indicators are being removed
force (bool): True if the failure information should be cleared
when a prefix failure lock exists for the file or False if
the failure should not be cleared (e.g., it may be
associated with a concurrent build)
spec: the spec whose failure indicators are being removed
force: True if the failure information should be cleared when a prefix failure
lock exists for the file, or False if the failure should not be cleared (e.g.,
it may be associated with a concurrent build)
"""
failure_locked = self.prefix_failure_locked(spec)
if failure_locked and not force:
@ -506,7 +532,7 @@ def clear_failure(self, spec, force=False):
)
)
def mark_failed(self, spec):
def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
"""
Mark a spec as failing to install.
@ -556,7 +582,7 @@ def mark_failed(self, spec):
return self._prefix_failures[prefix]
def prefix_failed(self, spec):
def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
"""Return True if the prefix (installation) is marked as failed."""
# The failure was detected in this process.
if spec.prefix in self._prefix_failures:
@ -571,7 +597,7 @@ def prefix_failed(self, spec):
# spack build process running concurrently.
return self.prefix_failure_marked(spec)
def prefix_failure_locked(self, spec):
def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
"""Return True if a process has a failure lock on the spec."""
check = lk.Lock(
self.prefix_fail_path,
@ -583,11 +609,11 @@ def prefix_failure_locked(self, spec):
return check.is_write_locked()
def prefix_failure_marked(self, spec):
def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
"""Determine if the spec has a persistent failure marking."""
return os.path.exists(self._failed_spec_path(spec))
def prefix_lock(self, spec, timeout=None):
def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
"""Get a lock on a particular spec's installation directory.
NOTE: The installation directory **does not** need to exist.
@ -659,7 +685,7 @@ def _write_to_file(self, stream):
"""
# map from per-spec hash code to installation record.
installs = dict(
(k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
(k, v.to_dict(include_fields=self.record_fields)) for k, v in self._data.items()
)
# database includes installation list and version.
@ -672,7 +698,7 @@ def _write_to_file(self, stream):
"database": {
# TODO: move this to a top-level _meta section if we ever
# TODO: bump the DB version to 7
"version": str(_db_version),
"version": str(_DB_VERSION),
# dictionary of installation records, keyed by DAG hash
"installs": installs,
}
@ -809,13 +835,13 @@ def check(cond, msg):
# TODO: better version checking semantics.
version = vn.Version(db["version"])
if version > _db_version:
raise InvalidDatabaseVersionError(self, _db_version, version)
elif version < _db_version:
if not any(old == version and new == _db_version for old, new in _skip_reindex):
if version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
elif version < _DB_VERSION:
if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
tty.warn(
"Spack database version changed from %s to %s. Upgrading."
% (version, _db_version)
% (version, _DB_VERSION)
)
self.reindex(spack.store.layout)
@ -980,7 +1006,7 @@ def _construct_from_directory_layout(self, directory_layout, old_data):
# applications.
tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
try:
layout = None if entry.spec.external else spack.store.layout
layout = None if entry.spec.external else directory_layout
kwargs = {
"spec": entry.spec,
"directory_layout": layout,
@ -1006,7 +1032,7 @@ def _check_ref_counts(self):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
for dep in rec.spec.dependencies(deptype=_tracked_deps):
for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@ -1095,13 +1121,13 @@ def _add(
):
"""Add an install record for this spec to the database.
Assumes spec is installed in ``layout.path_for_spec(spec)``.
Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.
Also ensures dependencies are present and updated in the DB as
either installed or missing.
Args:
spec: spec to be added
spec (spack.spec.Spec): spec to be added
directory_layout: layout of the spec installation
explicit:
Possible values: True, False, any
@ -1128,7 +1154,7 @@ def _add(
# Retrieve optional arguments
installation_time = installation_time or _now()
for edge in spec.edges_to_dependencies(deptype=_tracked_deps):
for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
if edge.spec.dag_hash() in self._data:
continue
# allow missing build-only deps. This prevents excessive
@ -1180,7 +1206,7 @@ def _add(
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
# Connect dependencies from the DB to the new copy.
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
@ -1243,7 +1269,7 @@ def _decrement_ref_count(self, spec):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
for dep in spec.dependencies(deptype=_tracked_deps):
for dep in spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
self._decrement_ref_count(dep)
def _increment_ref_count(self, spec):
@ -1273,8 +1299,8 @@ def _remove(self, spec):
# Remove any reference to this node from dependencies and
# decrement the reference count
rec.spec.detach(deptype=_tracked_deps)
for dep in rec.spec.dependencies(deptype=_tracked_deps):
rec.spec.detach(deptype=_TRACKED_DEPENDENCIES)
for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
self._decrement_ref_count(dep)
if rec.deprecated_for:
@ -1390,10 +1416,7 @@ def installed_relatives(self, spec, direction="children", transitive=True, depty
@_autospec
def installed_extensions_for(self, extendee_spec):
"""
Return the specs of all packages that extend
the given spec
"""
"""Returns the specs of all packages that extend the given spec"""
for spec in self.query():
if spec.package.extends(extendee_spec):
yield spec.package
@ -1420,7 +1443,7 @@ def _get_by_hash_local(self, dag_hash, default=None, installed=any):
# nothing found
return default
def get_by_hash_local(self, *args, **kwargs):
def get_by_hash_local(self, dag_hash, default=None, installed=any):
"""Look up a spec in *this DB* by DAG hash, or by a DAG hash prefix.
Arguments:
@ -1444,7 +1467,7 @@ def get_by_hash_local(self, *args, **kwargs):
"""
with self.read_transaction():
return self._get_by_hash_local(*args, **kwargs)
return self._get_by_hash_local(dag_hash, default=default, installed=installed)
def get_by_hash(self, dag_hash, default=None, installed=any):
"""Look up a spec by DAG hash, or by a DAG hash prefix.
@ -1530,7 +1553,7 @@ def _query(
if explicit is not any and rec.explicit != explicit:
continue
if known is not any and spack.repo.path.exists(rec.spec.name) != known:
if known is not any and known(rec.spec.name):
continue
if start_date or end_date:
@ -1545,7 +1568,7 @@ def _query(
if _query.__doc__ is None:
_query.__doc__ = ""
_query.__doc__ += _query_docstring
_query.__doc__ += _QUERY_DOCSTRING
def query_local(self, *args, **kwargs):
"""Query only the local Spack database.
@ -1559,7 +1582,7 @@ def query_local(self, *args, **kwargs):
if query_local.__doc__ is None:
query_local.__doc__ = ""
query_local.__doc__ += _query_docstring
query_local.__doc__ += _QUERY_DOCSTRING
def query(self, *args, **kwargs):
"""Query the Spack database including all upstream databases."""
@ -1578,7 +1601,7 @@ def query(self, *args, **kwargs):
if query.__doc__ is None:
query.__doc__ = ""
query.__doc__ += _query_docstring
query.__doc__ += _QUERY_DOCSTRING
def query_one(self, query_spec, known=any, installed=True):
"""Query for exactly one spec that matches the query spec.

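The switch from ``__getattribute__`` to ``__getattr__`` together with the explicit
``__reduce__`` is what lets ``ForbiddenLock`` survive pickling: ``__getattr__`` only
fires when normal lookup fails, so the attributes pickle relies on still resolve. A
self-contained sketch of the pattern (toy class, not Spack code):

    import pickle

    class Forbidden:
        """Any user attribute access is an error, yet the object pickles."""

        def __getattr__(self, name):
            # unlike __getattribute__, this does not intercept attributes
            # that actually exist (e.g. the ones pickle relies on)
            raise RuntimeError(f"cannot access attribute {name!r} of lock")

        def __reduce__(self):
            # rebuild with no state, so pickling never touches attributes
            return Forbidden, ()

    clone = pickle.loads(pickle.dumps(Forbidden()))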

@ -19,8 +19,10 @@
"""
import contextlib
import os
import pathlib
import re
from typing import Union
import uuid
from typing import Any, Callable, Dict, Generator, List, Optional, Union
import llnl.util.lang
import llnl.util.tty as tty
@ -33,7 +35,10 @@
import spack.util.path
#: default installation root, relative to the Spack install path
default_install_tree_root = os.path.join(spack.paths.opt_path, "spack")
DEFAULT_INSTALL_TREE_ROOT = os.path.join(spack.paths.opt_path, "spack")
ConfigurationType = Union["spack.config.Configuration", "llnl.util.lang.Singleton"]
def parse_install_tree(config_dict):
@ -79,7 +84,7 @@ def parse_install_tree(config_dict):
projections = {"all": all_projection}
else:
unpadded_root = install_tree.get("root", default_install_tree_root)
unpadded_root = install_tree.get("root", DEFAULT_INSTALL_TREE_ROOT)
unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
padded_length = install_tree.get("padded_length", False)
@ -123,7 +128,7 @@ def parse_install_tree(config_dict):
else:
root = unpadded_root
return (root, unpadded_root, projections)
return root, unpadded_root, projections
class Store:
@ -132,95 +137,133 @@ class Store:
Stores consist of packages installed according to a
``DirectoryLayout``, along with an index, or _database_ of their
contents. The directory layout controls what paths look like and how
Spack ensures that each uniqe spec gets its own unique directory (or
not, though we don't recommend that). The database is a signle file
Spack ensures that each unique spec gets its own unique directory (or
not, though we don't recommend that). The database is a single file
that caches metadata for the entire Spack installation. It prevents
us from having to spider the install tree to figure out what's there.
Args:
root (str): path to the root of the install tree
unpadded_root (str): path to the root of the install tree without
padding; the sbang script has to be installed here to work with
padded roots
path_scheme (str): expression according to guidelines in
``spack.util.path`` that describes how to construct a path to
root: path to the root of the install tree
unpadded_root: path to the root of the install tree without padding.
The sbang script has to be installed here to work with padded roots
projections: expression according to guidelines that describes how to construct a path to
a package prefix in this store
hash_length (int): length of the hashes used in the directory
layout; spec hash suffixes will be truncated to this length
hash_length: length of the hashes used in the directory layout. Spec hash suffixes will be
truncated to this length
upstreams: optional list of upstream databases
lock_cfg: lock configuration for the database
"""
def __init__(self, root, unpadded_root=None, projections=None, hash_length=None):
def __init__(
self,
root: str,
unpadded_root: Optional[str] = None,
projections: Optional[Dict[str, str]] = None,
hash_length: Optional[int] = None,
upstreams: Optional[List[spack.database.Database]] = None,
lock_cfg: spack.database.LockConfiguration = spack.database.NO_LOCK,
) -> None:
self.root = root
self.unpadded_root = unpadded_root or root
self.projections = projections
self.hash_length = hash_length
self.db = spack.database.Database(root, upstream_dbs=retrieve_upstream_dbs())
self.upstreams = upstreams
self.lock_cfg = lock_cfg
self.db = spack.database.Database(root, upstream_dbs=upstreams, lock_cfg=lock_cfg)
self.layout = spack.directory_layout.DirectoryLayout(
root, projections=projections, hash_length=hash_length
)
def reindex(self):
def reindex(self) -> None:
"""Convenience function to reindex the store DB with its own layout."""
return self.db.reindex(self.layout)
def serialize(self):
"""Return a pickle-able object that can be used to reconstruct
a store.
"""
return (self.root, self.unpadded_root, self.projections, self.hash_length)
@staticmethod
def deserialize(token):
"""Return a store reconstructed from a token created by
the serialize method.
Args:
token: return value of the serialize method
Returns:
Store object reconstructed from the token
"""
return Store(*token)
def __reduce__(self):
return Store, (
self.root,
self.unpadded_root,
self.projections,
self.hash_length,
self.upstreams,
self.lock_cfg,
)
def _store():
"""Get the singleton store instance."""
import spack.bootstrap
def create(configuration: ConfigurationType) -> Store:
"""Create a store from the configuration passed as input.
config_dict = spack.config.get("config")
Args:
configuration: configuration to create a store.
"""
configuration = configuration or spack.config.config
config_dict = configuration.get("config")
root, unpadded_root, projections = parse_install_tree(config_dict)
hash_length = spack.config.get("config:install_hash_length")
hash_length = configuration.get("config:install_hash_length")
install_roots = [
install_properties["install_tree"]
for install_properties in configuration.get("upstreams", {}).values()
]
upstreams = _construct_upstream_dbs_from_install_roots(install_roots)
return Store(
root=root, unpadded_root=unpadded_root, projections=projections, hash_length=hash_length
root=root,
unpadded_root=unpadded_root,
projections=projections,
hash_length=hash_length,
upstreams=upstreams,
lock_cfg=spack.database.lock_configuration(configuration),
)
def _create_global() -> Store:
# Check that the user is not trying to install software into the store
# reserved by Spack to bootstrap its own dependencies, since this would
# lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
# user installed software)
import spack.bootstrap
enable_bootstrap = spack.config.config.get("bootstrap:enable", True)
if enable_bootstrap and spack.bootstrap.store_path() == root:
msg = (
'please change the install tree root "{0}" in your '
"configuration [path reserved for Spack internal use]"
)
raise ValueError(msg.format(root))
return create(configuration=spack.config.config)
#: Singleton store instance
store: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_store)
store: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
def _store_root():
def _store_root() -> str:
return store.root
def _store_unpadded_root():
def _store_unpadded_root() -> str:
return store.unpadded_root
def _store_db():
def _store_db() -> spack.database.Database:
return store.db
def _store_layout():
def _store_layout() -> spack.directory_layout.DirectoryLayout:
return store.layout
# convenience accessors for parts of the singleton store
root = llnl.util.lang.LazyReference(_store_root)
unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
db = llnl.util.lang.LazyReference(_store_db)
layout = llnl.util.lang.LazyReference(_store_layout)
root: Union[llnl.util.lang.LazyReference, str] = llnl.util.lang.LazyReference(_store_root)
unpadded_root: Union[llnl.util.lang.LazyReference, str] = llnl.util.lang.LazyReference(
_store_unpadded_root
)
db: Union[llnl.util.lang.LazyReference, spack.database.Database] = llnl.util.lang.LazyReference(
_store_db
)
layout: Union[
llnl.util.lang.LazyReference, "spack.directory_layout.DirectoryLayout"
] = llnl.util.lang.LazyReference(_store_layout)
def reinitialize():
@ -232,7 +275,7 @@ def reinitialize():
token = store, root, unpadded_root, db, layout
store = llnl.util.lang.Singleton(_store)
store = llnl.util.lang.Singleton(_create_global)
root = llnl.util.lang.LazyReference(_store_root)
unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
db = llnl.util.lang.LazyReference(_store_db)
@ -248,18 +291,10 @@ def restore(token):
store, root, unpadded_root, db, layout = token
def retrieve_upstream_dbs():
other_spack_instances = spack.config.get("upstreams", {})
install_roots = []
for install_properties in other_spack_instances.values():
install_roots.append(install_properties["install_tree"])
return _construct_upstream_dbs_from_install_roots(install_roots)
def _construct_upstream_dbs_from_install_roots(install_roots, _test=False):
accumulated_upstream_dbs = []
def _construct_upstream_dbs_from_install_roots(
install_roots: List[str], _test: bool = False
) -> List[spack.database.Database]:
accumulated_upstream_dbs: List[spack.database.Database] = []
for install_root in reversed(install_roots):
upstream_dbs = list(accumulated_upstream_dbs)
next_db = spack.database.Database(
@ -274,8 +309,13 @@ def _construct_upstream_dbs_from_install_roots(install_roots, _test=False):
return accumulated_upstream_dbs
def find(constraints, multiple=False, query_fn=None, **kwargs):
"""Return a list of specs matching the constraints passed as inputs.
def find(
constraints: Union[str, List[str], List["spack.spec.Spec"]],
multiple: bool = False,
query_fn: Optional[Callable[[Any], List["spack.spec.Spec"]]] = None,
**kwargs,
) -> List["spack.spec.Spec"]:
"""Returns a list of specs matching the constraints passed as inputs.
At least one spec per constraint must match, otherwise the function
will error with an appropriate message.
@ -287,21 +327,17 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
The query function must accept a spec as its first argument.
Args:
constraints (List[spack.spec.Spec]): specs to be matched against
installed packages
multiple (bool): if True multiple matches per constraint are admitted
constraints: spec(s) to be matched against installed packages
multiple: if True multiple matches per constraint are admitted
query_fn (Callable): query function to get matching specs. By default,
``spack.store.db.query``
**kwargs: keyword arguments forwarded to the query function
Return:
List of matching specs
"""
# Normalize input to list of specs
if isinstance(constraints, str):
constraints = [spack.spec.Spec(constraints)]
matching_specs, errors = [], []
matching_specs: List[spack.spec.Spec] = []
errors = []
query_fn = query_fn or spack.store.db.query
for spec in constraints:
current_matches = query_fn(spec, **kwargs)
@ -327,39 +363,45 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
return matching_specs
def specfile_matches(filename, **kwargs):
def specfile_matches(filename: str, **kwargs) -> List["spack.spec.Spec"]:
"""Same as find but reads the query from a spec file.
Args:
filename (str): YAML or JSON file from which to read the query.
filename: YAML or JSON file from which to read the query.
**kwargs: keyword arguments forwarded to "find"
Return:
List of matches
"""
query = [spack.spec.Spec.from_specfile(filename)]
return spack.store.find(query, **kwargs)
@contextlib.contextmanager
def use_store(store_or_path):
def use_store(
path: Union[str, pathlib.Path], extra_data: Optional[Dict[str, Any]] = None
) -> Generator[Store, None, None]:
"""Use the store passed as argument within the context manager.
Args:
store_or_path: either a Store object or a path to where the store resides
path: path to the store.
extra_data: extra configuration under "config:install_tree" to be
taken into account.
Returns:
Yields:
Store object associated with the context manager's store
"""
global store, db, layout, root, unpadded_root
# Normalize input arguments
temporary_store = store_or_path
if not isinstance(store_or_path, Store):
temporary_store = Store(store_or_path)
assert not isinstance(path, Store), "cannot pass a store anymore"
scope_name = "use-store-{}".format(uuid.uuid4())
data = {"root": str(path)}
if extra_data:
data.update(extra_data)
# Swap the store with the one just constructed and return it
_ = store.db
spack.config.config.push_scope(
spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
)
temporary_store = create(configuration=spack.config.config)
original_store, store = store, temporary_store
db, layout = store.db, store.layout
root, unpadded_root = store.root, store.unpadded_root
@ -371,6 +413,7 @@ def use_store(store_or_path):
store = original_store
db, layout = original_store.db, original_store.layout
root, unpadded_root = original_store.root, original_store.unpadded_root
spack.config.config.remove_scope(scope_name=scope_name)
class MatchError(spack.error.SpackError):

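Since ``create`` now takes the configuration as input, a store can be built without
consulting the global singleton; a sketch, assuming an in-memory configuration scope
(the scope name and root are illustrative):

    import spack.config
    import spack.store

    # build a Configuration by hand instead of relying on the global one
    cfg = spack.config.Configuration(
        spack.config.InternalConfigScope(
            "illustration", {"config": {"install_tree": {"root": "/tmp/store"}}}
        )
    )
    store = spack.store.create(configuration=cfg)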

@ -27,7 +27,7 @@
import spack.repo
import spack.store
_serialize = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
_SERIALIZE = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
patches = None
@ -66,7 +66,7 @@ class PackageInstallContext:
"""
def __init__(self, pkg):
if _serialize:
if _SERIALIZE:
self.serialized_pkg = serialize(pkg)
self.serialized_env = serialize(spack.environment.active_environment())
else:
@ -78,8 +78,8 @@ def __init__(self, pkg):
def restore(self):
self.test_state.restore()
spack.main.spack_working_dir = self.spack_working_dir
env = pickle.load(self.serialized_env) if _serialize else self.env
pkg = pickle.load(self.serialized_pkg) if _serialize else self.pkg
env = pickle.load(self.serialized_env) if _SERIALIZE else self.env
pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
if env:
spack.environment.activate(env)
return pkg
@ -93,25 +93,23 @@ class TestState:
"""
def __init__(self):
if _serialize:
self.repo_dirs = list(r.root for r in spack.repo.path.repos)
if _SERIALIZE:
self.config = spack.config.config
self.platform = spack.platforms.host
self.test_patches = store_patches()
self.store_token = spack.store.store.serialize()
self.store = spack.store.store
def restore(self):
if _serialize:
if _SERIALIZE:
spack.config.config = self.config
spack.repo.path = spack.repo._path(self.config)
spack.repo.path = spack.repo.create(self.config)
spack.platforms.host = self.platform
new_store = spack.store.Store.deserialize(self.store_token)
spack.store.store = new_store
spack.store.root = new_store.root
spack.store.unpadded_root = new_store.unpadded_root
spack.store.db = new_store.db
spack.store.layout = new_store.layout
spack.store.store = self.store
spack.store.root = self.store.root
spack.store.unpadded_root = self.store.unpadded_root
spack.store.db = self.store.db
spack.store.layout = self.store.layout
self.test_patches.restore()
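
Storing the ``Store`` object directly works because the whole test state is pickled
when it crosses the process boundary, and ``Store.__reduce__`` (added above) rebuilds
it on the other side; a toy model:

    import pickle

    class FakeStore:
        def __init__(self, root):
            self.root = root

        def __reduce__(self):
            # like Store.__reduce__: the class plus its constructor arguments
            return FakeStore, (self.root,)

    class Carrier:
        """Toy TestState: holds the store itself instead of a token."""

        def __init__(self, store):
            self.store = store

    child = pickle.loads(pickle.dumps(Carrier(FakeStore("/opt/spack"))))
    assert child.store.root == "/opt/spack"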


@ -479,9 +479,6 @@ def test_update_sbang(tmpdir, test_mirror):
into the non-default directory layout scheme, triggering an update of the
sbang.
"""
scheme = os.path.join(
"${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
)
spec_str = "old-sbang"
# Concretize a package with some old-fashioned sbang lines.
old_spec = Spec(spec_str).concretized()
@ -504,12 +501,8 @@ def test_update_sbang(tmpdir, test_mirror):
# Switch the store to the new install tree locations
newtree_dir = tmpdir.join("newtree")
s = spack.store.Store(str(newtree_dir))
s.layout = DirectoryLayout(str(newtree_dir), path_scheme=scheme)
with spack.store.use_store(s):
new_spec = Spec("old-sbang")
new_spec.concretize()
with spack.store.use_store(str(newtree_dir)):
new_spec = Spec("old-sbang").concretized()
assert new_spec.dag_hash() == old_spec.dag_hash()
# Install package from buildcache


@ -99,7 +99,7 @@ def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
assert os.path.exists(backup_file)
def test_list_sources(capsys):
def test_list_sources(config, capsys):
# Get the merged list and ensure we get our defaults
with capsys.disabled():
output = _bootstrap("list")


@ -150,7 +150,7 @@ def test_env_list(mutable_mock_env_path):
assert "baz" in out
# make sure `spack env list` skips invalid things in var/spack/env
mutable_mock_env_path.join(".DS_Store").ensure(file=True)
(mutable_mock_env_path / ".DS_Store").touch()
out = env("list")
assert "foo" in out
@ -1118,12 +1118,12 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
@pytest.mark.usefixtures("config")
def test_indirect_build_dep(tmpdir):
def test_indirect_build_dep(tmp_path):
"""Simple case of X->Y->Z where Y is a build/link dep and Z is a
build-only dep. Make sure this concrete DAG is preserved when writing the
environment out and reading it back.
"""
builder = spack.repo.MockRepositoryBuilder(tmpdir)
builder = spack.repo.MockRepositoryBuilder(tmp_path / "repo")
builder.add_package("z")
builder.add_package("y", dependencies=[("z", "build", None)])
builder.add_package("x", dependencies=[("y", None, None)])
@ -1146,7 +1146,7 @@ def test_indirect_build_dep(tmpdir):
@pytest.mark.usefixtures("config")
def test_store_different_build_deps(tmpdir):
def test_store_different_build_deps(tmp_path):
r"""Ensure that an environment can store two instances of a build-only
dependency::
@ -1157,7 +1157,7 @@ def test_store_different_build_deps(tmpdir):
z1
"""
builder = spack.repo.MockRepositoryBuilder(tmpdir)
builder = spack.repo.MockRepositoryBuilder(tmp_path / "mirror")
builder.add_package("z")
builder.add_package("y", dependencies=[("z", "build", None)])
builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])
@ -3350,12 +3350,11 @@ def test_relative_view_path_on_command_line_is_made_absolute(tmp_path, config):
assert os.path.samefile("view", environment.default_view.root)
def test_environment_created_in_users_location(mutable_config, tmpdir):
def test_environment_created_in_users_location(mutable_mock_env_path, tmp_path):
"""Test that an environment is created in a location based on the config"""
spack.config.set("config:environments_root", str(tmpdir.join("envs")))
env_dir = spack.config.get("config:environments_root")
env_dir = str(mutable_mock_env_path)
assert tmpdir.strpath in env_dir
assert str(tmp_path) in env_dir
assert not os.path.isdir(env_dir)
dir_name = "user_env"


@ -97,7 +97,7 @@ def test_location_with_active_env(mutable_mock_env_path):
assert location("--env").strip() == e.path
def test_location_env_flag_interference(mutable_mock_env_path, tmpdir):
def test_location_env_flag_interference(mutable_mock_env_path):
"""
Tests that specifying an active environment using `spack -e x location ...`
does not interfere with the location command flags.


@ -36,8 +36,8 @@ def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
@pytest.mark.regression("12345")
def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config, mutable_mock_env_path):
mirror_dir = str(tmpdir)
def test_mirror_from_env(tmp_path, mock_packages, mock_fetch, config, mutable_mock_env_path):
mirror_dir = str(tmp_path / "mirror")
env_name = "test"
env("create", env_name)


@ -467,7 +467,7 @@ def test_substitute_date(mock_low_high_config):
],
)
def test_parse_install_tree(config_settings, expected, mutable_config):
expected_root = expected[0] or spack.store.default_install_tree_root
expected_root = expected[0] or spack.store.DEFAULT_INSTALL_TREE_ROOT
expected_unpadded_root = expected[1] or expected_root
expected_proj = expected[2] or spack.directory_layout.default_projections
@ -522,7 +522,7 @@ def test_parse_install_tree(config_settings, expected, mutable_config):
],
)
def test_parse_install_tree_padded(config_settings, expected, mutable_config):
expected_root = expected[0] or spack.store.default_install_tree_root
expected_root = expected[0] or spack.store.DEFAULT_INSTALL_TREE_ROOT
expected_unpadded_root = expected[1] or expected_root
expected_proj = expected[2] or spack.directory_layout.default_projections
@ -1230,21 +1230,21 @@ def test_default_install_tree(monkeypatch):
def test_local_config_can_be_disabled(working_env):
os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = "true"
cfg = spack.config._config()
cfg = spack.config.create()
assert "defaults" in cfg.scopes
assert "system" not in cfg.scopes
assert "site" in cfg.scopes
assert "user" not in cfg.scopes
os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = ""
cfg = spack.config._config()
cfg = spack.config.create()
assert "defaults" in cfg.scopes
assert "system" not in cfg.scopes
assert "site" in cfg.scopes
assert "user" not in cfg.scopes
del os.environ["SPACK_DISABLE_LOCAL_CONFIG"]
cfg = spack.config._config()
cfg = spack.config.create()
assert "defaults" in cfg.scopes
assert "system" in cfg.scopes
assert "site" in cfg.scopes


@ -13,12 +13,7 @@
@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length(hash_length, mutable_config, tmpdir):
mutable_config.set("config:install_hash_length", hash_length)
mutable_config.set("config:install_tree", {"root": str(tmpdir)})
# The call below is to reinitialize the directory layout associated
# with the store according to the configuration changes above (i.e.
# with the shortened hash)
store = spack.store._store()
with spack.store.use_store(store):
with spack.store.use_store(str(tmpdir)):
spec = spack.spec.Spec("libelf").concretized()
prefix = spec.prefix
hash_str = prefix.rsplit("-")[-1]
@ -28,14 +23,7 @@ def test_set_install_hash_length(hash_length, mutable_config, tmpdir):
@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length_upper_case(mutable_config, tmpdir):
mutable_config.set("config:install_hash_length", 5)
mutable_config.set(
"config:install_tree", {"root": str(tmpdir), "projections": {"all": "{name}-{HASH}"}}
)
# The call below is to reinitialize the directory layout associated
# with the store according to the configuration changes above (i.e.
# with the shortened hash and projection)
store = spack.store._store()
with spack.store.use_store(store):
with spack.store.use_store(str(tmpdir), extra_data={"projections": {"all": "{name}-{HASH}"}}):
spec = spack.spec.Spec("libelf").concretized()
prefix = spec.prefix
hash_str = prefix.rsplit("-")[-1]


@ -950,7 +950,7 @@ def disable_compiler_execution(monkeypatch, request):
@pytest.fixture(scope="function")
def install_mockery(temporary_store, config, mock_packages):
def install_mockery(temporary_store, mutable_config, mock_packages):
"""Hooks a fake install directory, DB, and stage directory into Spack."""
# We use a fake package, so temporarily disable checksumming
with spack.config.override("config:checksum", False):
@ -968,8 +968,9 @@ def install_mockery(temporary_store, config, mock_packages):
@pytest.fixture(scope="function")
def temporary_store(tmpdir):
def temporary_store(tmpdir, request):
"""Hooks a temporary empty store for the test function."""
ensure_configuration_fixture_run_before(request)
temporary_store_path = tmpdir.join("opt")
with spack.store.use_store(str(temporary_store_path)) as s:
yield s
@ -1536,13 +1537,12 @@ def get_rev():
@pytest.fixture(scope="function")
def mutable_mock_env_path(tmpdir_factory, mutable_config):
def mutable_mock_env_path(tmp_path, mutable_config, monkeypatch):
"""Fixture for mocking the internal spack environments directory."""
saved_path = ev.environment.default_env_path
mock_path = tmpdir_factory.mktemp("mock-env-path")
ev.environment.default_env_path = str(mock_path)
yield mock_path
ev.environment.default_env_path = saved_path
mock_path = tmp_path / "mock-env-path"
mutable_config.set("config:environments_root", str(mock_path))
monkeypatch.setattr(ev.environment, "default_env_path", str(mock_path))
return mock_path
@pytest.fixture()
@ -1938,3 +1938,12 @@ def shell_as(shell):
# restore old shell if one was set
if _shell:
os.environ["SPACK_SHELL"] = _shell
@pytest.fixture()
def nullify_globals(request, monkeypatch):
ensure_configuration_fixture_run_before(request)
monkeypatch.setattr(spack.config, "config", None)
monkeypatch.setattr(spack.caches, "misc_cache", None)
monkeypatch.setattr(spack.repo, "path", None)
monkeypatch.setattr(spack.store, "store", None)


@ -1060,8 +1060,18 @@ def test_error_message_when_using_too_new_db(database, monkeypatch):
back to an older version of Spack. This test ensures that the error message for a too
new database version stays comprehensible across refactoring of the database code.
"""
monkeypatch.setattr(spack.database, "_db_version", vn.Version("0"))
monkeypatch.setattr(spack.database, "_DB_VERSION", vn.Version("0"))
with pytest.raises(
spack.database.InvalidDatabaseVersionError, match="you need a newer Spack version"
):
spack.database.Database(database.root)._read()
@pytest.mark.parametrize(
"lock_cfg",
[spack.database.NO_LOCK, spack.database.NO_TIMEOUT, spack.database.DEFAULT_LOCK_CFG, None],
)
def test_database_construction_doesnt_use_globals(tmpdir, config, nullify_globals, lock_cfg):
lock_cfg = lock_cfg or spack.database.lock_configuration(config)
db = spack.database.Database(str(tmpdir), lock_cfg=lock_cfg)
assert os.path.exists(db.database_directory)


@ -288,17 +288,19 @@ def install_upstream(tmpdir_factory, gen_mock_layout, install_mockery):
mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
prepared_db = spack.database.Database(mock_db_root)
upstream_layout = gen_mock_layout("/a/")
spack.config.config.push_scope(
spack.config.InternalConfigScope(
name="install-upstream-fixture",
data={"upstreams": {"mock1": {"install_tree": prepared_db.root}}},
)
)
def _install_upstream(*specs):
for spec_str in specs:
s = spack.spec.Spec(spec_str).concretized()
prepared_db.add(s, upstream_layout)
downstream_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
db_for_test = spack.database.Database(downstream_root, upstream_dbs=[prepared_db])
store = spack.store.Store(downstream_root)
store.db = db_for_test
return store, upstream_layout
return downstream_root, upstream_layout
return _install_upstream
@ -307,8 +309,8 @@ def test_installed_upstream_external(install_upstream, mock_fetch):
"""Check that when a dependency package is recorded as installed in
an upstream database that it is not reinstalled.
"""
s, _ = install_upstream("externaltool")
with spack.store.use_store(s):
store_root, _ = install_upstream("externaltool")
with spack.store.use_store(store_root):
dependent = spack.spec.Spec("externaltest")
dependent.concretize()
@ -326,8 +328,8 @@ def test_installed_upstream(install_upstream, mock_fetch):
"""Check that when a dependency package is recorded as installed in
an upstream database that it is not reinstalled.
"""
s, upstream_layout = install_upstream("dependency-install")
with spack.store.use_store(s):
store_root, upstream_layout = install_upstream("dependency-install")
with spack.store.use_store(store_root):
dependency = spack.spec.Spec("dependency-install").concretized()
dependent = spack.spec.Spec("dependent-install").concretized()
@ -379,9 +381,8 @@ def test_install_prefix_collision_fails(config, mock_fetch, mock_packages, tmpdi
Test that different specs with coinciding install prefixes will fail
to install.
"""
projections = {"all": "all-specs-project-to-this-prefix"}
store = spack.store.Store(str(tmpdir), projections=projections)
with spack.store.use_store(store):
projections = {"projections": {"all": "all-specs-project-to-this-prefix"}}
with spack.store.use_store(str(tmpdir), extra_data=projections):
with spack.config.override("config:checksum", False):
pkg_a = Spec("libelf@0.8.13").concretized().package
pkg_b = Spec("libelf@0.8.12").concretized().package


@ -123,12 +123,11 @@ def test_relative_import_spack_packages_as_python_modules(mock_packages):
def test_all_virtual_packages_have_default_providers():
"""All virtual packages must have a default provider explicitly set."""
defaults = spack.config.get("packages", scope="defaults")
configuration = spack.config.create()
defaults = configuration.get("packages", scope="defaults")
default_providers = defaults["all"]["providers"]
providers = spack.repo.path.provider_index.providers
default_providers_filename = spack.config.config.scopes["defaults"].get_section_filename(
"packages"
)
default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")
for provider in providers:
assert provider in default_providers, (
"all providers must have a default in %s" % default_providers_filename
@ -167,3 +166,18 @@ def test_repo_dump_virtuals(tmpdir, mutable_mock_repo, mock_packages, ensure_deb
captured = capsys.readouterr()[1]
assert "Installing" in captured
assert "package.py" in os.listdir(tmpdir), "Expected the virtual's package to be copied"
@pytest.mark.parametrize(
"repo_paths,namespaces",
[
([spack.paths.packages_path], ["builtin"]),
([spack.paths.mock_packages_path], ["builtin.mock"]),
([spack.paths.packages_path, spack.paths.mock_packages_path], ["builtin", "builtin.mock"]),
([spack.paths.mock_packages_path, spack.paths.packages_path], ["builtin.mock", "builtin"]),
],
)
def test_repository_construction_doesnt_use_globals(nullify_globals, repo_paths, namespaces):
repo_path = spack.repo.RepoPath(*repo_paths)
assert len(repo_path.repos) == len(namespaces)
assert [x.namespace for x in repo_path.repos] == namespaces


@ -368,7 +368,7 @@ def test_install_sbang_too_long(tmpdir):
add = min(num_extend, 255)
long_path = os.path.join(long_path, "e" * add)
num_extend -= add
with spack.store.use_store(spack.store.Store(long_path)):
with spack.store.use_store(long_path):
with pytest.raises(sbang.SbangPathError) as exc_info:
sbang.sbang_install_path()


@ -17,25 +17,19 @@
def test_disable_locking(tmpdir):
"""Ensure that locks do no real locking when disabled."""
lock_path = str(tmpdir.join("lockfile"))
lock = lk.Lock(lock_path, enable=False)
old_value = spack.config.get("config:locks")
lock.acquire_read()
assert not os.path.exists(lock_path)
with spack.config.override("config:locks", False):
lock = lk.Lock(lock_path)
lock.acquire_write()
assert not os.path.exists(lock_path)
lock.acquire_read()
assert not os.path.exists(lock_path)
lock.release_write()
assert not os.path.exists(lock_path)
lock.acquire_write()
assert not os.path.exists(lock_path)
lock.release_write()
assert not os.path.exists(lock_path)
lock.release_read()
assert not os.path.exists(lock_path)
assert old_value == spack.config.get("config:locks")
lock.release_read()
assert not os.path.exists(lock_path)
# "Disable" mock_stage fixture to avoid subdir permissions issues on cleanup.


@ -86,11 +86,13 @@ def get_file_lines(filename):
if match:
filename, line, key, val = match.groups()
line = int(line)
val = val.strip("'\"")
lines = get_file_lines(filename)
assert key in lines[line]
val = val.strip("'\"")
printed_line = lines[line]
if val.lower() in ("true", "false"):
val = val.lower()
printed_line = printed_line.lower()
lines = get_file_lines(filename)
assert key in lines[line], filename
assert val in lines[line]
assert val in printed_line, filename


@ -17,7 +17,6 @@
from llnl.util.lock import ReadTransaction # noqa: F401
from llnl.util.lock import WriteTransaction # noqa: F401
import spack.config
import spack.error
import spack.paths
@ -31,8 +30,13 @@ class Lock(llnl.util.lock.Lock):
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._enable = spack.config.get("config:locks", sys.platform != "win32")
enable_lock = kwargs.pop("enable", None)
if sys.platform == "win32":
enable_lock = False
elif sys.platform != "win32" and enable_lock is None:
enable_lock = True
self._enable = enable_lock
super(Lock, self).__init__(*args, **kwargs)
def _lock(self, op, timeout=0):
if self._enable:
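
A reduced model of the new ``enable`` flag (a toy class, not the real
``llnl.util.lock`` API): when disabled, every operation returns before touching
the filesystem, which is what the rewritten ``test_disable_locking`` above asserts:

    class GatedLock:
        """All lock operations are gated on a single enable flag."""

        def __init__(self, path: str, enable: bool = True) -> None:
            self.path = path
            self._enable = enable

        def acquire_read(self) -> None:
            if not self._enable:
                return  # no-op: no lockfile is created, nothing is locked
            raise NotImplementedError("real byte-range locking elided here")

    GatedLock("/tmp/lockfile", enable=False).acquire_read()  # does nothing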