Database: make constructor safe for read-only filesystems (#48454)
`spack find` and other commands that read the DB will fail if the database is in a read-only location, because `FailureTracker` and `Database` can try to create their parent directories when they are constructed, even if the DB root is read-only. Instead, we should only write to the filesystem when needed.

- [x] don't create parent directories in constructors
- [x] have write operations create parents if needed
- [x] in tests, `chmod` databases to be read-only unless absolutely needed

---------

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
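In essence, constructors now only compute paths, and every write path calls an idempotent helper that creates parent directories just before writing. A minimal sketch of the pattern, using a hypothetical class rather than Spack's actual `Database` API:

```python
import pathlib


class LazyDirStore:
    """Sketch of the lazy-creation pattern: the constructor computes paths
    but never touches the filesystem; only writes create directories."""

    def __init__(self, root: str) -> None:
        # Safe to construct on a read-only filesystem: no mkdir happens here.
        self.dir = pathlib.Path(root) / ".spack-db"
        self.index = self.dir / "index.json"

    def _ensure_parent_directories(self) -> None:
        # Idempotent; called from every write path just before writing.
        self.dir.mkdir(parents=True, exist_ok=True)

    def read(self) -> str:
        # Reads never create anything, so read-only commands keep working.
        return self.index.read_text() if self.index.is_file() else ""

    def write(self, data: str) -> None:
        self._ensure_parent_directories()  # create parents only when needed
        self.index.write_text(data)
```

With that split, a database rooted on a read-only filesystem can be constructed and queried freely; the first `mkdir` happens only inside a write operation.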
This commit is contained in:
parent 53d1665a8b
commit d89ae7bcde
lib/spack/spack/binary_distribution.py

@@ -23,7 +23,7 @@
 import urllib.request
 import warnings
 from contextlib import closing
-from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
 
 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -669,19 +669,24 @@ def sign_specfile(key: str, specfile_path: str) -> str:
 
 
 def _read_specs_and_push_index(
-    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
+    file_list: List[str],
+    read_method: Callable,
+    cache_prefix: str,
+    db: BuildCacheDatabase,
+    temp_dir: str,
+    concurrency: int,
 ):
     """Read all the specs listed in the provided list, using the given thread parallelism,
     generate the index, and push it to the mirror.
 
     Args:
-        file_list (list(str)): List of urls or file paths pointing at spec files to read
+        file_list: List of urls or file paths pointing at spec files to read
         read_method: A function taking a single argument, either a url or a file path,
             and which reads the spec file at that location, and returns the spec.
-        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
+        cache_prefix: prefix of the build cache on s3 where index should be pushed.
         db: A spack database used for adding specs and then writing the index.
-        temp_dir (str): Location to write index.json and hash for pushing
-        concurrency (int): Number of parallel processes to use when fetching
+        temp_dir: Location to write index.json and hash for pushing
+        concurrency: Number of parallel processes to use when fetching
     """
     for file in file_list:
         contents = read_method(file)
@@ -861,9 +866,12 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
     tty.debug(f"Retrieving spec descriptor files from {url} to build index")
 
     db = BuildCacheDatabase(tmpdir)
+    db._write()
 
     try:
-        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
+        _read_specs_and_push_index(
+            file_list, read_fn, url, db, str(db.database_directory), concurrency
+        )
     except Exception as e:
         raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e
lib/spack/spack/database.py

@@ -419,14 +419,25 @@ class FailureTracker:
     the likelihood of collision very low with no cleanup required.
     """
 
+    #: root directory of the failure tracker
+    dir: pathlib.Path
+
+    #: File for locking particular concrete spec hashes
+    locker: SpecLocker
+
     def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
         #: Ensure a persistent location for dealing with parallel installation
         #: failures (e.g., across near-concurrent processes).
         self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
-        self.dir.mkdir(parents=True, exist_ok=True)
 
         self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
 
+    def _ensure_parent_directories(self) -> None:
+        """Ensure that parent directories of the FailureTracker exist.
+
+        Accesses the filesystem only once, the first time it's called on a given FailureTracker.
+        """
+        self.dir.mkdir(parents=True, exist_ok=True)
+
     def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
         """Removes any persistent and cached failure tracking for the spec.
 
@@ -469,13 +480,18 @@ def clear_all(self) -> None:
 
         tty.debug("Removing prefix failure tracking files")
         try:
-            for fail_mark in os.listdir(str(self.dir)):
-                try:
-                    (self.dir / fail_mark).unlink()
-                except OSError as exc:
-                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
+            marks = os.listdir(str(self.dir))
+        except FileNotFoundError:
+            return  # directory doesn't exist yet
         except OSError as exc:
             tty.warn(f"Unable to remove failure marking files: {str(exc)}")
+            return
+
+        for fail_mark in marks:
+            try:
+                (self.dir / fail_mark).unlink()
+            except OSError as exc:
+                tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
 
     def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         """Marks a spec as failing to install.
@@ -483,6 +499,8 @@ def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         Args:
             spec: spec that failed to install
         """
+        self._ensure_parent_directories()
+
         # Dump the spec to the failure file for (manual) debugging purposes
         path = self._path(spec)
         path.write_text(spec.to_json())
@@ -567,17 +585,13 @@ def __init__(
             Relevant only if the repository is not an upstream.
         """
         self.root = root
-        self.database_directory = os.path.join(self.root, _DB_DIRNAME)
+        self.database_directory = pathlib.Path(self.root) / _DB_DIRNAME
         self.layout = layout
 
         # Set up layout of database files within the db dir
-        self._index_path = os.path.join(self.database_directory, "index.json")
-        self._verifier_path = os.path.join(self.database_directory, "index_verifier")
-        self._lock_path = os.path.join(self.database_directory, "lock")
-
-        # Create needed directories and files
-        if not is_upstream and not os.path.exists(self.database_directory):
-            fs.mkdirp(self.database_directory)
+        self._index_path = self.database_directory / "index.json"
+        self._verifier_path = self.database_directory / "index_verifier"
+        self._lock_path = self.database_directory / "lock"
 
         self.is_upstream = is_upstream
         self.last_seen_verifier = ""
@@ -599,7 +613,7 @@ def __init__(
             self.lock = ForbiddenLock()
         else:
             self.lock = lk.Lock(
-                self._lock_path,
+                str(self._lock_path),
                 default_timeout=self.db_lock_timeout,
                 desc="database",
                 enable=lock_cfg.enable,
@@ -616,6 +630,11 @@ def __init__(
         self._write_transaction_impl = lk.WriteTransaction
         self._read_transaction_impl = lk.ReadTransaction
 
+    def _ensure_parent_directories(self):
+        """Create the parent directory for the DB, if necessary."""
+        if not self.is_upstream:
+            self.database_directory.mkdir(parents=True, exist_ok=True)
+
     def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return self._write_transaction_impl(self.lock, acquire=self._read, release=self._write)
@@ -630,6 +649,8 @@ def _write_to_file(self, stream):
 
         This function does not do any locking or transactions.
         """
+        self._ensure_parent_directories()
+
         # map from per-spec hash code to installation record.
         installs = dict(
             (k, v.to_dict(include_fields=self.record_fields)) for k, v in self._data.items()
@@ -759,7 +780,7 @@ def _read_from_file(self, filename):
         Does not do any locking.
         """
         try:
-            with open(filename, "r", encoding="utf-8") as f:
+            with open(str(filename), "r", encoding="utf-8") as f:
                 # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
                 fdata, _ = JSONDecoder().raw_decode(f.read())
         except Exception as e:
@@ -860,11 +881,13 @@ def reindex(self):
         if self.is_upstream:
             raise UpstreamDatabaseLockingError("Cannot reindex an upstream database")
 
+        self._ensure_parent_directories()
+
         # Special transaction to avoid recursive reindex calls and to
         # ignore errors if we need to rebuild a corrupt database.
        def _read_suppress_error():
             try:
-                if os.path.isfile(self._index_path):
+                if self._index_path.is_file():
                     self._read_from_file(self._index_path)
             except CorruptDatabaseError as e:
                 tty.warn(f"Reindexing corrupt database, error was: {e}")
@@ -1007,7 +1030,7 @@ def _check_ref_counts(self):
                 % (key, found, expected, self._index_path)
             )
 
-    def _write(self, type, value, traceback):
+    def _write(self, type=None, value=None, traceback=None):
         """Write the in-memory database index to its file path.
 
         This is a helper function called by the WriteTransaction context
@@ -1018,6 +1041,8 @@ def _write(self, type, value, traceback):
 
         This routine does no locking.
         """
+        self._ensure_parent_directories()
+
         # Do not write if exceptions were raised
         if type is not None:
             # A failure interrupted a transaction, so we should record that
@@ -1026,16 +1051,16 @@ def _write(self, type, value, traceback):
             self._state_is_inconsistent = True
             return
 
-        temp_file = self._index_path + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
+        temp_file = str(self._index_path) + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
 
         # Write a temporary database file, then move it into place
         try:
             with open(temp_file, "w", encoding="utf-8") as f:
                 self._write_to_file(f)
-            fs.rename(temp_file, self._index_path)
+            fs.rename(temp_file, str(self._index_path))
 
             if _use_uuid:
-                with open(self._verifier_path, "w", encoding="utf-8") as f:
+                with self._verifier_path.open("w", encoding="utf-8") as f:
                     new_verifier = str(uuid.uuid4())
                     f.write(new_verifier)
                     self.last_seen_verifier = new_verifier
@@ -1048,11 +1073,11 @@ def _write(self, type, value, traceback):
 
     def _read(self):
         """Re-read Database from the data in the set location. This does no locking."""
-        if os.path.isfile(self._index_path):
+        if self._index_path.is_file():
             current_verifier = ""
             if _use_uuid:
                 try:
-                    with open(self._verifier_path, "r", encoding="utf-8") as f:
+                    with self._verifier_path.open("r", encoding="utf-8") as f:
                         current_verifier = f.read()
                 except BaseException:
                     pass
lib/spack/spack/test/conftest.py

@@ -887,21 +887,26 @@ def mock_store(
     """
     store_path, store_cache = _store_dir_and_cache
 
+    # Make the DB filesystem read-only to ensure constructors don't modify anything in it.
+    # We want Spack to be able to point to a DB on a read-only filesystem easily.
+    store_path.chmod(mode=0o555, rec=1)
+
     # If the cache does not exist populate the store and create it
     if not os.path.exists(str(store_cache.join(".spack-db"))):
         with spack.config.use_configuration(*mock_configuration_scopes):
             with spack.store.use_store(str(store_path)) as store:
                 with spack.repo.use_repositories(mock_repo_path):
+                    # make the DB filesystem writable only while we populate it
+                    store_path.chmod(mode=0o755, rec=1)
                     _populate(store.db)
-        copy_tree(str(store_path), str(store_cache))
+                    store_path.chmod(mode=0o555, rec=1)
 
-    # Make the DB filesystem read-only to ensure we can't modify entries
-    store_path.join(".spack-db").chmod(mode=0o555, rec=1)
+        store_cache.chmod(mode=0o755, rec=1)
+        copy_tree(str(store_path), str(store_cache))
+        store_cache.chmod(mode=0o555, rec=1)
 
     yield store_path
 
-    store_path.join(".spack-db").chmod(mode=0o755, rec=1)
 
 
 @pytest.fixture(scope="function")
 def database(mock_store, mock_packages, config):
@@ -927,7 +932,7 @@ def mutable_database(database_mutable_config, _store_dir_and_cache):
     """
     # Make the database writeable, as we are going to modify it
     store_path, store_cache = _store_dir_and_cache
-    store_path.join(".spack-db").chmod(mode=0o755, rec=1)
+    store_path.chmod(mode=0o755, rec=1)
 
     yield database_mutable_config
 
@@ -935,7 +940,7 @@ def mutable_database(database_mutable_config, _store_dir_and_cache):
     # the store and making the database read-only
     store_path.remove(rec=1)
     copy_tree(str(store_cache), str(store_path))
-    store_path.join(".spack-db").chmod(mode=0o555, rec=1)
+    store_path.chmod(mode=0o555, rec=1)
 
 
 @pytest.fixture()
@@ -1039,7 +1044,8 @@ def temporary_store(tmpdir, request):
     temporary_store_path = tmpdir.join("opt")
     with spack.store.use_store(str(temporary_store_path)) as s:
         yield s
-    temporary_store_path.remove()
+    if temporary_store_path.exists():
+        temporary_store_path.remove()
 
 
 @pytest.fixture()
lib/spack/spack/test/database.py

@@ -2,10 +2,12 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Check the database is functioning properly, both in memory and in its file."""
+import contextlib
 import datetime
 import functools
 import json
 import os
+import pathlib
 import re
 import shutil
 import sys
@@ -32,6 +34,7 @@
 import spack.repo
 import spack.spec
 import spack.store
+import spack.util.lock
 import spack.version as vn
 from spack.enums import InstallRecordStatus
 from spack.installer import PackageInstaller
@@ -41,24 +44,51 @@
 pytestmark = pytest.mark.db
 
 
+@contextlib.contextmanager
+def writable(database):
+    """Allow a database to be written inside this context manager."""
+    old_lock, old_is_upstream = database.lock, database.is_upstream
+    db_root = pathlib.Path(database.root)
+
+    try:
+        # this is safe on all platforms during tests (tests get their own tmpdirs)
+        database.lock = spack.util.lock.Lock(str(database._lock_path), enable=False)
+        database.is_upstream = False
+        db_root.chmod(mode=0o755)
+        with database.write_transaction():
+            yield
+    finally:
+        db_root.chmod(mode=0o555)
+        database.lock = old_lock
+        database.is_upstream = old_is_upstream
+
+
 @pytest.fixture()
 def upstream_and_downstream_db(tmpdir, gen_mock_layout):
-    mock_db_root = str(tmpdir.mkdir("mock_db_root"))
-    upstream_layout = gen_mock_layout("/a/")
-    upstream_write_db = spack.database.Database(mock_db_root, layout=upstream_layout)
-    upstream_db = spack.database.Database(mock_db_root, is_upstream=True, layout=upstream_layout)
-    # Generate initial DB file to avoid reindex
-    with open(upstream_write_db._index_path, "w", encoding="utf-8") as db_file:
-        upstream_write_db._write_to_file(db_file)
+    """Fixture for a pair of stores: upstream and downstream.
 
-    downstream_db_root = str(tmpdir.mkdir("mock_downstream_db_root"))
-    downstream_db = spack.database.Database(
-        downstream_db_root, upstream_dbs=[upstream_db], layout=gen_mock_layout("/b/")
+    Upstream API prohibits writing to an upstream, so we also return a writable version
+    of the upstream DB for tests to use.
+
+    """
+    mock_db_root = tmpdir.mkdir("mock_db_root")
+    mock_db_root.chmod(0o555)
+
+    upstream_db = spack.database.Database(
+        str(mock_db_root), is_upstream=True, layout=gen_mock_layout("/a/")
     )
-    with open(downstream_db._index_path, "w", encoding="utf-8") as db_file:
-        downstream_db._write_to_file(db_file)
+    with writable(upstream_db):
+        upstream_db._write()
+
+    downstream_db_root = tmpdir.mkdir("mock_downstream_db_root")
+    downstream_db_root.chmod(0o755)
+
+    downstream_db = spack.database.Database(
+        str(downstream_db_root), upstream_dbs=[upstream_db], layout=gen_mock_layout("/b/")
+    )
+    downstream_db._write()
 
-    yield upstream_write_db, upstream_db, downstream_db
+    yield upstream_db, downstream_db
 
 
 @pytest.mark.parametrize(
@@ -70,16 +100,18 @@ def upstream_and_downstream_db(tmpdir, gen_mock_layout):
         ("{u}", ["pkg-c"]),
         ("{d}", ["pkg-b"]),
     ],
     ids=["all", "upstream", "local", "upstream_path", "downstream_path"],
 )
 def test_query_by_install_tree(
     install_tree, result, upstream_and_downstream_db, mock_packages, monkeypatch, config
 ):
-    up_write_db, up_db, down_db = upstream_and_downstream_db
+    up_db, down_db = upstream_and_downstream_db
 
     # Set the upstream DB to contain "pkg-c" and downstream to contain "pkg-b")
     b = spack.spec.Spec("pkg-b").concretized()
     c = spack.spec.Spec("pkg-c").concretized()
-    up_write_db.add(c)
+    with writable(up_db):
+        up_db.add(c)
     up_db._read()
     down_db.add(b)
 
@@ -91,7 +123,7 @@ def test_spec_installed_upstream(
     upstream_and_downstream_db, mock_custom_repository, config, monkeypatch
 ):
     """Test whether Spec.installed_upstream() works."""
-    upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
+    upstream_db, downstream_db = upstream_and_downstream_db
 
     # a known installed spec should say that it's installed
     with spack.repo.use_repositories(mock_custom_repository):
@@ -99,7 +131,8 @@ def test_spec_installed_upstream(
         assert not spec.installed
         assert not spec.installed_upstream
 
-        upstream_write_db.add(spec)
+        with writable(upstream_db):
+            upstream_db.add(spec)
         upstream_db._read()
 
         monkeypatch.setattr(spack.store.STORE, "db", downstream_db)
@@ -115,7 +148,7 @@ def test_spec_installed_upstream(
 
 @pytest.mark.usefixtures("config")
 def test_installed_upstream(upstream_and_downstream_db, tmpdir):
-    upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
+    upstream_db, downstream_db = upstream_and_downstream_db
 
     builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
     builder.add_package("x")
@@ -125,8 +158,9 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
 
     with spack.repo.use_repositories(builder.root):
         spec = spack.spec.Spec("w").concretized()
-        for dep in spec.traverse(root=False):
-            upstream_write_db.add(dep)
+        with writable(upstream_db):
+            for dep in spec.traverse(root=False):
+                upstream_db.add(dep)
         upstream_db._read()
 
         for dep in spec.traverse(root=False):
@@ -150,7 +184,7 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
 
 
 def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir, capsys, config):
-    upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
+    upstream_db, downstream_db = upstream_and_downstream_db
 
     builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
     builder.add_package("z")
@@ -161,12 +195,14 @@ def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir, capsys, config
     z = y["z"]
 
     # add dependency to upstream, dependents to downstream
-    upstream_write_db.add(z)
+    with writable(upstream_db):
+        upstream_db.add(z)
     upstream_db._read()
     downstream_db.add(y)
 
     # remove the dependency from the upstream DB
-    upstream_write_db.remove(z)
+    with writable(upstream_db):
+        upstream_db.remove(z)
     upstream_db._read()
 
     # then rereading the downstream DB should warn about the missing dep
@@ -183,7 +219,7 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
     DB. When a package is recorded as installed in both, the results should
     refer to the downstream DB.
     """
-    upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
+    upstream_db, downstream_db = upstream_and_downstream_db
 
     builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
     builder.add_package("x")
@@ -192,7 +228,8 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
     spec = spack.spec.Spec("x").concretized()
 
     downstream_db.add(spec)
-    upstream_write_db.add(spec)
+    with writable(upstream_db):
+        upstream_db.add(spec)
     upstream_db._read()
 
     upstream, record = downstream_db.query_by_spec_hash(spec.dag_hash())
@@ -843,7 +880,7 @@ def test_query_virtual_spec(database):
     assert all(name in names for name in ["mpich", "mpich2", "zmpi"])
 
 
-def test_failed_spec_path_error(database):
+def test_failed_spec_path_error(mutable_database):
     """Ensure spec not concrete check is covered."""
     s = spack.spec.Spec("pkg-a")
     with pytest.raises(AssertionError, match="concrete spec required"):
@@ -950,7 +987,7 @@ def test_database_works_with_empty_dir(tmpdir):
     db_dir = tmpdir.ensure_dir(".spack-db")
     db_dir.ensure("lock")
     db_dir.ensure_dir("failures")
-    tmpdir.chmod(mode=0o555, rec=1)
+    tmpdir.chmod(mode=0o555)
     db = spack.database.Database(str(tmpdir))
     with db.read_transaction():
         db.query()
@@ -1105,6 +1142,8 @@ def test_error_message_when_using_too_new_db(database, monkeypatch):
 def test_database_construction_doesnt_use_globals(tmpdir, config, nullify_globals, lock_cfg):
     lock_cfg = lock_cfg or spack.database.lock_configuration(config)
     db = spack.database.Database(str(tmpdir), lock_cfg=lock_cfg)
+    with db.write_transaction():
+        pass  # ensure the DB is written
     assert os.path.exists(db.database_directory)
 
 
@@ -1125,15 +1164,13 @@ def test_database_read_works_with_trailing_data(tmp_path, default_mock_concretiz
     assert spack.database.Database(root).query_local() == specs_in_db
 
 
-def test_database_errors_with_just_a_version_key(tmp_path):
-    root = str(tmp_path)
-    db = spack.database.Database(root)
+def test_database_errors_with_just_a_version_key(mutable_database):
     next_version = f"{spack.database._DB_VERSION}.next"
-    with open(db._index_path, "w", encoding="utf-8") as f:
+    with open(mutable_database._index_path, "w", encoding="utf-8") as f:
         f.write(json.dumps({"database": {"version": next_version}}))
 
     with pytest.raises(spack.database.InvalidDatabaseVersionError):
-        spack.database.Database(root).query_local()
+        spack.database.Database(mutable_database.root).query_local()
 
 
 def test_reindex_with_upstreams(tmp_path, monkeypatch, mock_packages, config):