Compare commits


1 Commit

Author: Harmen Stoppels
SHA1: 09407ae960
Message: ccache: set base_dir
Date: 2025-02-05 12:56:19 +01:00
4 changed files with 62 additions and 39 deletions

lib/spack/env/cc vendored

@@ -960,9 +960,6 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
case "$lang_flags" in
C|CXX) # ccache only supports C languages
prepend full_command_list "${SPACK_CCACHE_BINARY}"
# workaround for stage being a temp folder
# see #3761#issuecomment-294352232
export CCACHE_NOHASHDIR=yes
;;
esac
fi
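
For context, a small Python sketch (not Spack code) of what the base-dir approach buys over the removed workaround: CCACHE_NOHASHDIR drops the compilation directory from ccache's hash entirely, while CCACHE_BASEDIR keeps it but rewrites absolute paths under the base directory to relative ones, so builds staged in different temporary directories can still share cache entries. The path relativization below only mimics that behavior conceptually; the stage paths are made up.

import os

# Two hypothetical stage roots for the same spec, e.g. from different runs.
stage_a = "/tmp/run-a/spack-stage/zlib-1.3"
stage_b = "/tmp/run-b/spack-stage/zlib-1.3"
source = "src/deflate.c"

for stage_root in (stage_a, stage_b):
    absolute = os.path.join(stage_root, source)
    # With CCACHE_BASEDIR=<stage_root>, ccache hashes something equivalent to
    # this relative form, so both invocations can hit the same cache entry.
    relative = os.path.relpath(absolute, start=stage_root)
    print(f"{absolute} -> {relative}")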

lib/spack/spack/build_environment.py

@@ -610,9 +610,11 @@ def set_wrapper_variables(pkg, env):
if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
env.set("CCACHE_NOHASHDIR", "")
env.set("CCACHE_BASEDIR", os.path.realpath(spack.stage.get_stage_root()))
else:
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
env.set("CCACHE_DISABLE", "1")
env.set("CCACHE_DISABLE", "")
# Gather information about various types of dependencies
rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))

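A side note on the os.path.realpath call above, sketched under the assumption that the stage root can sit behind a symlink (as /tmp does on some platforms): ccache compares the compiler's working directory against CCACHE_BASEDIR textually, so exporting the resolved path presumably avoids a prefix mismatch. The directory names below are placeholders, not Spack's layout.

import os
import tempfile

# Hypothetical stage root reached through a symlink.
parent = tempfile.mkdtemp()
real_stage = os.path.join(parent, "real-stage")
linked_stage = os.path.join(parent, "stage")
os.mkdir(real_stage)
os.symlink(real_stage, linked_stage)

print("configured stage root:", linked_stage)
print("CCACHE_BASEDIR value: ", os.path.realpath(linked_stage))  # resolved form
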
lib/spack/spack/database.py

@@ -263,6 +263,18 @@ def from_dict(cls, spec, dictionary):
return InstallRecord(spec, **d)
class ForbiddenLockError(SpackError):
"""Raised when an upstream DB attempts to acquire a lock"""
class ForbiddenLock:
def __getattr__(self, name):
raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")
def __reduce__(self):
return ForbiddenLock, tuple()
class LockConfiguration(NamedTuple):
"""Data class to configure locks in Database objects
@@ -605,11 +617,15 @@ def __init__(
self.db_lock_timeout = lock_cfg.database_timeout
tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")
self.lock: Union[ForbiddenLock, lk.Lock]
if self.is_upstream:
self.lock = ForbiddenLock()
else:
self.lock = lk.Lock(
str(self._lock_path),
default_timeout=self.db_lock_timeout,
desc="database",
enable=not self.is_upstream and lock_cfg.enable,
enable=lock_cfg.enable,
)
self._data: Dict[str, InstallRecord] = {}
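
A self-contained sketch of the pattern introduced above, with stand-in names (FileLock replaces the real lk.Lock, and the error type is a plain RuntimeError subclass rather than SpackError): an upstream database gets a sentinel lock whose every attribute access raises, while a local database gets a real lock whose enable flag no longer needs the is_upstream special case. The __reduce__ method keeps the stateless sentinel picklable.

import pickle
from typing import Union

class ForbiddenLockError(RuntimeError):
    """Stand-in for the SpackError subclass above."""

class ForbiddenLock:
    def __getattr__(self, name):
        raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")

    def __reduce__(self):
        # The object has no state, so pickling just rebuilds a fresh instance.
        return ForbiddenLock, tuple()

class FileLock:
    """Stand-in for the real file lock, storing only what the sketch needs."""
    def __init__(self, path: str, desc: str = "", enable: bool = True):
        self.path, self.desc, self.enable = path, desc, enable

def make_db_lock(is_upstream: bool, lock_path: str, enable: bool) -> Union[ForbiddenLock, FileLock]:
    if is_upstream:
        return ForbiddenLock()
    return FileLock(lock_path, desc="database", enable=enable)

lock = make_db_lock(is_upstream=True, lock_path="/upstream/.spack-db/lock", enable=True)
lock = pickle.loads(pickle.dumps(lock))  # survives pickling thanks to __reduce__
try:
    lock.acquire_write()
except ForbiddenLockError as err:
    print("refused:", err)
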
@@ -1034,9 +1050,6 @@ def _write(self, type=None, value=None, traceback=None):
This routine does no locking.
"""
if self.is_upstream:
raise UpstreamDatabaseLockingError("Cannot write to an upstream database")
self._ensure_parent_directories()
# Do not write if exceptions were raised
@@ -1657,24 +1670,38 @@ def query(
"""
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
if install_tree not in valid_trees:
raise ValueError(
f"Invalid install_tree argument to Database.query(). Try one of {valid_trees}"
msg = "Invalid install_tree argument to Database.query()\n"
msg += f"Try one of {', '.join(valid_trees)}"
tty.error(msg)
return []
upstream_results = []
upstreams = self.upstream_dbs
if install_tree not in ("all", "upstream"):
upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
for upstream_db in upstreams:
# queries for upstream DBs need to *not* lock - we may not
# have permissions to do this and the upstream DBs won't know about
# us anyway (so e.g. they should never uninstall specs)
upstream_results.extend(
upstream_db._query(
query_spec,
predicate_fn=predicate_fn,
installed=installed,
explicit=explicit,
start_date=start_date,
end_date=end_date,
hashes=hashes,
in_buildcache=in_buildcache,
origin=origin,
)
or []
)
if install_tree == "all":
databases = [self, *self.upstream_dbs]
elif install_tree == "upstream":
databases = self.upstream_dbs
elif install_tree == "local" or self.root == install_tree:
databases = [self]
else:
databases = [u for u in self.upstream_dbs if u.root == install_tree]
results: List[spack.spec.Spec] = []
for db in databases:
results.extend(
db.query_local(
local_results: Set["spack.spec.Spec"] = set()
if install_tree in ("all", "local") or self.root == install_tree:
local_results = set(
self.query_local(
query_spec,
predicate_fn=predicate_fn,
installed=installed,
@@ -1687,13 +1714,8 @@ def query(
)
)
# Stable deduplication on dag hash picks local specs over upstreams.
if len(databases) > 1:
results = list(llnl.util.lang.dedupe(results, key=lambda x: x.dag_hash()))
# reduce number of comparisons with slow default __lt__
results.sort(key=lambda s: s.name)
results.sort()
results = list(local_results) + list(x for x in upstream_results if x not in local_results)
results.sort() # type: ignore[call-overload]
return results
def query_one(
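
A minimal stand-in (plain strings instead of Spec objects) for the merge at the end of the rewritten query() above: local results are collected into a set, upstream results are appended only when an equal spec is not already present locally, and the combined list is sorted once at the end.

# Hypothetical spec identifiers; in Spack these would be Spec objects compared
# by their usual equality, with local records shadowing upstream duplicates.
local_results = {"zlib@1.3.1/aaaaaaa", "cmake@3.30.5/bbbbbbb"}
upstream_results = ["zlib@1.3.1/aaaaaaa", "openssl@3.3.2/ccccccc"]

results = list(local_results) + [x for x in upstream_results if x not in local_results]
results.sort()
print(results)  # the upstream copy of zlib is dropped in favor of the local one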

lib/spack/spack/test/database.py

@@ -165,8 +165,10 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
upstream_db._read()
for dep in spec.traverse(root=False):
assert downstream_db.get_by_hash(dep.dag_hash()) is not None
assert upstream_db.get_by_hash(dep.dag_hash()) is not None
record = downstream_db.get_by_hash(dep.dag_hash())
assert record is not None
with pytest.raises(spack.database.ForbiddenLockError):
upstream_db.get_by_hash(dep.dag_hash())
new_spec = spack.concretize.concretize_one("w")
downstream_db.add(new_spec)
@@ -256,7 +258,7 @@ def test_cannot_write_upstream(tmp_path, mock_packages, config):
# Create it as an upstream
db = spack.database.Database(str(tmp_path), is_upstream=True)
with pytest.raises(spack.database.UpstreamDatabaseLockingError):
with pytest.raises(spack.database.ForbiddenLockError):
db.add(spack.concretize.concretize_one("pkg-a"))