db: type hints (#46168)

Harmen Stoppels 2024-09-02 14:44:49 +02:00 committed by GitHub
parent b5dec35113
commit fe2d06399f
2 changed files with 74 additions and 70 deletions

lib/spack/spack/database.py

@@ -59,7 +59,11 @@
import spack.util.lock as lk
import spack.util.spack_json as sjson
import spack.version as vn
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
from spack.directory_layout import (
DirectoryLayout,
DirectoryLayoutError,
InconsistentInstallDirectoryError,
)
from spack.error import SpackError
from spack.util.crypto import bit_length
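
The hunk above pulls DirectoryLayout into the import so it can be used in annotations, while Spec appears below only as the string literal "spack.spec.Spec". A minimal sketch (not code from this commit) of why string annotations are handy when importing the annotated type eagerly would be circular or unnecessary at runtime; the module and names here are hypothetical:

    # sketch: string ("forward reference") annotations are only resolved by type
    # checkers, so the annotated module never has to be imported at runtime
    from typing import TYPE_CHECKING, Optional

    if TYPE_CHECKING:
        import pkg  # hypothetical module that would be circular to import eagerly

    def register(item: "pkg.Thing", layout: Optional["pkg.Layout"] = None) -> None:
        # at runtime the annotations are plain strings, so no import cycle occurs
        ...
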
@@ -950,13 +954,17 @@ def _read_suppress_error():
raise
def _construct_entry_from_directory_layout(
self, directory_layout, old_data, spec, deprecator=None
self,
directory_layout: DirectoryLayout,
old_data: Dict[str, InstallRecord],
spec: "spack.spec.Spec",
deprecator: Optional["spack.spec.Spec"] = None,
):
# Try to recover explicit value from old DB, but
# default it to True if DB was corrupt. This is
# just to be conservative in case a command like
# "autoremove" is run by the user after a reindex.
tty.debug("RECONSTRUCTING FROM SPEC.YAML: {0}".format(spec))
tty.debug(f"Reconstructing from spec file: {spec}")
explicit = True
inst_time = os.stat(spec.prefix).st_ctime
if old_data is not None:
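
The comment kept in this hunk carries the reasoning behind the default: if the old database cannot be read, marking a record explicit only makes later commands such as autoremove more conservative, whereas wrongly marking it implicit could let them remove something the user asked for. A condensed sketch of that fallback (the helper name is made up; in the method the lookup happens inline):

    def recover_explicit(old_record) -> bool:
        # prefer whatever the old database recorded ...
        if old_record is not None:
            return old_record.explicit
        # ... but on corrupt or missing data, never downgrade to "implicit"
        return True
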
@@ -965,17 +973,17 @@ def _construct_entry_from_directory_layout(
explicit = old_info.explicit
inst_time = old_info.installation_time
extra_args = {"explicit": explicit, "installation_time": inst_time}
self._add(spec, directory_layout, **extra_args)
self._add(spec, directory_layout, explicit=explicit, installation_time=inst_time)
if deprecator:
self._deprecate(spec, deprecator)
def _construct_from_directory_layout(self, directory_layout, old_data):
# Read first the `spec.yaml` files in the prefixes. They should be
# considered authoritative with respect to DB reindexing, as
# entries in the DB may be corrupted in a way that still makes
# them readable. If we considered DB entries authoritative
# instead, we would perpetuate errors over a reindex.
def _construct_from_directory_layout(
self, directory_layout: DirectoryLayout, old_data: Dict[str, InstallRecord]
):
# Read first the spec files in the prefixes. They should be considered authoritative with
# respect to DB reindexing, as entries in the DB may be corrupted in a way that still makes
# them readable. If we considered DB entries authoritative instead, we would perpetuate
# errors over a reindex.
with directory_layout.disable_upstream_check():
# Initialize data in the reconstructed DB
self._data = {}
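
The rewrapped comment keeps the original argument: when reindexing, the spec files found in the installation prefixes are authoritative, because a corrupted-but-readable database entry would otherwise be copied forward on every reindex. A rough sketch of that two-pass shape (all_specs and add_entry are stand-ins, not the exact calls used here):

    def reindex(layout, old_data, add_entry):
        processed = set()
        # pass 1: trust what is actually present on disk
        for spec in layout.all_specs():
            add_entry(spec, layout)
            processed.add(spec)
        # pass 2: old database entries fill in only what the prefixes did not
        # cover, typically externals that are not dependencies of anything
        for record in old_data.values():
            if record.spec not in processed:
                add_entry(record.spec, None if record.spec.external else layout)
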
@@ -994,34 +1002,29 @@ def _construct_from_directory_layout(self, directory_layout, old_data):
)
processed_specs.add(spec)
for key, entry in old_data.items():
# We already took care of this spec using
# `spec.yaml` from its prefix.
for entry in old_data.values():
# We already took care of this spec using spec file from its prefix.
if entry.spec in processed_specs:
msg = "SKIPPING RECONSTRUCTION FROM OLD DB: {0}"
msg += " [already reconstructed from spec.yaml]"
tty.debug(msg.format(entry.spec))
tty.debug(
f"Skipping reconstruction from old db: {entry.spec}"
" [already reconstructed from spec file]"
)
continue
# If we arrived here it very likely means that
# we have external specs that are not dependencies
# of other specs. This may be the case for externally
# installed compilers or externally installed
# applications.
tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
# If we arrived here it very likely means that we have external specs that are not
# dependencies of other specs. This may be the case for externally installed
# compilers or externally installed applications.
tty.debug(f"Reconstructing from old db: {entry.spec}")
try:
layout = None if entry.spec.external else directory_layout
kwargs = {
"spec": entry.spec,
"directory_layout": layout,
"explicit": entry.explicit,
"installation_time": entry.installation_time,
}
self._add(**kwargs)
self._add(
spec=entry.spec,
directory_layout=None if entry.spec.external else directory_layout,
explicit=entry.explicit,
installation_time=entry.installation_time,
)
processed_specs.add(entry.spec)
except Exception as e:
# Something went wrong, so the spec was not restored
# from old data
# Something went wrong, so the spec was not restored from old data
tty.debug(e)
self._check_ref_counts()
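
Replacing the kwargs dict with spelled-out keyword arguments is what makes the new annotations on _add useful: a type checker can match explicit keywords against the signature, but it cannot see which key of a dynamically built dict ends up bound to which parameter. A small illustration with a stand-in function, not Spack's API:

    from typing import Optional

    def add(spec: str, explicit: bool = False, installation_time: Optional[float] = None) -> None:
        ...

    # explicit keywords are checked against the annotations one by one
    add("zlib", explicit=True, installation_time=1725278689.0)

    # the old pattern built a dict and unpacked it; the checker only sees the
    # dict's joined value type, not which key binds to which parameter
    extra = {"explicit": True}
    add("zlib", **extra)
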
@@ -1117,29 +1120,27 @@ def _read(self):
def _add(
self,
spec,
directory_layout=None,
explicit=False,
installation_time=None,
allow_missing=False,
spec: "spack.spec.Spec",
directory_layout: Optional[DirectoryLayout] = None,
explicit: bool = False,
installation_time: Optional[float] = None,
allow_missing: bool = False,
):
"""Add an install record for this spec to the database.
Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.
Also ensures dependencies are present and updated in the DB as
either installed or missing.
Also ensures dependencies are present and updated in the DB as either installed or missing.
Args:
spec (spack.spec.Spec): spec to be added
spec: spec to be added
directory_layout: layout of the spec installation
explicit:
Possible values: True, False, any
A spec that was installed following a specific user
request is marked as explicit. If instead it was
pulled-in as a dependency of a user requested spec
it's considered implicit.
A spec that was installed following a specific user request is marked as explicit.
If instead it was pulled-in as a dependency of a user requested spec it's
considered implicit.
installation_time:
Date and time of installation
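
The reflowed docstring keeps the same meaning: a record is explicit when the user asked for the spec directly and implicit when it was pulled in as a dependency. A toy predicate (not Spack's actual gc logic) showing how the flag is typically consumed together with the ref_count field used elsewhere in this diff:

    def removable(record) -> bool:
        # implicit records with no remaining dependents are candidates for
        # removal; explicit ones were requested by the user and are kept
        return not record.explicit and record.ref_count == 0
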
@@ -1150,29 +1151,25 @@ def _add(
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
key = spec.dag_hash()
spec_pkg_hash = spec._package_hash
spec_pkg_hash = spec._package_hash # type: ignore[attr-defined]
upstream, record = self.query_by_spec_hash(key)
if upstream:
return
# Retrieve optional arguments
installation_time = installation_time or _now()
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
if edge.spec.dag_hash() in self._data:
continue
# allow missing build-only deps. This prevents excessive
# warnings when a spec is installed, and its build dep
# is missing a build dep; there's no need to install the
# build dep's build dep first, and there's no need to warn
# about it missing.
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
self._add(
edge.spec,
directory_layout,
explicit=False,
installation_time=installation_time,
allow_missing=dep_allow_missing,
# allow missing build-only deps. This prevents excessive warnings when a spec is
# installed, and its build dep is missing a build dep; there's no need to install
# the build dep's build dep first, and there's no need to warn about it missing.
allow_missing=allow_missing or edge.depflag == dt.BUILD,
)
# Make sure the directory layout agrees whether the spec is installed
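
The consolidated comment explains the relaxation being threaded through the recursion: a missing build-only dependency of an already installed spec is expected and should not be reinstalled or warned about. A compressed sketch of how the flag propagates (is_build_only and the final check are stand-ins for the depflag test and the directory layout check):

    def add(spec, allow_missing: bool = False) -> None:
        for edge in spec.edges_to_dependencies():
            # once a build-only edge is crossed, everything beneath it may
            # legitimately be absent from disk
            add(edge.spec, allow_missing=allow_missing or edge.is_build_only)
        if not allow_missing:
            ...  # here the real method verifies the prefix via the directory layout
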
@@ -1185,13 +1182,12 @@ def _add(
self._installed_prefixes.add(path)
except DirectoryLayoutError as e:
if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)):
msg = (
"{0} is being {1} in the database with prefix {2}, "
"but this directory does not contain an installation of "
"the spec, due to: {3}"
)
action = "updated" if key in self._data else "registered"
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
tty.warn(
f"{spec.short_spec} is being {action} in the database with prefix {path}, "
"but this directory does not contain an installation of "
f"the spec, due to: {e}"
)
elif spec.external_path:
path = spec.external_path
installed = True
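
Beyond the reworded warning, this hunk shows how the record's prefix is resolved: Spack-managed installs get it from the directory layout (path_for_spec, per the docstring above), while external specs bring their own prefix and are treated as installed without a layout check. A condensed, hypothetical restatement:

    def record_prefix(spec, layout):
        if layout is not None:
            return layout.path_for_spec(spec)  # normal, Spack-managed install
        if spec.external_path:
            return spec.external_path          # externally provided package
        return None                            # nothing on disk to point at
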
@@ -1202,23 +1198,27 @@ def _add(
if key not in self._data:
# Create a new install record with no deps initially.
new_spec = spec.copy(deps=False)
extra_args = {"explicit": explicit, "installation_time": installation_time}
# Commands other than 'spack install' may add specs to the DB,
# we can record the source of an installed Spec with 'origin'
if hasattr(spec, "origin"):
extra_args["origin"] = spec.origin
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
self._data[key] = InstallRecord(
new_spec,
path=path,
installed=installed,
ref_count=0,
explicit=explicit,
installation_time=installation_time,
origin=None if not hasattr(spec, "origin") else spec.origin,
)
# Connect dependencies from the DB to the new copy.
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
assert record, f"Missing dependency {dep.spec} in DB"
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
if not upstream:
record.ref_count += 1
# Mark concrete once everything is built, and preserve
# the original hashes of concrete specs.
# Mark concrete once everything is built, and preserve the original hashes of concrete
# specs.
new_spec._mark_concrete()
new_spec._hash = key
new_spec._package_hash = spec_pkg_hash
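
The final database.py hunk stores a dependency-free copy of the spec and then re-attaches its dependency edges by looking each one up in the database, bumping ref_count only for local (non-upstream) records; origin is recorded when a command other than spack install set it on the spec. A compressed sketch of the linking step, with query_by_spec_hash stubbed out as a plain lookup:

    def link_dependencies(new_spec, spec, lookup):
        # lookup stands in for query_by_spec_hash: dag hash -> (is_upstream, record)
        for dep in spec.edges_to_dependencies():
            upstream, record = lookup(dep.spec.dag_hash())
            new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
            if not upstream:
                record.ref_count += 1  # only local records are reference counted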

lib/spack/spack/spec.py

@@ -1552,7 +1552,9 @@ def _get_dependency(self, name):
raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0]
def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
def edges_from_dependents(
self, name=None, depflag: dt.DepFlag = dt.ALL
) -> List[DependencySpec]:
"""Return a list of edges connecting this node in the DAG
to parents.
@@ -1562,7 +1564,9 @@ def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
"""
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
def edges_to_dependencies(
self, name=None, depflag: dt.DepFlag = dt.ALL
) -> List[DependencySpec]:
"""Return a list of edges connecting this node in the DAG
to children.
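
The two List[DependencySpec] return annotations are what let the database.py call sites above type-check: the loops over edges_to_dependencies access .spec, .depflag and .virtuals on each edge, and those attributes now resolve on a known class. A minimal usage sketch, assuming spec is an already concrete spack.spec.Spec:

    def dependency_hashes(spec) -> list:
        hashes = []
        for edge in spec.edges_to_dependencies():  # -> List[DependencySpec]
            hashes.append(edge.spec.dag_hash())    # edge attributes are now typed
        return hashes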