spack.caches: uppercase global variables (#39428)

parent 41bd6a75d5
commit 29e2997bd5
@@ -20,9 +20,9 @@
 
 
 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.
 
-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():
 
 
 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)
 
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
 
 
 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
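For orientation, both module globals above follow the same lazy-initialization idea: llnl.util.lang.Singleton wraps a factory (_misc_cache / _fetch_cache) and only builds the real cache object on first use, so merely importing spack.caches stays cheap and configuration can still be read late. Below is a minimal sketch of that idea; LazySingleton and _make_cache are illustrative stand-ins, not Spack's actual llnl.util.lang.Singleton.

# Illustrative sketch only: a stand-in for llnl.util.lang.Singleton, showing
# why MISC_CACHE / FETCH_CACHE can be module globals without eager construction.
class LazySingleton:
    def __init__(self, factory):
        self._factory = factory   # callable that builds the real cache
        self._instance = None     # created on first attribute access

    def __getattr__(self, name):
        # Only reached for attributes not found on the wrapper itself.
        if self._instance is None:
            self._instance = self._factory()
        return getattr(self._instance, name)


def _make_cache():
    # Stand-in for _misc_cache()/_fetch_cache(): reads config, builds the cache.
    return {}


MISC_CACHE = LazySingleton(_make_cache)   # nothing is built yet
MISC_CACHE.keys()                         # first real use triggers _make_cache()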
@@ -118,7 +118,7 @@ def clean(parser, args):
 
     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()
 
     if args.failures:
         tty.msg("Removing install failure marks")
@@ -126,7 +126,7 @@ def clean(parser, args):
 
     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()
 
     if args.python_cache:
         tty.msg("Removing python cache files")
@@ -443,7 +443,7 @@ def mirror_create(args):
         )
 
     # When no directory is provided, the source dir is used
-    path = args.directory or spack.caches.fetch_cache_location()
+    path = args.directory or spack.caches.FETCH_CACHE_location()
 
     if args.all and not ev.active_environment():
         create_mirror_for_all_specs(
@@ -647,7 +647,7 @@ class RepoPath:
     """
 
    def __init__(self, *repos, **kwargs):
-        cache = kwargs.get("cache", spack.caches.misc_cache)
+        cache = kwargs.get("cache", spack.caches.MISC_CACHE)
         self.repos = []
         self.by_namespace = nm.NamespaceTrie()
 
@@ -966,7 +966,7 @@ def check(condition, msg):
 
         # Indexes for this repository, computed lazily
         self._repo_index = None
-        self._cache = cache or spack.caches.misc_cache
+        self._cache = cache or spack.caches.MISC_CACHE
 
     def real_name(self, import_name):
         """Allow users to import Spack packages using Python identifiers.
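One detail worth noting in the two repo hunks above: the global is resolved at call time (kwargs.get(...) inside __init__, or cache or spack.caches.MISC_CACHE), not captured as a def-time default argument. That keeps the global test-friendly, since a monkeypatched spack.caches.MISC_CACHE is still seen by objects constructed afterwards. A small sketch of the difference, using hypothetical classes rather than Spack's real Repo/RepoPath:

# Hypothetical classes illustrating default-argument evaluation time; neither
# is Spack's real Repo/RepoPath.
import spack.caches


class EagerDefault:
    # Fragile for testing: the default is evaluated once, when the def line
    # runs, so a later monkeypatch of spack.caches.MISC_CACHE is never seen.
    def __init__(self, cache=spack.caches.MISC_CACHE):
        self._cache = cache


class LateBinding:
    # What the diff does: resolve the global inside __init__, at call time.
    def __init__(self, cache=None):
        self._cache = cache or spack.caches.MISC_CACHE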
@@ -484,7 +484,7 @@ def fetch(self, mirror_only=False, err_msg=None):
 
         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
-                cache_fetcher = spack.caches.fetch_cache.fetcher(
+                cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
                     rel_path, digest, expand=expand, extension=extension
                 )
                 fetchers.insert(0, cache_fetcher)
@@ -577,7 +577,7 @@ def check(self):
         self.fetcher.check()
 
     def cache_local(self):
-        spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
+        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)
 
     def cache_mirror(self, mirror, stats):
         """Perform a fetch if the resource is not already cached
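Taken together, the two stage hunks above are the FETCH_CACHE round trip: cache_local() stores a freshly fetched archive in the local download cache, and fetch() later builds a cache-backed fetcher for each mirror path and inserts it at the front of the fetcher list, so a cached archive is preferred over a network download. The sketch below compresses that flow; the function signatures are simplified stand-ins, not the real Stage methods.

# Condensed, hypothetical view of the FETCH_CACHE round trip shown above.
# Real Stage objects carry much more state; only the cache interaction is kept.
import spack.caches


def candidate_fetchers(stage, digest, expand=True, extension=None):
    fetchers = [stage.default_fetcher]                # network fetcher tried last
    for rel_path in reversed(list(stage.mirror_paths)):
        cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
            rel_path, digest, expand=expand, extension=extension
        )
        fetchers.insert(0, cache_fetcher)             # cached copy tried first
    return fetchers


def cache_local(stage):
    # After a successful fetch, copy the archive into the download cache so a
    # later candidate_fetchers() call can satisfy it locally.
    spack.caches.FETCH_CACHE.store(stage.fetcher, stage.mirror_paths.storage_path)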
@@ -51,7 +51,7 @@
 def cache_directory(tmpdir):
     fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
     fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
-    spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
+    spack.config.caches, old_cache_path = fsc, spack.caches.FETCH_CACHE
 
     yield spack.config.caches
 
@@ -33,8 +33,8 @@ def __call__(self, *args, **kwargs):
 
     monkeypatch.setattr(spack.package_base.PackageBase, "do_clean", Counter("package"))
     monkeypatch.setattr(spack.stage, "purge", Counter("stages"))
-    monkeypatch.setattr(spack.caches.fetch_cache, "destroy", Counter("downloads"), raising=False)
-    monkeypatch.setattr(spack.caches.misc_cache, "destroy", Counter("caches"))
+    monkeypatch.setattr(spack.caches.FETCH_CACHE, "destroy", Counter("downloads"), raising=False)
+    monkeypatch.setattr(spack.caches.MISC_CACHE, "destroy", Counter("caches"))
     monkeypatch.setattr(spack.store.STORE.failure_tracker, "clear_all", Counter("failures"))
     monkeypatch.setattr(spack.cmd.clean, "remove_python_cache", Counter("python_cache"))
 
@@ -480,10 +480,10 @@ def __str__(self):
 
 @pytest.fixture(autouse=True)
 def mock_fetch_cache(monkeypatch):
-    """Substitutes spack.paths.fetch_cache with a mock object that does nothing
+    """Substitutes spack.paths.FETCH_CACHE with a mock object that does nothing
     and raises on fetch.
     """
-    monkeypatch.setattr(spack.caches, "fetch_cache", MockCache())
+    monkeypatch.setattr(spack.caches, "FETCH_CACHE", MockCache())
 
 
 @pytest.fixture()
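The conftest change above is a direct consequence of the rename: monkeypatch.setattr replaces a module attribute by name, and with raising left at its default it fails immediately if that attribute does not exist, so the string has to be spelled FETCH_CACHE after this commit. A minimal sketch of that constraint, with a stand-in MockCache rather than the one defined in Spack's conftest:

# Sketch of the monkeypatch-by-name constraint; MockCache is a stand-in here.
import pytest

import spack.caches


class MockCache:
    def store(self, fetcher, relative_dest):
        pass  # tests don't need a real download cache

    def fetcher(self, target_path, digest, **kwargs):
        raise RuntimeError("tests should not try to fetch from the cache")


@pytest.fixture(autouse=True)
def mock_fetch_cache(monkeypatch):
    # After the rename, "fetch_cache" no longer exists on spack.caches, so
    # patching the old name would raise AttributeError; use the new one.
    monkeypatch.setattr(spack.caches, "FETCH_CACHE", MockCache())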
@@ -1936,6 +1936,7 @@ def shell_as(shell):
 def nullify_globals(request, monkeypatch):
     ensure_configuration_fixture_run_before(request)
     monkeypatch.setattr(spack.config, "CONFIG", None)
-    monkeypatch.setattr(spack.caches, "misc_cache", None)
+    monkeypatch.setattr(spack.caches, "MISC_CACHE", None)
+    monkeypatch.setattr(spack.caches, "FETCH_CACHE", None)
     monkeypatch.setattr(spack.repo, "PATH", None)
     monkeypatch.setattr(spack.store, "STORE", None)
@@ -38,7 +38,7 @@
 class GitRefLookup(AbstractRefLookup):
     """An object for cached lookups of git refs
 
-    GitRefLookup objects delegate to the misc_cache for locking. GitRefLookup objects may
+    GitRefLookup objects delegate to the MISC_CACHE for locking. GitRefLookup objects may
     be attached to a GitVersion to allow for comparisons between git refs and versions as
     represented by tags in the git repository.
     """
@@ -63,15 +63,15 @@ def cache_key(self):
             key_base = "git_metadata"
             self._cache_key = (Path(key_base) / self.repository_uri).as_posix()
 
-            # Cache data in misc_cache
+            # Cache data in MISC_CACHE
             # If this is the first lazy access, initialize the cache as well
-            spack.caches.misc_cache.init_entry(self.cache_key)
+            spack.caches.MISC_CACHE.init_entry(self.cache_key)
         return self._cache_key
 
     @property
     def cache_path(self):
         if not self._cache_path:
-            self._cache_path = spack.caches.misc_cache.cache_path(self.cache_key)
+            self._cache_path = spack.caches.MISC_CACHE.cache_path(self.cache_key)
         return self._cache_path
 
     @property
@@ -101,13 +101,13 @@ def repository_uri(self):
 
     def save(self):
         """Save the data to file"""
-        with spack.caches.misc_cache.write_transaction(self.cache_key) as (old, new):
+        with spack.caches.MISC_CACHE.write_transaction(self.cache_key) as (old, new):
             sjson.dump(self.data, new)
 
     def load_data(self):
         """Load data if the path already exists."""
         if os.path.isfile(self.cache_path):
-            with spack.caches.misc_cache.read_transaction(self.cache_key) as cache_file:
+            with spack.caches.MISC_CACHE.read_transaction(self.cache_key) as cache_file:
                 self.data = sjson.load(cache_file)
 
     def get(self, ref) -> Tuple[Optional[str], int]:
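The GitRefLookup hunks above are a good illustration of how MISC_CACHE (a file cache for small data) is typically consumed: initialize an entry, then wrap reads and writes in transactions so concurrent Spack processes serialize on the entry's lock instead of corrupting the cached JSON. The recap below uses a made-up cache key and payload; the method calls (init_entry, write_transaction, read_transaction) mirror the ones in the diff.

# Recap of the MISC_CACHE access pattern used by GitRefLookup above. The cache
# key and payload here are made up; the method calls mirror the diff exactly.
import spack.caches
import spack.util.spack_json as sjson

key = "git_metadata/example.com/org/repo"     # illustrative key only

# Make sure the entry exists before taking transactions on it.
spack.caches.MISC_CACHE.init_entry(key)

# Writes go through a write transaction, so concurrent Spack processes
# serialize on the entry's lock instead of corrupting the JSON.
with spack.caches.MISC_CACHE.write_transaction(key) as (old, new):
    sjson.dump({"v1.2.3": "abc123"}, new)

# Reads take a read transaction on the same key.
with spack.caches.MISC_CACHE.read_transaction(key) as cache_file:
    data = sjson.load(cache_file)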