Revert "Solver: Cache Concretization Results (#48198)"
This reverts commit d234df62d7.
parent e9cc1b36bc
commit a660d78670
@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
 by default. Can be purged with :ref:`spack clean --downloads
 <cmd-spack-clean>`.
 
-.. _Misc Cache:
-
 --------------------
 ``misc_cache``
 --------------------
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
 
   aliases:
     inst: install -v
-
--------------------------------
-``concretization_cache:enable``
--------------------------------
-
-When set to ``true``, Spack will utilize a cache of solver outputs from
-successful concretization runs. When enabled, Spack will check the concretization
-cache prior to running the solver. If a previous request to solve a given
-problem is present in the cache, Spack will load the concrete specs and other
-solver data from the cache rather than running the solver. Specs not previously
-concretized will be added to the cache on a successful solve. The cache additionally
-holds solver statistics, so commands like ``spack solve`` will still return information
-about the run that produced a given solver result.
-
-This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
-Cache is cleaned.
-
-When ``false`` or omitted, all concretization requests will be performed from scratch.
-
-----------------------------
-``concretization_cache:url``
-----------------------------
-
-Path to the location where Spack will root the concretization cache. Currently this only supports
-paths on the local filesystem.
-
-Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
-
-------------------------------------
-``concretization_cache:entry_limit``
-------------------------------------
-
-Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
-after each concretization run; if Spack has stored more results than the limit allows, the
-oldest concretization results are pruned until 10% of the limit has been removed.
-
-Setting this value to 0 disables the automatic pruning. It is expected users will be
-responsible for maintaining this cache.
-
------------------------------------
-``concretization_cache:size_limit``
------------------------------------
-
-Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
-after each concretization run; if Spack has stored more results than the limit allows, the
-oldest concretization results are pruned until 10% of the limit has been removed.
-
-Setting this value to 0 disables the automatic pruning. It is expected users will be
-responsible for maintaining this cache.
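Taken together, the reverted documentation above corresponds to a ``config.yaml`` block along these lines (an illustrative sketch assembled from the prose and the defaults visible in the removed code; the exact nesting under ``config:`` is an assumption):

    config:
      concretization_cache:
        enable: true
        url: $misc_cache/concretization   # default; any local filesystem path
        entry_limit: 1000                 # 0 disables count-based pruning
        size_limit: 300000000             # bytes; 0 disables size-based pruning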
@@ -7,7 +7,6 @@
 import fnmatch
 import glob
 import hashlib
-import io
 import itertools
 import numbers
 import os
@@ -21,7 +20,6 @@
 from contextlib import contextmanager
 from itertools import accumulate
 from typing import (
-    IO,
     Callable,
     Deque,
     Dict,
@@ -2883,20 +2881,6 @@ def keep_modification_time(*filenames):
             os.utime(f, (os.path.getatime(f), mtime))
 
 
-@contextmanager
-def temporary_file_position(stream):
-    orig_pos = stream.tell()
-    yield
-    stream.seek(orig_pos)
-
-
-@contextmanager
-def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
-    with temporary_file_position(stream):
-        stream.seek(loc, relative_to)
-        yield
-
-
 @contextmanager
 def temporary_dir(
     suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
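The two helpers removed above save a stream's position, seek somewhere temporarily, and restore the original position on exit. A minimal sketch of that behavior (standalone, with the helper assumed importable as it was before the revert):

    import io

    from llnl.util.filesystem import current_file_position  # removed by this commit

    buf = io.StringIO("12 4096\nabc123 2048\n")  # manifest-style content
    buf.seek(8)  # cursor parked mid-stream

    # Temporarily rewind to the beginning (SEEK_SET) and read the header line...
    with current_file_position(buf, 0, io.SEEK_SET):
        header = buf.readline()

    # ...the original cursor position is restored on exit.
    assert buf.tell() == 8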
@@ -108,8 +108,6 @@ def _get_user_cache_path():
 #: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
 default_misc_cache_path = os.path.join(user_cache_path, "cache")
 
-#: concretization cache for Spack concretizations
-default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")
 
 # Below paths pull configuration from the host environment.
 #
@@ -58,15 +58,6 @@
                     {"type": "string"},  # deprecated
                 ]
             },
-            "concretization_cache": {
-                "type": "object",
-                "properties": {
-                    "enable": {"type": "boolean"},
-                    "url": {"type": "string"},
-                    "entry_limit": {"type": "integer", "minimum": 0},
-                    "size_limit": {"type": "integer", "minimum": 0},
-                },
-            },
             "install_hash_length": {"type": "integer", "minimum": 1},
             "install_path_scheme": {"type": "string"},  # deprecated
             "build_stage": {
@@ -5,12 +5,9 @@
 import collections.abc
 import copy
 import enum
-import errno
 import functools
-import hashlib
 import io
 import itertools
-import json
 import os
 import pathlib
 import pprint
@@ -20,25 +17,12 @@
 import typing
 import warnings
 from contextlib import contextmanager
-from typing import (
-    IO,
-    Callable,
-    Dict,
-    Iterator,
-    List,
-    NamedTuple,
-    Optional,
-    Set,
-    Tuple,
-    Type,
-    Union,
-)
+from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
 
 import archspec.cpu
 
 import llnl.util.lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import current_file_position
 from llnl.util.lang import elide_list
 
 import spack
@@ -53,14 +37,12 @@
 import spack.package_base
 import spack.package_prefs
 import spack.patch
-import spack.paths
 import spack.platforms
 import spack.repo
 import spack.solver.splicing
 import spack.spec
 import spack.store
 import spack.util.crypto
-import spack.util.hash
 import spack.util.libc
 import spack.util.module_cmd as md
 import spack.util.path
@@ -69,7 +51,6 @@
 import spack.version as vn
 import spack.version.git_ref_lookup
 from spack import traverse
-from spack.util.file_cache import FileCache
 
 from .core import (
     AspFunction,
@@ -557,363 +538,6 @@ def format_unsolved(unsolved_specs):
             msg += "\n\t(No candidate specs from solver)"
         return msg
 
-    def to_dict(self, test: bool = False) -> dict:
-        """Produces dict representation of Result object
-
-        Does not include anything related to unsatisfiability as we
-        are only interested in storing satisfiable results
-        """
-        serial_node_arg = (
-            lambda node_dict: f"""{{"id": "{node_dict.id}", "pkg": "{node_dict.pkg}"}}"""
-        )
-        ret = dict()
-        ret["asp"] = self.asp
-        ret["criteria"] = self.criteria
-        ret["optimal"] = self.optimal
-        ret["warnings"] = self.warnings
-        ret["nmodels"] = self.nmodels
-        ret["abstract_specs"] = [str(x) for x in self.abstract_specs]
-        ret["satisfiable"] = self.satisfiable
-        serial_answers = []
-        for answer in self.answers:
-            serial_answer = answer[:2]
-            serial_answer_dict = {}
-            for node, spec in answer[2].items():
-                serial_answer_dict[serial_node_arg(node)] = spec.to_dict()
-            serial_answer = serial_answer + (serial_answer_dict,)
-            serial_answers.append(serial_answer)
-        ret["answers"] = serial_answers
-        ret["specs_by_input"] = {}
-        input_specs = {} if not self.specs_by_input else self.specs_by_input
-        for input, spec in input_specs.items():
-            ret["specs_by_input"][str(input)] = spec.to_dict()
-        return ret
-
-    @staticmethod
-    def from_dict(obj: dict):
-        """Returns Result object from compatible dictionary"""
-
-        def _dict_to_node_argument(dict):
-            id = dict["id"]
-            pkg = dict["pkg"]
-            return NodeArgument(id=id, pkg=pkg)
-
-        def _str_to_spec(spec_str):
-            return spack.spec.Spec(spec_str)
-
-        def _dict_to_spec(spec_dict):
-            loaded_spec = spack.spec.Spec.from_dict(spec_dict)
-            _ensure_external_path_if_external(loaded_spec)
-            spack.spec.Spec.ensure_no_deprecated(loaded_spec)
-            return loaded_spec
-
-        asp = obj.get("asp")
-        spec_list = obj.get("abstract_specs")
-        if not spec_list:
-            raise RuntimeError("Invalid json for concretization Result object")
-        if spec_list:
-            spec_list = [_str_to_spec(x) for x in spec_list]
-        result = Result(spec_list, asp)
-        result.criteria = obj.get("criteria")
-        result.optimal = obj.get("optimal")
-        result.warnings = obj.get("warnings")
-        result.nmodels = obj.get("nmodels")
-        result.satisfiable = obj.get("satisfiable")
-        result._unsolved_specs = []
-        answers = []
-        for answer in obj.get("answers", []):
-            loaded_answer = answer[:2]
-            answer_node_dict = {}
-            for node, spec in answer[2].items():
-                answer_node_dict[_dict_to_node_argument(json.loads(node))] = _dict_to_spec(spec)
-            loaded_answer.append(answer_node_dict)
-            answers.append(tuple(loaded_answer))
-        result.answers = answers
-        result._concrete_specs_by_input = {}
-        result._concrete_specs = []
-        for input, spec in obj.get("specs_by_input", {}).items():
-            result._concrete_specs_by_input[_str_to_spec(input)] = _dict_to_spec(spec)
-            result._concrete_specs.append(_dict_to_spec(spec))
-        return result
-
-
-class ConcretizationCache:
-    """Store for Spack concretization results and statistics
-
-    Serializes solver result objects and statistics to json and stores
-    at a given endpoint in a cache associated by the sha256 of the
-    asp problem and the involved control files.
-    """
-
-    def __init__(self, root: Union[str, None] = None):
-        root = root or spack.config.get(
-            "config:concretization_cache:url", spack.paths.default_conc_cache_path
-        )
-        self.root = pathlib.Path(spack.util.path.canonicalize_path(root))
-        self._fc = FileCache(self.root)
-        self._cache_manifest = ".cache_manifest"
-        self._manifest_queue: List[Tuple[pathlib.Path, int]] = []
-
-    def cleanup(self):
-        """Prunes the concretization cache according to configured size and entry
-        count limits. Cleanup is done in FIFO ordering."""
-        # TODO: determine a better default
-        entry_limit = spack.config.get("config:concretization_cache:entry_limit", 1000)
-        bytes_limit = spack.config.get("config:concretization_cache:size_limit", 3e8)
-        # lock the entire buildcache as we're removing a lot of data from the
-        # manifest and cache itself
-        with self._fc.read_transaction(self._cache_manifest) as f:
-            count, cache_bytes = self._extract_cache_metadata(f)
-            if not count or not cache_bytes:
-                return
-            entry_count = int(count)
-            manifest_bytes = int(cache_bytes)
-            # move beyond the metadata entry
-            f.readline()
-            if entry_count > entry_limit and entry_limit > 0:
-                with self._fc.write_transaction(self._cache_manifest) as (old, new):
-                    # prune the oldest 10% or until we have removed 10% of
-                    # total bytes starting from oldest entry
-                    # TODO: make this configurable?
-                    prune_count = entry_limit // 10
-                    lines_to_prune = f.readlines(prune_count)
-                    for i, line in enumerate(lines_to_prune):
-                        sha, cache_entry_bytes = self._parse_manifest_entry(line)
-                        if sha and cache_entry_bytes:
-                            cache_path = self._cache_path_from_hash(sha)
-                            if self._fc.remove(cache_path):
-                                entry_count -= 1
-                                manifest_bytes -= int(cache_entry_bytes)
-                        else:
-                            tty.warn(
-                                f"Invalid concretization cache entry: '{line}' on line: {i+1}"
-                            )
-                    self._write_manifest(f, entry_count, manifest_bytes)
-
-            elif manifest_bytes > bytes_limit and bytes_limit > 0:
-                with self._fc.write_transaction(self._cache_manifest) as (old, new):
-                    # take 10% of current size off
-                    prune_amount = bytes_limit // 10
-                    total_pruned = 0
-                    i = 0
-                    while total_pruned < prune_amount:
-                        sha, manifest_cache_bytes = self._parse_manifest_entry(f.readline())
-                        if sha and manifest_cache_bytes:
-                            entry_bytes = int(manifest_cache_bytes)
-                            cache_path = self.root / sha[:2] / sha
-                            if self._safe_remove(cache_path):
-                                entry_count -= 1
-                                entry_bytes -= entry_bytes
-                                total_pruned += entry_bytes
-                        else:
-                            tty.warn(
-                                "Invalid concretization cache entry "
-                                f"'{sha} {manifest_cache_bytes}' on line: {i}"
-                            )
-                        i += 1
-                    self._write_manifest(f, entry_count, manifest_bytes)
-        for cache_dir in self.root.iterdir():
-            if cache_dir.is_dir() and not any(cache_dir.iterdir()):
-                self._safe_remove(cache_dir)
-
-    def cache_entries(self):
-        """Generator producing cache entries"""
-        for cache_dir in self.root.iterdir():
-            # ensure component is cache entry directory
-            # not metadata file
-            if cache_dir.is_dir():
-                for cache_entry in cache_dir.iterdir():
-                    if not cache_entry.is_dir():
-                        yield cache_entry
-                    else:
-                        raise RuntimeError(
-                            "Improperly formed concretization cache. "
-                            f"Directory {cache_entry.name} is improperly located "
-                            "within the concretization cache."
-                        )
-
-    def _parse_manifest_entry(self, line):
-        """Returns parsed manifest entry lines
-        with handling for invalid reads."""
-        if line:
-            cache_values = line.strip("\n").split(" ")
-            if len(cache_values) < 2:
-                tty.warn(f"Invalid cache entry at {line}")
-                return None, None
-        return None, None
-
-    def _write_manifest(self, manifest_file, entry_count, entry_bytes):
-        """Writes new concretization cache manifest file.
-
-        Arguments:
-            manifest_file: IO stream opened for reading
-                and writing wrapping the manifest file
-                with cursor at calltime set to location
-                where manifest should be truncated
-            entry_count: new total entry count
-            entry_bytes: new total entry bytes count
-
-        """
-        persisted_entries = manifest_file.readlines()
-        manifest_file.truncate(0)
-        manifest_file.write(f"{entry_count} {entry_bytes}\n")
-        manifest_file.writelines(persisted_entries)
-
-    def _results_from_cache(self, cache_entry_buffer: IO[str]) -> Union[Result, None]:
-        """Returns a Result object from the concretizer cache
-
-        Reads the cache hit and uses `Result`'s own deserializer
-        to produce a new Result object
-        """
-
-        with current_file_position(cache_entry_buffer, 0):
-            cache_str = cache_entry_buffer.read()
-            # TODO: Should this be an error if None?
-            # Same for _stats_from_cache
-            if cache_str:
-                cache_entry = json.loads(cache_str)
-                result_json = cache_entry["results"]
-                return Result.from_dict(result_json)
-        return None
-
-    def _stats_from_cache(self, cache_entry_buffer: IO[str]) -> Union[List, None]:
-        """Returns concretization statistics from the
-        concretization associated with the cache.
-
-        Deserializes the json representation of the
-        statistics covering the cached concretization run
-        and returns the Python data structures
-        """
-        with current_file_position(cache_entry_buffer, 0):
-            cache_str = cache_entry_buffer.read()
-            if cache_str:
-                return json.loads(cache_str)["statistics"]
-        return None
-
-    def _extract_cache_metadata(self, cache_stream: IO[str]):
-        """Extracts and returns cache entry count and bytes count from head of manifest
-        file"""
-        # make sure we're always reading from the beginning of the stream
-        # concretization cache manifest data lives at the top of the file
-        with current_file_position(cache_stream, 0):
-            return self._parse_manifest_entry(cache_stream.readline())
-
-    def _prefix_digest(self, problem: str) -> Tuple[str, str]:
-        """Return the first two characters of, and the full, sha256 of the given asp problem"""
-        prob_digest = hashlib.sha256(problem.encode()).hexdigest()
-        prefix = prob_digest[:2]
-        return prefix, prob_digest
-
-    def _cache_path_from_problem(self, problem: str) -> pathlib.Path:
-        """Returns a Path object representing the path to the cache
-        entry for the given problem"""
-        prefix, digest = self._prefix_digest(problem)
-        return pathlib.Path(prefix) / digest
-
-    def _cache_path_from_hash(self, hash: str) -> pathlib.Path:
-        """Returns a Path object representing the cache entry
-        corresponding to the given sha256 hash"""
-        return pathlib.Path(hash[:2]) / hash
-
-    def _lock_prefix_from_cache_path(self, cache_path: str):
-        """Returns the bit location corresponding to a given cache entry path
-        for file locking"""
-        return spack.util.hash.base32_prefix_bits(
-            spack.util.hash.b32_hash(cache_path), spack.util.crypto.bit_length(sys.maxsize)
-        )
-
-    def flush_manifest(self):
-        """Updates the concretization cache manifest file after a cache write operation
-        Updates the current byte count and entry counts and writes to the head of the
-        manifest file"""
-        manifest_file = self.root / self._cache_manifest
-        manifest_file.touch(exist_ok=True)
-        with open(manifest_file, "r+", encoding="utf-8") as f:
-            # check if manifest is empty
-            count, cache_bytes = self._extract_cache_metadata(f)
-            if not count or not cache_bytes:
-                # cache is uninitialized
-                count = 0
-                cache_bytes = 0
-            f.seek(0, io.SEEK_END)
-            for manifest_update in self._manifest_queue:
-                entry_path, entry_bytes = manifest_update
-                count += 1
-                cache_bytes += entry_bytes
-                f.write(f"{entry_path.name} {entry_bytes}")
-            f.seek(0, io.SEEK_SET)
-            new_stats = f"{int(count)+1} {int(cache_bytes)}\n"
-            f.write(new_stats)
-
-    def _register_cache_update(self, cache_path: pathlib.Path, bytes_written: int):
-        """Adds manifest entry to update queue for later updates to the manifest"""
-        self._manifest_queue.append((cache_path, bytes_written))
-
-    def _safe_remove(self, cache_dir: pathlib.Path):
-        """Removes cache entries with handling for the case where the entry has been
-        removed already or there are multiple cache entries in a directory"""
-        try:
-            if cache_dir.is_dir():
-                cache_dir.rmdir()
-            else:
-                cache_dir.unlink()
-            return True
-        except FileNotFoundError:
-            # This is acceptable, removal is idempotent
-            pass
-        except OSError as e:
-            if e.errno == errno.ENOTEMPTY:
-                # there exists another cache entry in this directory, don't clean yet
-                pass
-        return False
-
-    def store(self, problem: str, result: Result, statistics: List, test: bool = False):
-        """Creates entry in concretization cache for problem if none exists,
-        storing the concretization Result object and statistics in the cache
-        as serialized json joined as a single file.
-
-        Hash membership is computed based on the sha256 of the provided asp
-        problem.
-        """
-        cache_path = self._cache_path_from_problem(problem)
-        if self._fc.init_entry(cache_path):
-            # if an entry for this conc hash exists already, we don't want
-            # to overwrite, just exit
-            tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
-            return
-        with self._fc.write_transaction(cache_path) as (old, new):
-            if old:
-                # Entry for this conc hash exists already, do not overwrite
-                tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
-                return
-            cache_dict = {"results": result.to_dict(test=test), "statistics": statistics}
-            bytes_written = new.write(json.dumps(cache_dict))
-            self._register_cache_update(cache_path, bytes_written)
-
-    def fetch(self, problem: str) -> Union[Tuple[Result, List], Tuple[None, None]]:
-        """Returns the concretization cache result for a lookup based on the given problem.
-
-        Checks the concretization cache for the given problem, and either returns the
-        Python objects cached on disk representing the concretization results and statistics
-        or returns None if no cache entry was found.
-        """
-        cache_path = self._cache_path_from_problem(problem)
-        result, statistics = None, None
-        with self._fc.read_transaction(cache_path) as f:
-            if f:
-                result = self._results_from_cache(f)
-                statistics = self._stats_from_cache(f)
-        if result and statistics:
-            tty.debug(f"Concretization cache hit at {str(cache_path)}")
-            return result, statistics
-        tty.debug(f"Concretization cache miss at {str(cache_path)}")
-        return None, None
-
-
-CONC_CACHE: ConcretizationCache = llnl.util.lang.Singleton(
-    lambda: ConcretizationCache()
-)  # type: ignore
 
 
 def _normalize_packages_yaml(packages_yaml):
     normalized_yaml = copy.copy(packages_yaml)
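The removed ``ConcretizationCache`` keys every entry by the sha256 of the ASP problem text, shards entries into two-character prefix directories, and stores one JSON document per entry holding both the serialized ``Result`` and the solver statistics. A small sketch of that on-disk layout (hypothetical values, mirroring ``_prefix_digest``, ``_cache_path_from_problem``, and ``store`` above):

    import hashlib
    import json
    import pathlib

    def cache_relpath(problem: str) -> pathlib.Path:
        # sha256 of the problem text, sharded as <first two hex chars>/<full digest>
        digest = hashlib.sha256(problem.encode()).hexdigest()
        return pathlib.Path(digest[:2]) / digest

    # Each entry written by store() combines the serialized Result (to_dict)
    # with the clingo statistics for that run in a single JSON payload.
    entry = {"results": {"satisfiable": True, "answers": []}, "statistics": {}}
    payload = json.dumps(entry)

    print(cache_relpath('node(0, "zlib").'), len(payload))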
@@ -1182,15 +806,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
         if sys.platform == "win32":
             tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
             spack.bootstrap.core.ensure_winsdk_external_or_raise()
-        control_files = ["concretize.lp", "heuristic.lp", "display.lp"]
-        if not setup.concretize_everything:
-            control_files.append("when_possible.lp")
-        if using_libc_compatibility():
-            control_files.append("libc_compatibility.lp")
-        else:
-            control_files.append("os_compatibility.lp")
-        if setup.enable_splicing:
-            control_files.append("splices.lp")
 
         timer.start("setup")
         asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
@@ -1200,30 +815,25 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
             return Result(specs), None, None
         timer.stop("setup")
 
-        timer.start("cache-check")
-        timer.start("ordering")
-        # ensure deterministic output
-        problem_repr = "\n".join(sorted(asp_problem.split("\n")))
-        timer.stop("ordering")
-        parent_dir = os.path.dirname(__file__)
-        full_path = lambda x: os.path.join(parent_dir, x)
-        abs_control_files = [full_path(x) for x in control_files]
-        for ctrl_file in abs_control_files:
-            with open(ctrl_file, "r+", encoding="utf-8") as f:
-                problem_repr += "\n" + f.read()
-
-        result = None
-        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
-        if conc_cache_enabled:
-            result, concretization_stats = CONC_CACHE.fetch(problem_repr)
-
-        timer.stop("cache-check")
-        if not result:
         timer.start("load")
         # Add the problem instance
         self.control.add("base", [], asp_problem)
-        # Load the files
-        [self.control.load(lp) for lp in abs_control_files]
+        # Load the file itself
+        parent_dir = os.path.dirname(__file__)
+        self.control.load(os.path.join(parent_dir, "concretize.lp"))
+        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
+        self.control.load(os.path.join(parent_dir, "display.lp"))
+        if not setup.concretize_everything:
+            self.control.load(os.path.join(parent_dir, "when_possible.lp"))
+
+        # Binary compatibility is based on libc on Linux, and on the os tag elsewhere
+        if using_libc_compatibility():
+            self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
+        else:
+            self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
+        if setup.enable_splicing:
+            self.control.load(os.path.join(parent_dir, "splices.lp"))
+
         timer.stop("load")
 
         # Grounding is the first step in the solve -- it turns our facts
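Before the revert, the cache lookup key in ``solve()`` was derived from the generated ASP program plus the text of every control file, with the program lines sorted so the key is deterministic. A sketch of that derivation (``problem_key`` is a hypothetical helper name; the file handling mirrors the removed lines above):

    import os

    def problem_key(asp_problem: str, control_files, parent_dir: str) -> str:
        # order-independent representation of the generated program
        key = "\n".join(sorted(asp_problem.split("\n")))
        for name in control_files:
            # appending the .lp sources means editing any control file
            # invalidates previously cached solves
            with open(os.path.join(parent_dir, name), "r", encoding="utf-8") as f:
                key += "\n" + f.read()
        return key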
@@ -1258,9 +868,7 @@ def on_model(model):
             finished = handle.wait(time_limit)
             if not finished:
                 specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
-                header = (
-                    f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
-                )
+                header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
                 if error_on_timeout:
                     raise UnsatisfiableSpecError(f"{header}, stopping concretization")
                 warnings.warn(f"{header}, using the best configuration found so far")
@@ -1284,9 +892,7 @@ def on_model(model):
             error_handler.raise_if_errors()
 
             # build specs from spec attributes in the model
-            spec_attrs = [
-                (name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")
-            ]
+            spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
             answers = builder.build_specs(spec_attrs)
 
             # add best spec to the results
@@ -1307,6 +913,14 @@ def on_model(model):
         result.control = self.control
         result.cores.extend(cores)
 
+        if output.timers:
+            timer.write_tty()
+            print()
+
+        if output.stats:
+            print("Statistics:")
+            pprint.pprint(self.control.statistics)
+
         result.raise_if_unsat()
 
         if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
@@ -1316,17 +930,8 @@ def on_model(model):
                 " that do not satisfy the request. Please report a bug at "
                 f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
             )
-        if conc_cache_enabled:
-            CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
-        concretization_stats = self.control.statistics
-        if output.timers:
-            timer.write_tty()
-            print()
-
-        if output.stats:
-            print("Statistics:")
-            pprint.pprint(concretization_stats)
-        return result, timer, concretization_stats
+        return result, timer, self.control.statistics
 
 
 class ConcreteSpecsByHash(collections.abc.Mapping):
@@ -1768,7 +1373,7 @@ def effect_rules(self):
             return
 
         self.gen.h2("Imposed requirements")
-        for name in sorted(self._effect_cache):
+        for name in self._effect_cache:
             cache = self._effect_cache[name]
             for (spec_str, _), (effect_id, requirements) in cache.items():
                 self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
@@ -1821,8 +1426,8 @@ def define_variant(
 
         elif isinstance(values, vt.DisjointSetsOfValues):
             union = set()
-            for sid, s in enumerate(sorted(values.sets)):
-                for value in sorted(s):
+            for sid, s in enumerate(values.sets):
+                for value in s:
                     pkg_fact(fn.variant_value_from_disjoint_sets(vid, value, sid))
                 union.update(s)
             values = union
@@ -2003,7 +1608,7 @@ def package_provider_rules(self, pkg):
             self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(vpkg_name)))
 
         for when, provided in pkg.provided.items():
-            for vpkg in sorted(provided):
+            for vpkg in provided:
                 if vpkg.name not in self.possible_virtuals:
                     continue
 
@@ -2018,8 +1623,8 @@ def package_provider_rules(self, pkg):
             condition_id = self.condition(
                 when, required_name=pkg.name, msg="Virtuals are provided together"
             )
-            for set_id, virtuals_together in enumerate(sorted(sets_of_virtuals)):
-                for name in sorted(virtuals_together):
+            for set_id, virtuals_together in enumerate(sets_of_virtuals):
+                for name in virtuals_together:
                     self.gen.fact(
                         fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
                     )
@@ -2129,7 +1734,7 @@ def package_splice_rules(self, pkg):
             for map in pkg.variants.values():
                 for k in map:
                     filt_match_variants.add(k)
-            filt_match_variants = sorted(filt_match_variants)
+            filt_match_variants = list(filt_match_variants)
             variant_constraints = self._gen_match_variant_splice_constraints(
                 pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
             )
@@ -2659,7 +2264,7 @@ def define_package_versions_and_validate_preferences(
     ):
         """Declare any versions in specs not declared in packages."""
         packages_yaml = spack.config.get("packages")
-        for pkg_name in sorted(possible_pkgs):
+        for pkg_name in possible_pkgs:
             pkg_cls = self.pkg_class(pkg_name)
 
             # All the versions from the corresponding package.py file. Since concepts
@@ -2987,7 +2592,7 @@ def define_variant_values(self):
         """
         # Tell the concretizer about possible values from specs seen in spec_clauses().
        # We might want to order these facts by pkg and name if we are debugging.
-        for pkg_name, variant_def_id, value in sorted(self.variant_values_from_specs):
+        for pkg_name, variant_def_id, value in self.variant_values_from_specs:
             try:
                 vid = self.variant_ids_by_def_id[variant_def_id]
             except KeyError:
@@ -3025,8 +2630,6 @@ def concrete_specs(self):
             # Declare as possible parts of specs that are not in package.py
             # - Add versions to possible versions
             # - Add OS to possible OS's
-
-            # is traverse deterministic?
             for dep in spec.traverse():
                 self.possible_versions[dep.name].add(dep.version)
                 if isinstance(dep.version, vn.GitVersion):
@@ -3264,7 +2867,7 @@ def define_runtime_constraints(self):
         recorder.consume_facts()
 
     def literal_specs(self, specs):
-        for spec in sorted(specs):
+        for spec in specs:
             self.gen.h2("Spec: %s" % str(spec))
             condition_id = next(self._id_counter)
             trigger_id = next(self._id_counter)
@@ -3765,7 +3368,7 @@ def consume_facts(self):
         # on the available compilers)
         self._setup.pkg_version_rules(runtime_pkg)
 
-        for imposed_spec, when_spec in sorted(self.runtime_conditions):
+        for imposed_spec, when_spec in self.runtime_conditions:
             msg = f"{when_spec} requires {imposed_spec} at runtime"
             _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)
 
@@ -4622,9 +4225,6 @@ def solve_with_stats(
         reusable_specs.extend(self.selector.reusable_specs(specs))
         setup = SpackSolverSetup(tests=tests)
         output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
-
-        CONC_CACHE.flush_manifest()
-        CONC_CACHE.cleanup()
         return self.driver.solve(
             setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
         )
@@ -4694,9 +4294,6 @@ def solve_in_rounds(
             for spec in result.specs:
                 reusable_specs.extend(spec.traverse())
 
-            CONC_CACHE.flush_manifest()
-            CONC_CACHE.cleanup()
-
 
 class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
     """There was an issue with the spec that was requested (i.e. a user error)."""
@@ -42,7 +42,7 @@ def mock_pkg_git_repo(git, tmp_path_factory):
     repo_dir = root_dir / "builtin.mock"
     shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))
 
-    repo_cache = spack.util.file_cache.FileCache(root_dir / "cache")
+    repo_cache = spack.util.file_cache.FileCache(str(root_dir / "cache"))
     mock_repo = spack.repo.RepoPath(str(repo_dir), cache=repo_cache)
     mock_repo_packages = mock_repo.repos[0].packages_path
 
@@ -3254,54 +3254,3 @@ def test_spec_unification(unify, mutable_config, mock_packages):
     maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext
     with maybe_fails(spack.solver.asp.UnsatisfiableSpecError):
         _ = spack.cmd.parse_specs([a_restricted, b], concretize=True)
-
-
-def test_concretization_cache_roundtrip(use_concretization_cache, monkeypatch, mutable_config):
-    """Tests whether we can write the results of a clingo solve to the cache
-    and load the same spec request from the cache to produce identical specs"""
-    # Force determinism:
-    # Solver setup is normally non-deterministic due to non-determinism in
-    # asp solver setup logic generation. The only other inputs to the cache keys are
-    # the .lp files, which are invariant over the course of this test.
-    # This method forces the same setup to be produced for the same specs
-    # which gives us a guarantee of cache hits, as it removes the only
-    # element of non deterministic solver setup for the same spec
-    # Basically just a quick and dirty memoization
-    solver_setup = spack.solver.asp.SpackSolverSetup.setup
-
-    def _setup(self, specs, *, reuse=None, allow_deprecated=False):
-        if not getattr(_setup, "cache_setup", None):
-            cache_setup = solver_setup(self, specs, reuse=reuse, allow_deprecated=allow_deprecated)
-            setattr(_setup, "cache_setup", cache_setup)
-        return getattr(_setup, "cache_setup")
-
-    # monkeypatch our forced determinism setup method into solver setup
-    monkeypatch.setattr(spack.solver.asp.SpackSolverSetup, "setup", _setup)
-
-    assert spack.config.get("config:concretization_cache:enable")
-
-    # run one standard concretization to populate the cache and the setup method
-    # memoization
-    h = spack.concretize.concretize_one("hdf5")
-
-    # due to our forced determinism above, we should not be observing
-    # cache misses, assert that we're not storing any new cache entries
-    def _ensure_no_store(self, problem: str, result, statistics, test=False):
-        # always throw, we never want to reach this code path
-        assert False, "Concretization cache hit expected"
-
-    # Assert that we're actually hitting the cache
-    cache_fetch = spack.solver.asp.ConcretizationCache.fetch
-
-    def _ensure_cache_hits(self, problem: str):
-        result, statistics = cache_fetch(self, problem)
-        assert result, "Expected successful concretization cache hit"
-        assert statistics, "Expected statistics to be non null on cache hit"
-        return result, statistics
-
-    monkeypatch.setattr(spack.solver.asp.ConcretizationCache, "store", _ensure_no_store)
-    monkeypatch.setattr(spack.solver.asp.ConcretizationCache, "fetch", _ensure_cache_hits)
-    # ensure subsequent concretizations of the same spec produce the same spec
-    # object
-    for _ in range(5):
-        assert h == spack.concretize.concretize_one("hdf5")
@@ -350,16 +350,6 @@ def pytest_collection_modifyitems(config, items):
             item.add_marker(skip_as_slow)
 
 
-@pytest.fixture(scope="function")
-def use_concretization_cache(mutable_config, tmpdir):
-    """Enables the use of the concretization cache"""
-    spack.config.set("config:concretization_cache:enable", True)
-    # ensure we have an isolated concretization cache
-    new_conc_cache_loc = str(tmpdir.mkdir("concretization"))
-    spack.config.set("config:concretization_cache:path", new_conc_cache_loc)
-    yield
-
-
 #
 # These fixtures are applied to all tests
 #
@@ -2143,7 +2133,7 @@ def _c_compiler_always_exists():
 @pytest.fixture(scope="session")
 def mock_test_cache(tmp_path_factory):
     cache_dir = tmp_path_factory.mktemp("cache")
-    return spack.util.file_cache.FileCache(cache_dir)
+    return spack.util.file_cache.FileCache(str(cache_dir))
 
 
 class MockHTTPResponse(io.IOBase):
@@ -14,5 +14,3 @@ config:
   checksum: true
   dirty: false
   locks: {1}
-  concretization_cache:
-    enable: false
@@ -161,7 +161,7 @@ def test_handle_unknown_package(temporary_store, config, mock_packages, tmp_path
     """
     layout = temporary_store.layout
 
-    repo_cache = spack.util.file_cache.FileCache(tmp_path / "cache")
+    repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
     mock_db = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=repo_cache)
 
     not_in_mock = set.difference(
@@ -34,7 +34,7 @@ def extra_repo(tmp_path_factory, request):
         subdirectory: '{request.param}'
     """
     )
-    repo_cache = spack.util.file_cache.FileCache(cache_dir)
+    repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
     return spack.repo.Repo(str(repo_dir), cache=repo_cache), request.param
 
 
@@ -194,7 +194,7 @@ def _repo_paths(repos):
 
     repo_paths, namespaces = _repo_paths(repos)
 
-    repo_cache = spack.util.file_cache.FileCache(tmp_path / "cache")
+    repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
     repo_path = spack.repo.RepoPath(*repo_paths, cache=repo_cache)
     assert len(repo_path.repos) == len(namespaces)
     assert [x.namespace for x in repo_path.repos] == namespaces
@@ -362,5 +362,5 @@ def test_repo_package_api_version(tmp_path: pathlib.Path):
         namespace: example
     """
     )
-    cache = spack.util.file_cache.FileCache(tmp_path / "cache")
+    cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
     assert spack.repo.Repo(str(tmp_path / "example"), cache=cache).package_api == (1, 0)
@@ -5,17 +5,16 @@
 import errno
 import math
 import os
-import pathlib
 import shutil
-from typing import IO, Dict, Optional, Tuple, Union
+from typing import IO, Optional, Tuple
 
-from llnl.util.filesystem import rename
+from llnl.util.filesystem import mkdirp, rename
 
 from spack.error import SpackError
 from spack.util.lock import Lock, ReadTransaction, WriteTransaction
 
 
-def _maybe_open(path: Union[str, pathlib.Path]) -> Optional[IO[str]]:
+def _maybe_open(path: str) -> Optional[IO[str]]:
     try:
         return open(path, "r", encoding="utf-8")
     except OSError as e:
@@ -25,7 +24,7 @@ def _maybe_open(path: Union[str, pathlib.Path]) -> Optional[IO[str]]:
 
 
 class ReadContextManager:
-    def __init__(self, path: Union[str, pathlib.Path]) -> None:
+    def __init__(self, path: str) -> None:
         self.path = path
 
     def __enter__(self) -> Optional[IO[str]]:
@@ -71,7 +70,7 @@ class FileCache:
 
     """
 
-    def __init__(self, root: Union[str, pathlib.Path], timeout=120):
+    def __init__(self, root, timeout=120):
         """Create a file cache object.
 
         This will create the cache directory if it does not exist yet.
@@ -83,60 +82,58 @@ def __init__(self, root: Union[str, pathlib.Path], timeout=120):
             for cache files, this specifies how long Spack should wait
             before assuming that there is a deadlock.
         """
-        if isinstance(root, str):
-            root = pathlib.Path(root)
-        self.root = root
-        self.root.mkdir(parents=True, exist_ok=True)
+        self.root = root.rstrip(os.path.sep)
+        if not os.path.exists(self.root):
+            mkdirp(self.root)
 
-        self._locks: Dict[Union[pathlib.Path, str], Lock] = {}
+        self._locks = {}
         self.lock_timeout = timeout
 
     def destroy(self):
         """Remove all files under the cache root."""
-        for f in self.root.iterdir():
-            if f.is_dir():
-                shutil.rmtree(f, True)
+        for f in os.listdir(self.root):
+            path = os.path.join(self.root, f)
+            if os.path.isdir(path):
+                shutil.rmtree(path, True)
             else:
-                f.unlink()
+                os.remove(path)
 
-    def cache_path(self, key: Union[str, pathlib.Path]):
+    def cache_path(self, key):
         """Path to the file in the cache for a particular key."""
-        return self.root / key
+        return os.path.join(self.root, key)
 
-    def _lock_path(self, key: Union[str, pathlib.Path]):
+    def _lock_path(self, key):
         """Path to the file in the cache for a particular key."""
         keyfile = os.path.basename(key)
         keydir = os.path.dirname(key)
 
-        return self.root / keydir / ("." + keyfile + ".lock")
+        return os.path.join(self.root, keydir, "." + keyfile + ".lock")
 
-    def _get_lock(self, key: Union[str, pathlib.Path]):
+    def _get_lock(self, key):
         """Create a lock for a key, if necessary, and return a lock object."""
         if key not in self._locks:
-            self._locks[key] = Lock(str(self._lock_path(key)), default_timeout=self.lock_timeout)
+            self._locks[key] = Lock(self._lock_path(key), default_timeout=self.lock_timeout)
         return self._locks[key]
 
-    def init_entry(self, key: Union[str, pathlib.Path]):
+    def init_entry(self, key):
        """Ensure we can access a cache file. Create a lock for it if needed.
 
         Return whether the cache file exists yet or not.
         """
         cache_path = self.cache_path(key)
-        # Avoid using pathlib here to allow the logic below to
-        # function as is
-        # TODO: Maybe refactor the following logic for pathlib
         exists = os.path.exists(cache_path)
         if exists:
-            if not cache_path.is_file():
+            if not os.path.isfile(cache_path):
                 raise CacheError("Cache file is not a file: %s" % cache_path)
 
             if not os.access(cache_path, os.R_OK):
                 raise CacheError("Cannot access cache file: %s" % cache_path)
         else:
             # if the file is hierarchical, make parent directories
-            parent = cache_path.parent
-            if parent != self.root:
-                parent.mkdir(parents=True, exist_ok=True)
+            parent = os.path.dirname(cache_path)
+            if parent.rstrip(os.path.sep) != self.root:
+                mkdirp(parent)
 
             if not os.access(parent, os.R_OK | os.W_OK):
                 raise CacheError("Cannot access cache directory: %s" % parent)
@@ -145,7 +142,7 @@ def init_entry(self, key: Union[str, pathlib.Path]):
         self._get_lock(key)
         return exists
 
-    def read_transaction(self, key: Union[str, pathlib.Path]):
+    def read_transaction(self, key):
         """Get a read transaction on a file cache item.
 
         Returns a ReadTransaction context manager and opens the cache file for
@@ -156,11 +153,9 @@ def read_transaction(self, key: Union[str, pathlib.Path]):
 
         """
         path = self.cache_path(key)
-        return ReadTransaction(
-            self._get_lock(key), acquire=lambda: ReadContextManager(path)  # type: ignore
-        )
+        return ReadTransaction(self._get_lock(key), acquire=lambda: ReadContextManager(path))
 
-    def write_transaction(self, key: Union[str, pathlib.Path]):
+    def write_transaction(self, key):
         """Get a write transaction on a file cache item.
 
         Returns a WriteTransaction context manager that opens a temporary file
@@ -172,11 +167,9 @@ def write_transaction(self, key: Union[str, pathlib.Path]):
         if os.path.exists(path) and not os.access(path, os.W_OK):
             raise CacheError(f"Insufficient permissions to write to file cache at {path}")
 
-        return WriteTransaction(
-            self._get_lock(key), acquire=lambda: WriteContextManager(path)  # type: ignore
-        )
+        return WriteTransaction(self._get_lock(key), acquire=lambda: WriteContextManager(path))
 
-    def mtime(self, key: Union[str, pathlib.Path]) -> float:
+    def mtime(self, key) -> float:
         """Return modification time of cache file, or -inf if it does not exist.
 
         Time is in units returned by os.stat in the mtime field, which is
@@ -186,14 +179,14 @@ def mtime(self, key: Union[str, pathlib.Path]) -> float:
         if not self.init_entry(key):
             return -math.inf
         else:
-            return self.cache_path(key).stat().st_mtime
+            return os.stat(self.cache_path(key)).st_mtime
 
-    def remove(self, key: Union[str, pathlib.Path]):
+    def remove(self, key):
         file = self.cache_path(key)
         lock = self._get_lock(key)
         try:
             lock.acquire_write()
-            file.unlink()
+            os.unlink(file)
         except OSError as e:
             # File not found is OK, so remove is idempotent.
             if e.errno != errno.ENOENT:
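For context on the ``FileCache`` API that both the old and new code rely on: ``init_entry`` reports whether an entry already exists, ``write_transaction`` yields the old and replacement file objects, and ``read_transaction`` yields the cached file (or None). A minimal usage sketch with a hypothetical cache root and key:

    import spack.util.file_cache

    cache = spack.util.file_cache.FileCache("/tmp/spack-file-cache-demo")

    key = "greeting"
    cache.init_entry(key)  # False on first use: the entry does not exist yet

    with cache.write_transaction(key) as (old, new):
        # `old` is the previous contents (None here); `new` replaces the entry
        # atomically when the transaction completes.
        new.write("hello\n")

    with cache.read_transaction(key) as f:
        assert f.read() == "hello\n"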