Compare commits
9 Commits
packages/v
...
develop-20

| SHA1 |
|---|
| 8e7489bc17 |
| d234df62d7 |
| 4a5922a0ec |
| 5bd184aaaf |
| 464c3b96fa |
| 60544a4e84 |
| a664d98f37 |
| 0e3d7efb0f |
| a8cd0b99f3 |
@@ -125,6 +125,8 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads
<cmd-spack-clean>`.

.. _Misc Cache:

--------------------
``misc_cache``
--------------------

@@ -334,3 +336,52 @@ create a new alias called ``inst`` that will always call ``install -v``:

   aliases:
     inst: install -v

-------------------------------
``concretization_cache:enable``
-------------------------------

When set to ``true``, Spack will utilize a cache of solver outputs from
successful concretization runs. When enabled, Spack will check the concretization
cache prior to running the solver. If a previous request to solve a given
problem is present in the cache, Spack will load the concrete specs and other
solver data from the cache rather than running the solver. Specs not previously
concretized will be added to the cache on a successful solve. The cache additionally
holds solver statistics, so commands like ``spack solve`` will still return information
about the run that produced a given solver result.

This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
Cache is cleaned.

When ``false`` or omitted, all concretization requests will be performed from scratch.

----------------------------
``concretization_cache:url``
----------------------------

Path to the location where Spack will root the concretization cache. Currently this only supports
paths on the local filesystem.

The default location is under the :ref:`Misc Cache` at ``$misc_cache/concretization``.

------------------------------------
``concretization_cache:entry_limit``
------------------------------------

Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.

Setting this value to 0 disables the automatic pruning. It is then expected that users will be
responsible for maintaining this cache.

-----------------------------------
``concretization_cache:size_limit``
-----------------------------------

Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
after each concretization run; if Spack has stored more data than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.

Setting this value to 0 disables the automatic pruning. It is then expected that users will be
responsible for maintaining this cache.
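All of the options above live under Spack's top-level ``config`` section. A minimal sketch of
enabling the cache in ``config.yaml`` (the path and limit values below are illustrative, not
defaults):

.. code-block:: yaml

   config:
     concretization_cache:
       enable: true
       url: ~/.spack/concretization-cache   # hypothetical path; defaults under the misc cache
       entry_limit: 1000
       size_limit: 300000000                # bytes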
@@ -7,6 +7,7 @@
import fnmatch
import glob
import hashlib
import io
import itertools
import numbers
import os
@@ -20,6 +21,7 @@
from contextlib import contextmanager
from itertools import accumulate
from typing import (
    IO,
    Callable,
    Deque,
    Dict,
@@ -2454,26 +2456,69 @@ class WindowsSimulatedRPath:
    and vice versa.
    """

    def __init__(self, package, link_install_prefix=True):
    def __init__(
        self,
        package,
        base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
        link_install_prefix: bool = True,
    ):
        """
        Args:
            package (spack.package_base.PackageBase): Package requiring links
            base_modification_prefix (str|pathlib.Path): Path representation indicating
                the root directory in which to establish the simulated rpath, i.e. where the
                symlinks that comprise the "rpath" behavior will be installed.

                Note: This is a mutually exclusive option with `link_install_prefix`; using
                both is an error.

                Default: None
            link_install_prefix (bool): Link against package's own install or stage root.
                Packages that run their own executables during build and require rpaths to
                the build directory during build time require this option. Default: install
                the build directory during build time require this option.

                Default: install
                root

                Note: This is a mutually exclusive option with `base_modification_prefix`; using
                both is an error.
        """
        self.pkg = package
        self._addl_rpaths = set()
        self._addl_rpaths: set[str] = set()
        if link_install_prefix and base_modification_prefix:
            raise RuntimeError(
                "Invalid combination of arguments given to WindowsSimulatedRPath.\n"
                "Select either `link_install_prefix` to create an install prefix rpath"
                " or specify a `base_modification_prefix` for any other link type. "
                "Specifying both arguments is invalid."
            )
        if not (link_install_prefix or base_modification_prefix):
            raise RuntimeError(
                "Insufficient arguments given to WindowsSimulatedRPath.\n"
                "WindowsSimulatedRPath requires one of link_install_prefix"
                " or base_modification_prefix to be specified."
                " Neither was provided."
            )

        self.link_install_prefix = link_install_prefix
        self._additional_library_dependents = set()
        if base_modification_prefix:
            self.base_modification_prefix = pathlib.Path(base_modification_prefix)
        else:
            self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
        self._additional_library_dependents: set[pathlib.Path] = set()
        if not self.link_install_prefix:
            tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")

    @property
    def library_dependents(self):
        """
        Set of directories where package binaries/libraries are located.
        """
        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
        base_pths = set()
        if self.link_install_prefix:
            base_pths.add(pathlib.Path(self.pkg.prefix.bin))
        base_pths |= self._additional_library_dependents
        return base_pths

    def add_library_dependent(self, *dest):
        """
@@ -2489,6 +2534,12 @@ def add_library_dependent(self, *dest):
                new_pth = pathlib.Path(pth).parent
            else:
                new_pth = pathlib.Path(pth)
            path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
            if not path_is_in_prefix:
                raise RuntimeError(
                    f"Attempting to generate rpath symlink out of rpath context:\
                    {str(self.base_modification_prefix)}"
                )
            self._additional_library_dependents.add(new_pth)

    @property
@@ -2577,6 +2628,33 @@ def establish_link(self):
                self._link(library, lib_dir)


def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
    """Establishes a temp Windows simulated rpath for the pkg in the testing directory
    so an executable can test the libraries/executables with proper access
    to dependent dlls.

    Note: this is a no-op on all other platforms besides Windows.

    Args:
        pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
        test_dir: the testing directory in which we should construct an rpath
    """
    # link_install_prefix as false ensures we're not linking into the install prefix
    mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
    # add the testing directory as a location to install rpath symlinks
    mini_rpath.add_library_dependent(test_dir)

    # check for whether build_directory is available; if not,
    # assume the stage root is the build dir
    build_dir_attr = getattr(pkg, "build_directory", None)
    build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
    # add the build dir & build dir bin
    mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
    mini_rpath.add_rpath(os.path.join(build_directory))
    # construct rpath
    mini_rpath.establish_link()
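For orientation, a hypothetical call site (identifiers below are illustrative; the real use
appears in the ``libcatalyst`` package diff at the end of this comparison) would hand
``make_package_test_rpath`` the path of a freshly built test binary:

```python
# Inside a package's post-install smoke test; `self` is a PackageBase.
# The helper is a no-op off Windows, so no platform guard is strictly needed.
test_exe = join_path(self.stage.source_path, "smoke_test_build", "adaptor0_test")
fsys.make_package_test_rpath(self, test_exe)  # makes dependent dlls reachable from the test dir
```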


@system_path_filter
@memoized
def can_access_dir(path):
@@ -2805,6 +2883,20 @@ def keep_modification_time(*filenames):
        os.utime(f, (os.path.getatime(f), mtime))


@contextmanager
def temporary_file_position(stream):
    orig_pos = stream.tell()
    yield
    stream.seek(orig_pos)


@contextmanager
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
    with temporary_file_position(stream):
        stream.seek(loc, relative_to)
        yield


@contextmanager
def temporary_dir(
    suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
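A small usage sketch for the two context managers just added (assuming they are imported from
this module): the cursor is saved on entry and restored on exit, so a caller can peek anywhere
in an open stream without disturbing a surrounding read:

```python
import io

buf = io.StringIO("42 1337\nheader consumed\n")
buf.readline()  # consume the first line; cursor is now at the second line

# rewind to the start temporarily; the old position is restored on exit
with current_file_position(buf, 0, io.SEEK_SET):
    assert buf.readline() == "42 1337\n"

assert buf.readline() == "header consumed\n"  # outer read continues where it left off
```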
@@ -1234,10 +1234,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
    if os.path.isdir(bin_dir):
        env.prepend_path("PATH", bin_dir)

    for cp_dir in spack.build_systems.cmake.get_cmake_prefix_path(dep.package):
        env.append_path("CMAKE_PREFIX_PATH", cp_dir)
    env.prune_duplicate_paths("CMAKE_PREFIX_PATH")


def _setup_pkg_and_run(
    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
@@ -125,9 +125,10 @@ def windows_establish_runtime_linkage(self):
        # Spack should in general not modify things it has not installed
        # we can reasonably expect externals to have their link interface properly established
        if sys.platform == "win32" and not self.spec.external:
            self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
            self.win_rpath.add_rpath(*self.win_add_rpath())
            self.win_rpath.establish_link()
            win_rpath = fsys.WindowsSimulatedRPath(self)
            win_rpath.add_library_dependent(*self.win_add_library_dependent())
            win_rpath.add_rpath(*self.win_add_rpath())
            win_rpath.establish_link()


#: Registers which are the detectable packages, by repo and package name
@@ -742,7 +743,6 @@ def __init__(self, spec):
        # Set up timing variables
        self._fetch_time = 0.0

        self.win_rpath = fsys.WindowsSimulatedRPath(self)
        super().__init__()

    def __getitem__(self, key: str) -> "PackageBase":
@@ -108,6 +108,8 @@ def _get_user_cache_path():
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
default_misc_cache_path = os.path.join(user_cache_path, "cache")

#: concretization cache for Spack concretizations
default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")

# Below paths pull configuration from the host environment.
#
@@ -58,6 +58,15 @@
            {"type": "string"},  # deprecated
        ]
    },
    "concretization_cache": {
        "type": "object",
        "properties": {
            "enable": {"type": "boolean"},
            "url": {"type": "string"},
            "entry_limit": {"type": "integer", "minimum": 0},
            "size_limit": {"type": "integer", "minimum": 0},
        },
    },
    "install_hash_length": {"type": "integer", "minimum": 1},
    "install_path_scheme": {"type": "string"},  # deprecated
    "build_stage": {
@@ -5,9 +5,12 @@
import collections.abc
import copy
import enum
import errno
import functools
import hashlib
import io
import itertools
import json
import os
import pathlib
import pprint
@@ -17,12 +20,25 @@
import typing
import warnings
from contextlib import contextmanager
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
from typing import (
    IO,
    Callable,
    Dict,
    Iterator,
    List,
    NamedTuple,
    Optional,
    Set,
    Tuple,
    Type,
    Union,
)

import archspec.cpu

import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import current_file_position
from llnl.util.lang import elide_list

import spack
@@ -34,15 +50,18 @@
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.package_base
import spack.package_prefs
import spack.patch
import spack.paths
import spack.platforms
import spack.repo
import spack.solver.splicing
import spack.spec
import spack.store
import spack.util.crypto
import spack.util.hash
import spack.util.libc
import spack.util.module_cmd as md
import spack.util.path
@@ -51,6 +70,7 @@
import spack.version as vn
import spack.version.git_ref_lookup
from spack import traverse
from spack.util.file_cache import FileCache

from .core import (
    AspFunction,
@@ -538,6 +558,365 @@ def format_unsolved(unsolved_specs):
        msg += "\n\t(No candidate specs from solver)"
        return msg

    def to_dict(self, test: bool = False) -> dict:
        """Produces dict representation of Result object

        Does not include anything related to unsatisfiability as we
        are only interested in storing satisfiable results
        """
        serial_node_arg = (
            lambda node_dict: f"""{{"id": "{node_dict.id}", "pkg": "{node_dict.pkg}"}}"""
        )
        spec_hash_type = ht.process_hash if test else ht.dag_hash
        ret = dict()
        ret["asp"] = self.asp
        ret["criteria"] = self.criteria
        ret["optimal"] = self.optimal
        ret["warnings"] = self.warnings
        ret["nmodels"] = self.nmodels
        ret["abstract_specs"] = [str(x) for x in self.abstract_specs]
        ret["satisfiable"] = self.satisfiable
        serial_answers = []
        for answer in self.answers:
            serial_answer = answer[:2]
            serial_answer_dict = {}
            for node, spec in answer[2].items():
                serial_answer_dict[serial_node_arg(node)] = spec.to_dict(hash=spec_hash_type)
            serial_answer = serial_answer + (serial_answer_dict,)
            serial_answers.append(serial_answer)
        ret["answers"] = serial_answers
        ret["specs_by_input"] = {}
        input_specs = {} if not self.specs_by_input else self.specs_by_input
        for input, spec in input_specs.items():
            ret["specs_by_input"][str(input)] = spec.to_dict(hash=spec_hash_type)
        return ret

    @staticmethod
    def from_dict(obj: dict):
        """Returns Result object from compatible dictionary"""

        def _dict_to_node_argument(dict):
            id = dict["id"]
            pkg = dict["pkg"]
            return NodeArgument(id=id, pkg=pkg)

        def _str_to_spec(spec_str):
            return spack.spec.Spec(spec_str)

        def _dict_to_spec(spec_dict):
            loaded_spec = spack.spec.Spec.from_dict(spec_dict)
            _ensure_external_path_if_external(loaded_spec)
            spack.spec.Spec.ensure_no_deprecated(loaded_spec)
            return loaded_spec

        asp = obj.get("asp")
        spec_list = obj.get("abstract_specs")
        if not spec_list:
            raise RuntimeError("Invalid json for concretization Result object")
        if spec_list:
            spec_list = [_str_to_spec(x) for x in spec_list]
        result = Result(spec_list, asp)
        result.criteria = obj.get("criteria")
        result.optimal = obj.get("optimal")
        result.warnings = obj.get("warnings")
        result.nmodels = obj.get("nmodels")
        result.satisfiable = obj.get("satisfiable")
        result._unsolved_specs = []
        answers = []
        for answer in obj.get("answers", []):
            loaded_answer = answer[:2]
            answer_node_dict = {}
            for node, spec in answer[2].items():
                answer_node_dict[_dict_to_node_argument(json.loads(node))] = _dict_to_spec(spec)
            loaded_answer.append(answer_node_dict)
            answers.append(tuple(loaded_answer))
        result.answers = answers
        result._concrete_specs_by_input = {}
        result._concrete_specs = []
        for input, spec in obj.get("specs_by_input", {}).items():
            result._concrete_specs_by_input[_str_to_spec(input)] = _dict_to_spec(spec)
            result._concrete_specs.append(_dict_to_spec(spec))
        return result
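One subtlety worth calling out: JSON object keys must be strings, so ``to_dict`` renders each
``NodeArgument`` key as a tiny JSON document and ``from_dict`` parses it back with
``json.loads``. A self-contained sketch of that round trip (using a stand-in named tuple):

```python
import json
from typing import NamedTuple

class NodeArgument(NamedTuple):  # stand-in for the solver's node argument type
    id: str
    pkg: str

node = NodeArgument(id="0", pkg="hdf5")
key = f'{{"id": "{node.id}", "pkg": "{node.pkg}"}}'  # what serial_node_arg produces
assert NodeArgument(**json.loads(key)) == node       # what _dict_to_node_argument recovers
```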


class ConcretizationCache:
    """Store for Spack concretization results and statistics

    Serializes solver result objects and statistics to json and stores
    them at a given endpoint in a cache keyed by the sha256 of the
    asp problem and the involved control files.
    """

    def __init__(self, root: Union[str, None] = None):
        if not root:
            root = spack.config.get(
                "config:concretization_cache:url", spack.paths.default_conc_cache_path
            )
        self.root = pathlib.Path(spack.util.path.canonicalize_path(root))
        self._fc = FileCache(self.root)
        self._cache_manifest = ".cache_manifest"
        self._manifest_queue: List[Tuple[pathlib.Path, int]] = []

    def cleanup(self):
        """Prunes the concretization cache according to configured size and entry
        count limits. Cleanup is done in FIFO ordering."""
        # TODO: determine a better default
        entry_limit = spack.config.get("config:concretization_cache:entry_limit", 1000)
        bytes_limit = spack.config.get("config:concretization_cache:size_limit", 3e8)
        # lock the entire buildcache as we're removing a lot of data from the
        # manifest and cache itself
        with self._fc.read_transaction(self._cache_manifest) as f:
            count, cache_bytes = self._extract_cache_metadata(f)
            if not count or not cache_bytes:
                return
            entry_count = int(count)
            manifest_bytes = int(cache_bytes)
            # move beyond the metadata entry
            f.readline()
            if entry_count > entry_limit and entry_limit > 0:
                with self._fc.write_transaction(self._cache_manifest) as (old, new):
                    # prune the oldest 10% or until we have removed 10% of
                    # total bytes starting from oldest entry
                    # TODO: make this configurable?
                    prune_count = entry_limit // 10
                    lines_to_prune = f.readlines(prune_count)
                    for i, line in enumerate(lines_to_prune):
                        sha, cache_entry_bytes = self._parse_manifest_entry(line)
                        if sha and cache_entry_bytes:
                            cache_path = self._cache_path_from_hash(sha)
                            if self._fc.remove(cache_path):
                                entry_count -= 1
                                manifest_bytes -= int(cache_entry_bytes)
                        else:
                            tty.warn(
                                f"Invalid concretization cache entry: '{line}' on line: {i+1}"
                            )
                    self._write_manifest(f, entry_count, manifest_bytes)

            elif manifest_bytes > bytes_limit and bytes_limit > 0:
                with self._fc.write_transaction(self._cache_manifest) as (old, new):
                    # take 10% of current size off
                    prune_amount = bytes_limit // 10
                    total_pruned = 0
                    i = 0
                    while total_pruned < prune_amount:
                        sha, manifest_cache_bytes = self._parse_manifest_entry(f.readline())
                        if sha and manifest_cache_bytes:
                            entry_bytes = int(manifest_cache_bytes)
                            cache_path = self.root / sha[:2] / sha
                            if self._safe_remove(cache_path):
                                entry_count -= 1
                                manifest_bytes -= entry_bytes
                                total_pruned += entry_bytes
                        else:
                            tty.warn(
                                "Invalid concretization cache entry "
                                f"'{sha} {manifest_cache_bytes}' on line: {i}"
                            )
                        i += 1
                    self._write_manifest(f, entry_count, manifest_bytes)
        for cache_dir in self.root.iterdir():
            if cache_dir.is_dir() and not any(cache_dir.iterdir()):
                self._safe_remove(cache_dir)

    def cache_entries(self):
        """Generator producing cache entries"""
        for cache_dir in self.root.iterdir():
            # ensure component is a cache entry directory,
            # not the metadata file
            if cache_dir.is_dir():
                for cache_entry in cache_dir.iterdir():
                    if not cache_entry.is_dir():
                        yield cache_entry
                    else:
                        raise RuntimeError(
                            "Improperly formed concretization cache. "
                            f"Directory {cache_entry.name} is improperly located "
                            "within the concretization cache."
                        )

    def _parse_manifest_entry(self, line):
        """Returns parsed manifest entry lines
        with handling for invalid reads."""
        if line:
            cache_values = line.strip("\n").split(" ")
            if len(cache_values) < 2:
                tty.warn(f"Invalid cache entry at {line}")
                return None, None
            return cache_values
        return None, None

    def _write_manifest(self, manifest_file, entry_count, entry_bytes):
        """Writes new concretization cache manifest file.

        Arguments:
            manifest_file: IO stream opened for reading
                and writing wrapping the manifest file
                with cursor at calltime set to location
                where manifest should be truncated
            entry_count: new total entry count
            entry_bytes: new total entry bytes count

        """
        persisted_entries = manifest_file.readlines()
        manifest_file.truncate(0)
        manifest_file.write(f"{entry_count} {entry_bytes}\n")
        manifest_file.writelines(persisted_entries)

    def _results_from_cache(self, cache_entry_buffer: IO[str]) -> Union[Result, None]:
        """Returns a Results object from the concretizer cache

        Reads the cache hit and uses `Result`'s own deserializer
        to produce a new Result object
        """

        with current_file_position(cache_entry_buffer, 0):
            cache_str = cache_entry_buffer.read()
            # TODO: Should this be an error if None?
            # Same for _stats_from_cache
            if cache_str:
                cache_entry = json.loads(cache_str)
                result_json = cache_entry["results"]
                return Result.from_dict(result_json)
            return None

    def _stats_from_cache(self, cache_entry_buffer: IO[str]) -> Union[List, None]:
        """Returns concretization statistics from the
        concretization associated with the cache.

        Deserializes the json representation of the
        statistics covering the cached concretization run
        and returns the Python data structures
        """
        with current_file_position(cache_entry_buffer, 0):
            cache_str = cache_entry_buffer.read()
            if cache_str:
                return json.loads(cache_str)["statistics"]
            return None

    def _extract_cache_metadata(self, cache_stream: IO[str]):
        """Extracts and returns cache entry count and bytes count from head of manifest
        file"""
        # make sure we're always reading from the beginning of the stream
        # concretization cache manifest data lives at the top of the file
        with current_file_position(cache_stream, 0):
            return self._parse_manifest_entry(cache_stream.readline())

    def _prefix_digest(self, problem: str) -> Tuple[str, str]:
        """Return the first two characters of, and the full, sha256 of the given asp problem"""
        prob_digest = hashlib.sha256(problem.encode()).hexdigest()
        prefix = prob_digest[:2]
        return prefix, prob_digest

    def _cache_path_from_problem(self, problem: str) -> pathlib.Path:
        """Returns a Path object representing the path to the cache
        entry for the given problem"""
        prefix, digest = self._prefix_digest(problem)
        return pathlib.Path(prefix) / digest

    def _cache_path_from_hash(self, hash: str) -> pathlib.Path:
        """Returns a Path object representing the cache entry
        corresponding to the given sha256 hash"""
        return pathlib.Path(hash[:2]) / hash

    def _lock_prefix_from_cache_path(self, cache_path: str):
        """Returns the bit location corresponding to a given cache entry path
        for file locking"""
        return spack.util.hash.base32_prefix_bits(
            spack.util.hash.b32_hash(cache_path), spack.util.crypto.bit_length(sys.maxsize)
        )

    def flush_manifest(self):
        """Updates the concretization cache manifest file after a cache write operation.
        Updates the current byte count and entry counts and writes to the head of the
        manifest file"""
        manifest_file = self.root / self._cache_manifest
        manifest_file.touch(exist_ok=True)
        with open(manifest_file, "r+", encoding="utf-8") as f:
            # check if manifest is empty
            count, cache_bytes = self._extract_cache_metadata(f)
            if not count or not cache_bytes:
                # cache is uninitialized
                count = 0
                cache_bytes = 0
            f.seek(0, io.SEEK_END)
            for manifest_update in self._manifest_queue:
                entry_path, entry_bytes = manifest_update
                count += 1
                cache_bytes += entry_bytes
                f.write(f"{entry_path.name} {entry_bytes}")
            f.seek(0, io.SEEK_SET)
            new_stats = f"{int(count)+1} {int(cache_bytes)}\n"
            f.write(new_stats)

    def _register_cache_update(self, cache_path: pathlib.Path, bytes_written: int):
        """Adds manifest entry to update queue for later updates to the manifest"""
        self._manifest_queue.append((cache_path, bytes_written))

    def _safe_remove(self, cache_dir: pathlib.Path):
        """Removes cache entries with handling for the case where the entry has been
        removed already or there are multiple cache entries in a directory"""
        try:
            if cache_dir.is_dir():
                cache_dir.rmdir()
            else:
                cache_dir.unlink()
            return True
        except FileNotFoundError:
            # This is acceptable, removal is idempotent
            pass
        except OSError as e:
            if e.errno == errno.ENOTEMPTY:
                # there exists another cache entry in this directory, don't clean yet
                pass
        return False

    def store(self, problem: str, result: Result, statistics: List, test: bool = False):
        """Creates entry in concretization cache for problem if none exists,
        storing the concretization Result object and statistics in the cache
        as serialized json joined as a single file.

        Hash membership is computed based on the sha256 of the provided asp
        problem.
        """
        cache_path = self._cache_path_from_problem(problem)
        if self._fc.init_entry(cache_path):
            # if an entry for this conc hash exists already, we don't want
            # to overwrite, just exit
            tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
            return
        with self._fc.write_transaction(cache_path) as (old, new):
            if old:
                # Entry for this conc hash exists already, do not overwrite
                tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
                return
            cache_dict = {"results": result.to_dict(test=test), "statistics": statistics}
            bytes_written = new.write(json.dumps(cache_dict))
            self._register_cache_update(cache_path, bytes_written)

    def fetch(self, problem: str) -> Union[Tuple[Result, List], Tuple[None, None]]:
        """Returns the concretization cache result for a lookup based on the given problem.

        Checks the concretization cache for the given problem, and either returns the
        Python objects cached on disk representing the concretization results and statistics
        or returns none if no cache entry was found.
        """
        cache_path = self._cache_path_from_problem(problem)
        result, statistics = None, None
        with self._fc.read_transaction(cache_path) as f:
            if f:
                result = self._results_from_cache(f)
                statistics = self._stats_from_cache(f)
        if result and statistics:
            tty.debug(f"Concretization cache hit at {str(cache_path)}")
            return result, statistics
        tty.debug(f"Concretization cache miss at {str(cache_path)}")
        return None, None


CONC_CACHE: ConcretizationCache = llnl.util.lang.Singleton(
    lambda: ConcretizationCache()
)  # type: ignore
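The cache uses a git-object-style on-disk layout: each entry is stored under a two-character
shard directory derived from the sha256 of the problem text. A standalone sketch of the path
arithmetic performed by ``_prefix_digest`` and ``_cache_path_from_problem``:

```python
import hashlib
import pathlib

problem = 'node(0, "zlib").'  # any serialized ASP problem plus control file text
digest = hashlib.sha256(problem.encode()).hexdigest()
entry_path = pathlib.Path(digest[:2]) / digest  # e.g. "1f/1f3a..." relative to the cache root
```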


def _normalize_packages_yaml(packages_yaml):
    normalized_yaml = copy.copy(packages_yaml)
@@ -806,6 +1185,15 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
        if sys.platform == "win32":
            tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
            spack.bootstrap.core.ensure_winsdk_external_or_raise()
        control_files = ["concretize.lp", "heuristic.lp", "display.lp"]
        if not setup.concretize_everything:
            control_files.append("when_possible.lp")
        if using_libc_compatibility():
            control_files.append("libc_compatibility.lp")
        else:
            control_files.append("os_compatibility.lp")
        if setup.enable_splicing:
            control_files.append("splices.lp")

        timer.start("setup")
        asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
@@ -815,123 +1203,133 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
            return Result(specs), None, None
        timer.stop("setup")

        timer.start("load")
        # Add the problem instance
        self.control.add("base", [], asp_problem)
        # Load the file itself
        timer.start("cache-check")
        timer.start("ordering")
        # ensure deterministic output
        problem_repr = "\n".join(sorted(asp_problem.split("\n")))
        timer.stop("ordering")
        parent_dir = os.path.dirname(__file__)
        self.control.load(os.path.join(parent_dir, "concretize.lp"))
        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        if not setup.concretize_everything:
            self.control.load(os.path.join(parent_dir, "when_possible.lp"))
        full_path = lambda x: os.path.join(parent_dir, x)
        abs_control_files = [full_path(x) for x in control_files]
        for ctrl_file in abs_control_files:
            with open(ctrl_file, "r+", encoding="utf-8") as f:
                problem_repr += "\n" + f.read()

        # Binary compatibility is based on libc on Linux, and on the os tag elsewhere
        if using_libc_compatibility():
            self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
        else:
            self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
        if setup.enable_splicing:
            self.control.load(os.path.join(parent_dir, "splices.lp"))
        result = None
        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
        if conc_cache_enabled:
            result, concretization_stats = CONC_CACHE.fetch(problem_repr)

        timer.stop("load")
        timer.stop("cache-check")
        if not result:
            timer.start("load")
            # Add the problem instance
            self.control.add("base", [], asp_problem)
            # Load the files
            [self.control.load(lp) for lp in abs_control_files]
            timer.stop("load")

        # Grounding is the first step in the solve -- it turns our facts
        # and first-order logic rules into propositional logic.
        timer.start("ground")
        self.control.ground([("base", [])])
        timer.stop("ground")
            # Grounding is the first step in the solve -- it turns our facts
            # and first-order logic rules into propositional logic.
            timer.start("ground")
            self.control.ground([("base", [])])
            timer.stop("ground")

        # With a grounded program, we can run the solve.
        models = []  # stable models if things go well
        cores = []  # unsatisfiable cores if they do not
            # With a grounded program, we can run the solve.
            models = []  # stable models if things go well
            cores = []  # unsatisfiable cores if they do not

        def on_model(model):
            models.append((model.cost, model.symbols(shown=True, terms=True)))
            def on_model(model):
                models.append((model.cost, model.symbols(shown=True, terms=True)))

        solve_kwargs = {
            "assumptions": setup.assumptions,
            "on_model": on_model,
            "on_core": cores.append,
        }
            solve_kwargs = {
                "assumptions": setup.assumptions,
                "on_model": on_model,
                "on_core": cores.append,
            }

        if clingo_cffi():
            solve_kwargs["on_unsat"] = cores.append
            if clingo_cffi():
                solve_kwargs["on_unsat"] = cores.append

        timer.start("solve")
        time_limit = spack.config.CONFIG.get("concretizer:timeout", -1)
        error_on_timeout = spack.config.CONFIG.get("concretizer:error_on_timeout", True)
        # Spack uses 0 to set no time limit, clingo API uses -1
        if time_limit == 0:
            time_limit = -1
        with self.control.solve(**solve_kwargs, async_=True) as handle:
            finished = handle.wait(time_limit)
            if not finished:
                specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
                header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
                if error_on_timeout:
                    raise UnsatisfiableSpecError(f"{header}, stopping concretization")
                warnings.warn(f"{header}, using the best configuration found so far")
                handle.cancel()
            timer.start("solve")
            time_limit = spack.config.CONFIG.get("concretizer:timeout", -1)
            error_on_timeout = spack.config.CONFIG.get("concretizer:error_on_timeout", True)
            # Spack uses 0 to set no time limit, clingo API uses -1
            if time_limit == 0:
                time_limit = -1
            with self.control.solve(**solve_kwargs, async_=True) as handle:
                finished = handle.wait(time_limit)
                if not finished:
                    specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
                    header = (
                        f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
                    )
                    if error_on_timeout:
                        raise UnsatisfiableSpecError(f"{header}, stopping concretization")
                    warnings.warn(f"{header}, using the best configuration found so far")
                    handle.cancel()

            solve_result = handle.get()
        timer.stop("solve")
                solve_result = handle.get()
            timer.stop("solve")

        # once done, construct the solve result
        result = Result(specs)
        result.satisfiable = solve_result.satisfiable
            # once done, construct the solve result
            result = Result(specs)
            result.satisfiable = solve_result.satisfiable

        if result.satisfiable:
            timer.start("construct_specs")
            # get the best model
            builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
            min_cost, best_model = min(models)
            if result.satisfiable:
                timer.start("construct_specs")
                # get the best model
                builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
                min_cost, best_model = min(models)

            # first check for errors
            error_handler = ErrorHandler(best_model, specs)
            error_handler.raise_if_errors()
                # first check for errors
                error_handler = ErrorHandler(best_model, specs)
                error_handler.raise_if_errors()

            # build specs from spec attributes in the model
            spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
            answers = builder.build_specs(spec_attrs)
                # build specs from spec attributes in the model
                spec_attrs = [
                    (name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")
                ]
                answers = builder.build_specs(spec_attrs)

            # add best spec to the results
            result.answers.append((list(min_cost), 0, answers))
                # add best spec to the results
                result.answers.append((list(min_cost), 0, answers))

            # get optimization criteria
            criteria_args = extract_args(best_model, "opt_criterion")
            result.criteria = build_criteria_names(min_cost, criteria_args)
                # get optimization criteria
                criteria_args = extract_args(best_model, "opt_criterion")
                result.criteria = build_criteria_names(min_cost, criteria_args)

            # record the number of models the solver considered
            result.nmodels = len(models)
                # record the number of models the solver considered
                result.nmodels = len(models)

            # record the possible dependencies in the solve
            result.possible_dependencies = setup.pkgs
            timer.stop("construct_specs")
            timer.stop()
        elif cores:
            result.control = self.control
            result.cores.extend(cores)
                # record the possible dependencies in the solve
                result.possible_dependencies = setup.pkgs
                timer.stop("construct_specs")
                timer.stop()
            elif cores:
                result.control = self.control
                result.cores.extend(cores)

        result.raise_if_unsat()

        if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
            unsolved_str = Result.format_unsolved(result.unsolved_specs)
            raise InternalConcretizerError(
                "Internal Spack error: the solver completed but produced specs"
                " that do not satisfy the request. Please report a bug at "
                f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
            )
            if conc_cache_enabled:
                CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
            concretization_stats = self.control.statistics
        if output.timers:
            timer.write_tty()
            print()

        if output.stats:
            print("Statistics:")
            pprint.pprint(self.control.statistics)

            result.raise_if_unsat()

            if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
                unsolved_str = Result.format_unsolved(result.unsolved_specs)
                raise InternalConcretizerError(
                    "Internal Spack error: the solver completed but produced specs"
                    " that do not satisfy the request. Please report a bug at "
                    f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
                )

        return result, timer, self.control.statistics
            pprint.pprint(concretization_stats)
        return result, timer, concretization_stats
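Because the removed and added versions of the method are interleaved above, here is a condensed
sketch of the new control flow only (not the full method): build a canonical problem string, try
the cache, and fall back to a real solve whose result is then stored:

```python
# canonical cache key: sorted facts plus the text of every control file
problem_repr = "\n".join(sorted(asp_problem.split("\n")))
for ctrl_file in abs_control_files:
    with open(ctrl_file, "r", encoding="utf-8") as f:
        problem_repr += "\n" + f.read()

result = None
if conc_cache_enabled:
    result, concretization_stats = CONC_CACHE.fetch(problem_repr)
if not result:
    # ground, solve, and build `result` as in the body above, then:
    if conc_cache_enabled:
        CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
    concretization_stats = self.control.statistics
return result, timer, concretization_stats
```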


class ConcreteSpecsByHash(collections.abc.Mapping):
@@ -1373,7 +1771,7 @@ def effect_rules(self):
            return

        self.gen.h2("Imposed requirements")
        for name in self._effect_cache:
        for name in sorted(self._effect_cache):
            cache = self._effect_cache[name]
            for (spec_str, _), (effect_id, requirements) in cache.items():
                self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
@@ -1426,8 +1824,8 @@ def define_variant(

        elif isinstance(values, vt.DisjointSetsOfValues):
            union = set()
            for sid, s in enumerate(values.sets):
                for value in s:
            for sid, s in enumerate(sorted(values.sets)):
                for value in sorted(s):
                    pkg_fact(fn.variant_value_from_disjoint_sets(vid, value, sid))
                union.update(s)
            values = union
@@ -1608,7 +2006,7 @@ def package_provider_rules(self, pkg):
            self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(vpkg_name)))

        for when, provided in pkg.provided.items():
            for vpkg in provided:
            for vpkg in sorted(provided):
                if vpkg.name not in self.possible_virtuals:
                    continue

@@ -1623,8 +2021,8 @@ def package_provider_rules(self, pkg):
            condition_id = self.condition(
                when, required_name=pkg.name, msg="Virtuals are provided together"
            )
            for set_id, virtuals_together in enumerate(sets_of_virtuals):
                for name in virtuals_together:
            for set_id, virtuals_together in enumerate(sorted(sets_of_virtuals)):
                for name in sorted(virtuals_together):
                    self.gen.fact(
                        fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
                    )
@@ -1734,7 +2132,7 @@ def package_splice_rules(self, pkg):
            for map in pkg.variants.values():
                for k in map:
                    filt_match_variants.add(k)
            filt_match_variants = list(filt_match_variants)
            filt_match_variants = sorted(filt_match_variants)
            variant_constraints = self._gen_match_variant_splice_constraints(
                pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
            )
@@ -2264,7 +2662,7 @@ def define_package_versions_and_validate_preferences(
    ):
        """Declare any versions in specs not declared in packages."""
        packages_yaml = spack.config.get("packages")
        for pkg_name in possible_pkgs:
        for pkg_name in sorted(possible_pkgs):
            pkg_cls = self.pkg_class(pkg_name)

            # All the versions from the corresponding package.py file. Since concepts
@@ -2592,7 +2990,7 @@ def define_variant_values(self):
        """
        # Tell the concretizer about possible values from specs seen in spec_clauses().
        # We might want to order these facts by pkg and name if we are debugging.
        for pkg_name, variant_def_id, value in self.variant_values_from_specs:
        for pkg_name, variant_def_id, value in sorted(self.variant_values_from_specs):
            try:
                vid = self.variant_ids_by_def_id[variant_def_id]
            except KeyError:
@@ -2630,6 +3028,8 @@ def concrete_specs(self):
            # Declare as possible parts of specs that are not in package.py
            # - Add versions to possible versions
            # - Add OS to possible OS's

            # is traverse deterministic?
            for dep in spec.traverse():
                self.possible_versions[dep.name].add(dep.version)
                if isinstance(dep.version, vn.GitVersion):
@@ -2867,7 +3267,7 @@ def define_runtime_constraints(self):
        recorder.consume_facts()

    def literal_specs(self, specs):
        for spec in specs:
        for spec in sorted(specs):
            self.gen.h2("Spec: %s" % str(spec))
            condition_id = next(self._id_counter)
            trigger_id = next(self._id_counter)
@@ -3368,7 +3768,7 @@ def consume_facts(self):
        # on the available compilers)
        self._setup.pkg_version_rules(runtime_pkg)

        for imposed_spec, when_spec in self.runtime_conditions:
        for imposed_spec, when_spec in sorted(self.runtime_conditions):
            msg = f"{when_spec} requires {imposed_spec} at runtime"
            _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)

@@ -4225,6 +4625,9 @@ def solve_with_stats(
        reusable_specs.extend(self.selector.reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)
        output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)

        CONC_CACHE.flush_manifest()
        CONC_CACHE.cleanup()
        return self.driver.solve(
            setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
        )
@@ -4294,6 +4697,9 @@ def solve_in_rounds(
            for spec in result.specs:
                reusable_specs.extend(spec.traverse())

            CONC_CACHE.flush_manifest()
            CONC_CACHE.cleanup()


class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
    """There was an issue with the spec that was requested (i.e. a user error)."""
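The scattered ``sorted(...)`` changes above all exist to keep the emitted facts stable: Python
sets and dicts iterate in an order that can vary between interpreter runs (string hashing is
randomized by default), and any reordering of facts would change ``problem_repr`` and therefore
the sha256 cache key. A self-contained illustration:

```python
# Iteration order of a set of strings can differ from run to run under
# hash randomization; the sorted order is identical every time.
facts = {"possible_provider(zlib)", "provided_together(1, 0, mpi)"}
unstable_repr = "\n".join(facts)           # may vary across interpreter runs
canonical_repr = "\n".join(sorted(facts))  # stable input for a cache key
```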
@@ -52,8 +52,7 @@ def test_load_shell(shell, set_command):
    mpileaks_spec = spack.concretize.concretize_one("mpileaks")

    # Ensure our reference variable is clean.
    hello_world_paths = [os.path.normpath(p) for p in ("/hello", "/world")]
    os.environ["CMAKE_PREFIX_PATH"] = os.pathsep.join(hello_world_paths)
    os.environ["CMAKE_PREFIX_PATH"] = "/hello" + os.pathsep + "/world"

    shell_out = load(shell, "mpileaks")

@@ -70,7 +69,7 @@ def extract_value(output, variable):
    paths_shell = extract_value(shell_out, "CMAKE_PREFIX_PATH")

    # We should've prepended new paths, and keep old ones.
    assert paths_shell[-2:] == hello_world_paths
    assert paths_shell[-2:] == ["/hello", "/world"]

    # All but the last two paths are added by spack load; lookup what packages they're from.
    pkgs = [prefix_to_pkg(p) for p in paths_shell[:-2]]

@@ -42,7 +42,7 @@ def mock_pkg_git_repo(git, tmp_path_factory):
    repo_dir = root_dir / "builtin.mock"
    shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))

    repo_cache = spack.util.file_cache.FileCache(str(root_dir / "cache"))
    repo_cache = spack.util.file_cache.FileCache(root_dir / "cache")
    mock_repo = spack.repo.RepoPath(str(repo_dir), cache=repo_cache)
    mock_repo_packages = mock_repo.repos[0].packages_path


@@ -3255,3 +3255,54 @@ def test_spec_unification(unify, mutable_config, mock_packages):
    maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext
    with maybe_fails(spack.solver.asp.UnsatisfiableSpecError):
        _ = spack.cmd.parse_specs([a_restricted, b], concretize=True)


def test_concretization_cache_roundtrip(use_concretization_cache, monkeypatch, mutable_config):
    """Tests whether we can write the results of a clingo solve to the cache
    and load the same spec request from the cache to produce identical specs"""
    # Force determinism:
    # Solver setup is normally non-deterministic due to non-determinism in
    # asp solver setup logic generation. The only other inputs to the cache keys are
    # the .lp files, which are invariant over the course of this test.
    # This method forces the same setup to be produced for the same specs,
    # which gives us a guarantee of cache hits, as it removes the only
    # element of non-deterministic solver setup for the same spec.
    # Basically just a quick and dirty memoization.
    solver_setup = spack.solver.asp.SpackSolverSetup.setup

    def _setup(self, specs, *, reuse=None, allow_deprecated=False):
        if not getattr(_setup, "cache_setup", None):
            cache_setup = solver_setup(self, specs, reuse=reuse, allow_deprecated=allow_deprecated)
            setattr(_setup, "cache_setup", cache_setup)
        return getattr(_setup, "cache_setup")

    # monkeypatch our forced-determinism setup method into solver setup
    monkeypatch.setattr(spack.solver.asp.SpackSolverSetup, "setup", _setup)

    assert spack.config.get("config:concretization_cache:enable")

    # run one standard concretization to populate the cache and the setup method
    # memoization
    h = spack.concretize.concretize_one("hdf5")

    # due to our forced determinism above, we should not be observing
    # cache misses; assert that we're not storing any new cache entries
    def _ensure_no_store(self, problem: str, result, statistics, test=False):
        # always throw, we never want to reach this code path
        assert False, "Concretization cache hit expected"

    # Assert that we're actually hitting the cache
    cache_fetch = spack.solver.asp.ConcretizationCache.fetch

    def _ensure_cache_hits(self, problem: str):
        result, statistics = cache_fetch(self, problem)
        assert result, "Expected successful concretization cache hit"
        assert statistics, "Expected statistics to be non null on cache hit"
        return result, statistics

    monkeypatch.setattr(spack.solver.asp.ConcretizationCache, "store", _ensure_no_store)
    monkeypatch.setattr(spack.solver.asp.ConcretizationCache, "fetch", _ensure_cache_hits)
    # ensure subsequent concretizations of the same spec produce the same spec
    # object
    for _ in range(5):
        assert h == spack.concretize.concretize_one("hdf5")
@@ -341,6 +341,16 @@ def pytest_collection_modifyitems(config, items):
            item.add_marker(skip_as_slow)


@pytest.fixture(scope="function")
def use_concretization_cache(mutable_config, tmpdir):
    """Enables the use of the concretization cache"""
    spack.config.set("config:concretization_cache:enable", True)
    # ensure we have an isolated concretization cache
    new_conc_cache_loc = str(tmpdir.mkdir("concretization"))
    spack.config.set("config:concretization_cache:path", new_conc_cache_loc)
    yield


#
# These fixtures are applied to all tests
#
@@ -2139,7 +2149,7 @@ def _c_compiler_always_exists():
def mock_test_cache(tmp_path_factory):
    cache_dir = tmp_path_factory.mktemp("cache")
    print(cache_dir)
    return spack.util.file_cache.FileCache(str(cache_dir))
    return spack.util.file_cache.FileCache(cache_dir)


class MockHTTPResponse(io.IOBase):
@@ -14,3 +14,5 @@ config:
  checksum: true
  dirty: false
  locks: {1}
  concretization_cache:
    enable: false
@@ -161,7 +161,7 @@ def test_handle_unknown_package(temporary_store, config, mock_packages, tmp_path
    """
    layout = temporary_store.layout

    repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
    repo_cache = spack.util.file_cache.FileCache(tmp_path / "cache")
    mock_db = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=repo_cache)

    not_in_mock = set.difference(

@@ -34,7 +34,7 @@ def extra_repo(tmp_path_factory, request):
        subdirectory: '{request.param}'
    """
    )
    repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
    repo_cache = spack.util.file_cache.FileCache(cache_dir)
    return spack.repo.Repo(str(repo_dir), cache=repo_cache), request.param


@@ -194,7 +194,7 @@ def _repo_paths(repos):

    repo_paths, namespaces = _repo_paths(repos)

    repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
    repo_cache = spack.util.file_cache.FileCache(tmp_path / "cache")
    repo_path = spack.repo.RepoPath(*repo_paths, cache=repo_cache)
    assert len(repo_path.repos) == len(namespaces)
    assert [x.namespace for x in repo_path.repos] == namespaces
@@ -5,16 +5,17 @@
|
||||
import errno
|
||||
import math
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
from typing import IO, Optional, Tuple
|
||||
from typing import IO, Dict, Optional, Tuple, Union
|
||||
|
||||
from llnl.util.filesystem import mkdirp, rename
|
||||
from llnl.util.filesystem import rename
|
||||
|
||||
from spack.error import SpackError
|
||||
from spack.util.lock import Lock, ReadTransaction, WriteTransaction
|
||||
|
||||
|
||||
def _maybe_open(path: str) -> Optional[IO[str]]:
|
||||
def _maybe_open(path: Union[str, pathlib.Path]) -> Optional[IO[str]]:
|
||||
try:
|
||||
return open(path, "r", encoding="utf-8")
|
||||
except OSError as e:
|
||||
@@ -24,7 +25,7 @@ def _maybe_open(path: str) -> Optional[IO[str]]:
|
||||
|
||||
|
||||
class ReadContextManager:
|
||||
def __init__(self, path: str) -> None:
|
||||
def __init__(self, path: Union[str, pathlib.Path]) -> None:
|
||||
self.path = path
|
||||
|
||||
def __enter__(self) -> Optional[IO[str]]:
|
||||
@@ -70,7 +71,7 @@ class FileCache:
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, root, timeout=120):
|
||||
def __init__(self, root: Union[str, pathlib.Path], timeout=120):
|
||||
"""Create a file cache object.
|
||||
|
||||
This will create the cache directory if it does not exist yet.
|
||||
@@ -82,58 +83,60 @@ def __init__(self, root, timeout=120):
|
||||
for cache files, this specifies how long Spack should wait
|
||||
before assuming that there is a deadlock.
|
||||
"""
|
||||
self.root = root.rstrip(os.path.sep)
|
||||
if not os.path.exists(self.root):
|
||||
mkdirp(self.root)
|
||||
if isinstance(root, str):
|
||||
root = pathlib.Path(root)
|
||||
self.root = root
|
||||
self.root.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self._locks = {}
|
||||
self._locks: Dict[Union[pathlib.Path, str], Lock] = {}
|
||||
self.lock_timeout = timeout
|
||||
|
||||
def destroy(self):
|
||||
"""Remove all files under the cache root."""
|
||||
for f in os.listdir(self.root):
|
||||
path = os.path.join(self.root, f)
|
||||
if os.path.isdir(path):
|
||||
shutil.rmtree(path, True)
|
||||
for f in self.root.iterdir():
|
||||
if f.is_dir():
|
||||
shutil.rmtree(f, True)
|
||||
         else:
-            os.remove(path)
+            f.unlink()

-    def cache_path(self, key):
+    def cache_path(self, key: Union[str, pathlib.Path]):
         """Path to the file in the cache for a particular key."""
-        return os.path.join(self.root, key)
+        return self.root / key

-    def _lock_path(self, key):
+    def _lock_path(self, key: Union[str, pathlib.Path]):
         """Path to the file in the cache for a particular key."""
         keyfile = os.path.basename(key)
         keydir = os.path.dirname(key)

-        return os.path.join(self.root, keydir, "." + keyfile + ".lock")
+        return self.root / keydir / ("." + keyfile + ".lock")

-    def _get_lock(self, key):
+    def _get_lock(self, key: Union[str, pathlib.Path]):
         """Create a lock for a key, if necessary, and return a lock object."""
         if key not in self._locks:
-            self._locks[key] = Lock(self._lock_path(key), default_timeout=self.lock_timeout)
+            self._locks[key] = Lock(str(self._lock_path(key)), default_timeout=self.lock_timeout)
         return self._locks[key]

-    def init_entry(self, key):
+    def init_entry(self, key: Union[str, pathlib.Path]):
         """Ensure we can access a cache file. Create a lock for it if needed.

         Return whether the cache file exists yet or not.
         """
         cache_path = self.cache_path(key)

+        # Avoid using pathlib here to allow the logic below to
+        # function as is
+        # TODO: Maybe refactor the following logic for pathlib
         exists = os.path.exists(cache_path)
         if exists:
-            if not os.path.isfile(cache_path):
+            if not cache_path.is_file():
                 raise CacheError("Cache file is not a file: %s" % cache_path)

             if not os.access(cache_path, os.R_OK):
                 raise CacheError("Cannot access cache file: %s" % cache_path)
         else:
             # if the file is hierarchical, make parent directories
-            parent = os.path.dirname(cache_path)
-            if parent.rstrip(os.path.sep) != self.root:
-                mkdirp(parent)
+            parent = cache_path.parent
+            if parent != self.root:
+                parent.mkdir(parents=True, exist_ok=True)

             if not os.access(parent, os.R_OK | os.W_OK):
                 raise CacheError("Cannot access cache directory: %s" % parent)
@@ -142,7 +145,7 @@ def init_entry(self, key):
         self._get_lock(key)
         return exists

-    def read_transaction(self, key):
+    def read_transaction(self, key: Union[str, pathlib.Path]):
         """Get a read transaction on a file cache item.

         Returns a ReadTransaction context manager and opens the cache file for
@@ -153,9 +156,11 @@ def read_transaction(self, key):

         """
         path = self.cache_path(key)
-        return ReadTransaction(self._get_lock(key), acquire=lambda: ReadContextManager(path))
+        return ReadTransaction(
+            self._get_lock(key), acquire=lambda: ReadContextManager(path)  # type: ignore
+        )

-    def write_transaction(self, key):
+    def write_transaction(self, key: Union[str, pathlib.Path]):
         """Get a write transaction on a file cache item.

         Returns a WriteTransaction context manager that opens a temporary file
@@ -167,9 +172,11 @@ def write_transaction(self, key):
         if os.path.exists(path) and not os.access(path, os.W_OK):
             raise CacheError(f"Insufficient permissions to write to file cache at {path}")

-        return WriteTransaction(self._get_lock(key), acquire=lambda: WriteContextManager(path))
+        return WriteTransaction(
+            self._get_lock(key), acquire=lambda: WriteContextManager(path)  # type: ignore
+        )

-    def mtime(self, key) -> float:
+    def mtime(self, key: Union[str, pathlib.Path]) -> float:
         """Return modification time of cache file, or -inf if it does not exist.

         Time is in units returned by os.stat in the mtime field, which is
@@ -179,14 +186,14 @@ def mtime(self, key) -> float:
         if not self.init_entry(key):
             return -math.inf
         else:
-            return os.stat(self.cache_path(key)).st_mtime
+            return self.cache_path(key).stat().st_mtime

-    def remove(self, key):
+    def remove(self, key: Union[str, pathlib.Path]):
         file = self.cache_path(key)
         lock = self._get_lock(key)
         try:
             lock.acquire_write()
-            os.unlink(file)
+            file.unlink()
         except OSError as e:
             # File not found is OK, so remove is idempotent.
             if e.errno != errno.ENOENT:
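The net effect of this refactor is that ``FileCache`` keys and internal paths may be either ``str`` or ``pathlib.Path`` objects, while the lock layer still receives plain strings. A minimal usage sketch of the resulting API, assuming the class lives at ``spack.util.file_cache.FileCache`` and that the transaction context managers behave as shown in the hunks above:

import pathlib

from spack.util.file_cache import FileCache  # assumed import path for this class

cache = FileCache(pathlib.Path("/tmp/demo-cache"))

# Keys may now be strings or pathlib.Path objects; both resolve to the
# same file below the cache root.
key = pathlib.Path("concretization") / "entry.json"

# The write transaction yields (old, new) streams; content written to
# `new` replaces the cache entry when the transaction commits.
with cache.write_transaction(key) as (old, new):
    new.write("cached solver output")

# The read transaction yields a readable stream over the cached file.
with cache.read_transaction(key) as f:
    assert f.read() == "cached solver output"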
@@ -2,6 +2,7 @@ ci:
   broken-tests-packages:
   - mpich
   - openmpi
+  - py-mpi4py
   pipeline-gen:
   - build-job-remove:
       tags: [spack]
@@ -43,6 +43,11 @@ class Alps(CMakePackage):

     extends("python")

+    # https://github.com/ALPSim/ALPS/issues/9
+    conflicts(
+        "%gcc@14", when="@:2.3.3-beta.6", msg="use gcc older than version 14 or else build fails"
+    )
+
     # See https://github.com/ALPSim/ALPS/issues/6#issuecomment-2604912169
     # for why this is needed
     resources = {
@@ -17,6 +17,7 @@ class Fmt(CMakePackage):

     license("MIT")

+    version("11.1.4", sha256="49b039601196e1a765e81c5c9a05a61ed3d33f23b3961323d7322e4fe213d3e6")
     version("11.1.3", sha256="7df2fd3426b18d552840c071c977dc891efe274051d2e7c47e2c83c3918ba6df")
     version("11.1.2", sha256="ef54df1d4ba28519e31bf179f6a4fb5851d684c328ca051ce5da1b52bf8b1641")
     version("11.1.1", sha256="a25124e41c15c290b214c4dec588385153c91b47198dbacda6babce27edc4b45")
@@ -3,6 +3,10 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import subprocess
+import sys
+
+import llnl.util.filesystem as fsys
 import llnl.util.tty as tty

 from spack.package import *

@@ -57,19 +61,29 @@ def setup_run_environment(self, env):
     @on_package_attributes(run_tests=True)
     @run_after("install")
     def build_test(self):
-        testdir = "smoke_test_build"
+        testdir = join_path(self.stage.source_path, "smoke_test_build")
         cmakeExampleDir = join_path(self.stage.source_path, "examples")
         cmake_args = [
             cmakeExampleDir,
             "-DBUILD_SHARED_LIBS=ON",
             self.define("CMAKE_PREFIX_PATH", self.prefix),
         ]
+        adapter0_test_path = join_path(testdir, "adaptor0/adaptor0_test")
+        if sys.platform == "win32":
+            # Specify ninja generator for `cmake` call used to generate test artifact
+            # (this differs from the build of `libcatalyst` itself); if unspecified, the
+            # default is to use Visual Studio, which generates a more-complex path
+            # (adapter0/<CONFIG>/adaptor0_test rather than adaptor0/adaptor0_test).
+            cmake_args.append("-GNinja")
+            # To run the test binary on Windows, we need to construct an rpath
+            # for the current package being tested, including the package
+            # itself
+            fsys.make_package_test_rpath(self, adapter0_test_path)
         cmake = which(self.spec["cmake"].prefix.bin.cmake)

         with working_dir(testdir, create=True):
             cmake(*cmake_args)
             cmake(*(["--build", "."]))
             tty.info("Running Catalyst test")

-            res = subprocess.run(["adaptor0/adaptor0_test", "catalyst"])
+            res = subprocess.run([adapter0_test_path, "catalyst"])
             assert res.returncode == 0
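The Windows branch relies on a new ``fsys.make_package_test_rpath`` helper whose implementation is not part of this hunk. Since Windows has no ELF-style rpath and its loader resolves DLLs via ``PATH``, a helper like this presumably makes the DLLs of the package and its link dependencies discoverable before the test binary runs. A rough, hypothetical sketch of that idea (names and logic are assumptions, not the real helper):

import os
import sys


def sketch_make_test_rpath(pkg, test_binary_path):
    """Hypothetical stand-in for make_package_test_rpath: on Windows the
    loader searches PATH for DLLs, so prepend the bin/ directories of the
    package and its link-type dependencies."""
    if sys.platform != "win32":
        return
    search_dirs = [pkg.prefix.bin]
    search_dirs += [dep.prefix.bin for dep in pkg.spec.dependencies(deptype="link")]
    os.environ["PATH"] = os.pathsep.join(
        [str(d) for d in search_dirs] + [os.environ.get("PATH", "")]
    )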
var/spack/repos/builtin/packages/plantuml/package.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import glob
import os

from spack.package import *


class Plantuml(Package):
    """PlantUML is a highly versatile tool that facilitates the rapid
    and straightforward creation of a wide array of diagrams."""

    homepage = "https://plantuml.com"
    url = "https://github.com/plantuml/plantuml/releases/download/v1.2025.1/plantuml-lgpl-1.2025.1.jar"

    maintainers("greenc-FNAL", "knoepfel", "marcpaterno")

    license("LGPL-3.0-or-later", checked_by="greenc-FNAL")

    version(
        "1.2025.1",
        sha256="b08112f0c8ac2a2085c8c4a81ac9eac7bc5a3413a492c252cad4d39e473d9d6d",
        expand=False,
    )

    depends_on("java@8.0:", type="run")
    depends_on("graphviz", type="run")

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        rename(glob.glob("plantuml-*.jar")[0], "plantuml.jar")
        install("plantuml.jar", prefix.bin)
        plantuml_wrapper = join_path(os.path.dirname(__file__), "plantuml")
        install(plantuml_wrapper, prefix.bin.plantuml)

    def setup_run_environment(self, env):
        env.set("PLANTUML_JAR_LOCATION", join_path(self.prefix.bin, "plantuml.jar"))
var/spack/repos/builtin/packages/plantuml/plantuml (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/bash
if [[ "$*" != *"-gui"* ]]; then
    VMARGS="-Djava.awt.headless=true"
fi
exec java $VMARGS -jar "$PLANTUML_JAR_LOCATION" "$@"
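Note how the two new files cooperate: ``setup_run_environment`` in the package exports ``PLANTUML_JAR_LOCATION`` pointing at the installed jar, and the wrapper script installed alongside it launches that jar, forcing headless AWT unless ``-gui`` is passed on the command line.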
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import spack.url
 from spack.package import *


@@ -11,89 +12,42 @@ class PyRadicalEntk(PythonPackage):

     homepage = "https://radical-cybertools.github.io"
     git = "https://github.com/radical-cybertools/radical.entk.git"
-    pypi = "radical.entk/radical.entk-1.47.0.tar.gz"
+    pypi = "radical_entk/radical_entk-1.92.0.tar.gz"

     maintainers("andre-merzky")

     license("MIT")

     version("develop", branch="devel")
-    version("1.47.0", sha256="a4338e3a87147c032fb3a16a03990155742cc64c6625cfb4e1588ae0e51aafda")
-    version("1.39.0", sha256="72d64b25df9f3cb1dcbc32323a669d86d947cf07d15bed91cfedca2a99fb3ef1")
+    version("1.92.0", sha256="908a5d35cbc801c8b064837a21cbf5ad1a9b4aed0db48f2db84ef85d4e529cef")

     version(
-        "1.20.0",
-        sha256="1b9fc470b926a93528fd2a898636bdcd1c565bd58ba47608f9bead811d8a46d7",
+        "1.47.0",
+        sha256="a4338e3a87147c032fb3a16a03990155742cc64c6625cfb4e1588ae0e51aafda",
         deprecated=True,
     )
     version(
-        "1.18.0",
-        sha256="049f70ec7e95819ec0ea706ee6275db04799ceff119dd7b675ef0d36d814de6f",
-        deprecated=True,
-    )
-    version(
-        "1.17.0",
-        sha256="695e162b8b6209384660400920f4a2e613d01f0b904e44cfe5b5d012dcc35af9",
-        deprecated=True,
-    )
-    version(
-        "1.16.0",
-        sha256="6611b4634ad554651601d9aed3a6d8b8273073da6218112bb472ce51f771ac8e",
-        deprecated=True,
-    )
-    version(
-        "1.14.0",
-        sha256="beb6de5625b52b3aeeace52f7b4ac608e9f1bb761d8e9cdfe85d3e36931ce9f3",
-        deprecated=True,
-    )
-    version(
-        "1.13.0",
-        sha256="5489338173409777d69885fd5fdb296552937d5a539a8182321bebe273647e1c",
-        deprecated=True,
-    )
-    version(
-        "1.12.0",
-        sha256="1ea4814c8324e28cc2b86e6f44d26aaa09c8257ed58f50d1d2eada99adaa17da",
-        deprecated=True,
-    )
-    version(
-        "1.11.0",
-        sha256="a912ae3aee4c1a323910dbbb33c87a65f02bb30da94e64d81bb3203c2109fb83",
-        deprecated=True,
-    )
-    version(
-        "1.9.0",
-        sha256="918c716ac5eecb012a57452f45f5a064af7ea72f70765c7b0c60be4322b23557",
-        deprecated=True,
-    )
-    version(
-        "1.8.0",
-        sha256="47a3f7f1409612d015a3e6633853d31ec4e4b0681aecb7554be16ebf39c7f756",
-        deprecated=True,
-    )
-    version(
-        "1.6.7",
-        sha256="9384568279d29b9619a565c075f287a08bca8365e2af55e520af0c2f3595f8a2",
+        "1.39.0",
+        sha256="72d64b25df9f3cb1dcbc32323a669d86d947cf07d15bed91cfedca2a99fb3ef1",
         deprecated=True,
     )

-    depends_on("py-radical-utils@1.40:", type=("build", "run"), when="@1.40:")
-    depends_on("py-radical-pilot@1.40:", type=("build", "run"), when="@1.40:")
+    depends_on("py-radical-utils@1.90:1.99", type=("build", "run"), when="@1.90:")
+    depends_on("py-radical-pilot@1.90:1.99", type=("build", "run"), when="@1.90:")
+
+    depends_on("py-radical-utils@1.40:1.52", type=("build", "run"), when="@1.40:1.52")
+    depends_on("py-radical-pilot@1.40:1.52.1", type=("build", "run"), when="@1.40:1.52")

     depends_on("py-radical-utils@1.39", type=("build", "run"), when="@1.39")
     depends_on("py-radical-pilot@1.39", type=("build", "run"), when="@1.39")

-    depends_on("py-radical-pilot@1.18:1.20", type=("build", "run"), when="@1.20")
+    depends_on("python@3.7:", type=("build", "run"), when="@1.53:")
+    depends_on("python@3.6:", type=("build", "run"), when="@:1.52")

-    depends_on("py-radical-utils@1.12:1.20", type=("build", "run"), when="@1.12:1.20")
-    depends_on("py-radical-pilot@1.12:1.17", type=("build", "run"), when="@1.12:1.19")
-
-    depends_on("py-radical-utils@:1.11", type=("build", "run"), when="@:1.11")
-    depends_on("py-radical-pilot@:1.11", type=("build", "run"), when="@:1.11")
-
-    depends_on("py-packaging", type=("build", "run"), when="@:1.20")
-    depends_on("py-pika@0.13.0", type=("build", "run"), when="@:1.20")
-    depends_on("py-requests", type=("build", "run"), when="@:1.20")
-
-    depends_on("python@3.6:", type=("build", "run"))
     depends_on("py-setuptools", type="build")
+
+    def url_for_version(self, version):
+        if version >= Version("1.48.1"):
+            return super().url_for_version(version)
+        url = self.url.replace("_", ".")
+        return spack.url.substitute_version(url, self.url_version(version))
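This ``url_for_version`` override captures a PyPI packaging change: releases from 1.48.1 onward are published under the underscore-style sdist name (``radical_entk``), while older releases keep the dotted name (``radical.entk``). A self-contained sketch of the same mapping, using plain string handling in place of Spack's ``Version`` comparison and ``spack.url.substitute_version`` (the expanded URL below is hypothetical, for illustration only):

def sketch_url_for_version(new_style_url: str, version: str, cutoff: str) -> str:
    """Return the sdist URL for `version`: releases older than `cutoff`
    were published under the dotted project name, so switch the
    underscores back to dots (simplified numeric comparison)."""

    def as_tuple(v: str):
        return tuple(int(p) for p in v.split("."))

    url = new_style_url
    if as_tuple(version) < as_tuple(cutoff):
        url = url.replace("_", ".")
    # substitute the requested version for the one embedded in the URL
    return url.replace("1.92.0", version)


print(
    sketch_url_for_version(
        "https://pypi.io/packages/source/r/radical_entk/radical_entk-1.92.0.tar.gz",
        "1.47.0",
        "1.48.1",
    )
)
# -> https://pypi.io/packages/source/r/radical.entk/radical.entk-1.47.0.tar.gz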
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import spack.url
 from spack.package import *


@@ -13,40 +14,38 @@ class PyRadicalGtod(PythonPackage):

     homepage = "https://radical-cybertools.github.io"
     git = "https://github.com/radical-cybertools/radical.gtod.git"
-    pypi = "radical.gtod/radical.gtod-1.47.0.tar.gz"
+    pypi = "radical_gtod/radical_gtod-1.90.0.tar.gz"

     maintainers("andre-merzky")

     license("LGPL-3.0-or-later")

     version("develop", branch="devel")
-    version("1.47.0", sha256="52e75bf14faf352165ffa0d9e32ca472bd63f479020cd78f832baa34f8acfe6d")
-    version("1.39.0", sha256="254f1e805b58a33b93c6180f018904db25538710ec9e75b3a3a9969d7206ecf6")
+    version("1.90.0", sha256="70889239d3a60f8f323f62b942939665464fa368c4a00d0fbc49c878658f57b2")

     version(
-        "1.20.0",
-        sha256="8d0846de7a5d094146c01fbb7c137f343e4da06af51efafeba79dd3fdfe421dc",
+        "1.47.0",
+        sha256="52e75bf14faf352165ffa0d9e32ca472bd63f479020cd78f832baa34f8acfe6d",
         deprecated=True,
     )
     version(
-        "1.16.0",
-        sha256="1fe9da598a965c7194ed9c7df49d5b30632a11a7f9ece12152bea9aaa91bd4b8",
-        deprecated=True,
-    )
-    version(
-        "1.13.0",
-        sha256="15df4ae728a8878b111cfdedffb9457aecc8003c2cfbdf2c918dfcb6b836cc93",
-        deprecated=True,
-    )
-    version(
-        "1.6.7",
-        sha256="8d7d32e3d0bcf6d7cf176454a9892a46919b03e1ed96bee389380e6d75d6eff8",
+        "1.39.0",
+        sha256="254f1e805b58a33b93c6180f018904db25538710ec9e75b3a3a9969d7206ecf6",
         deprecated=True,
     )

     depends_on("c", type="build")  # generated

-    depends_on("py-radical-utils", type=("build", "run"), when="@1.13:")
+    depends_on("py-radical-utils@1.90:1.99", type=("build", "run"), when="@1.90:")
+    depends_on("py-radical-utils@:1.52", type=("build", "run"), when="@1.13:1.52")
+
+    depends_on("python@3.7:", type=("build", "run"), when="@1.53:")
+    depends_on("python@3.6:", type=("build", "run"), when="@:1.52")

-    depends_on("python@3.6:", type=("build", "run"))
     depends_on("py-setuptools", type="build")
+
+    def url_for_version(self, version):
+        if version >= Version("1.47.1"):
+            return super().url_for_version(version)
+        url = self.url.replace("_", ".")
+        return spack.url.substitute_version(url, self.url_version(version))
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import spack.url
 from spack.package import *


@@ -12,112 +13,50 @@ class PyRadicalPilot(PythonPackage):

     homepage = "https://radical-cybertools.github.io"
     git = "https://github.com/radical-cybertools/radical.pilot.git"
-    pypi = "radical.pilot/radical.pilot-1.47.0.tar.gz"
+    pypi = "radical_pilot/radical_pilot-1.92.0.tar.gz"

     maintainers("andre-merzky")

     license("MIT")

     version("develop", branch="devel")
-    version("1.47.0", sha256="58f41a0c42fe61381f15263a63424294732606ab7cee717540c0b730308f7908")
-    version("1.39.0", sha256="7ba0bfa3258b861db71e73d52f0915bfb8b3ac1099badacf69628307cab3b913")
+    version("1.92.0", sha256="5c65df02ec097f71648259db8ed8638580ea8e4c1c7f360879afff7f99e56134")

     version(
-        "1.20.0",
-        sha256="a0747e573a01a856dc330797dbee158f7e1cf8652001dc26f06a1d6c5e553bc6",
+        "1.47.0",
+        sha256="58f41a0c42fe61381f15263a63424294732606ab7cee717540c0b730308f7908",
         deprecated=True,
     )
     version(
-        "1.18.1",
-        sha256="fd6a0ffaa727b6b9bab35d8f2dc300bf4d9c4ff3541136d83560aa7b853d6100",
-        deprecated=True,
-    )
-    version(
-        "1.17.0",
-        sha256="0bfbb321a623a684e6694241aa3b7804208846515d23afa3b930553274f4a69f",
-        deprecated=True,
-    )
-    version(
-        "1.16.0",
-        sha256="057941a206ee96b62b97a63a507c1136b7fe821ae9f9e5eebe7949a3f53941f9",
-        deprecated=True,
-    )
-    version(
-        "1.15.1",
-        sha256="35c3b179a0bc85f52d2165e98e19acf2bf79037dd14f4d9ff3fc55ae0122d17e",
-        deprecated=True,
-    )
-    version(
-        "1.14.0",
-        sha256="462471065de25f6d6e8baee705790828444c2eebb2073f5faf67a8da800d15a9",
-        deprecated=True,
-    )
-    version(
-        "1.13.0",
-        sha256="5bd9eef1884ccca09c242ab6d1361588a442d9cd980613c66604ba140786bde5",
-        deprecated=True,
-    )
-    version(
-        "1.12.0",
-        sha256="a266355d30d838f20b6cac190ce589ca919acd41883ad06aec62386239475133",
-        deprecated=True,
-    )
-    version(
-        "1.11.2",
-        sha256="9d239f747589b8ae5d6faaea90ea5304b6f230a1edfd8d4efb440bc3799c8a9d",
-        deprecated=True,
-    )
-    version(
-        "1.10.2",
-        sha256="56e9d8b1ce7ed05eff471d7df660e4940f485027e5f353aa36fd17425846a499",
-        deprecated=True,
-    )
-    version(
-        "1.10.1",
-        sha256="003f4c519b991bded31693026b69dd51547a5a69a5f94355dc8beff766524b3c",
-        deprecated=True,
-    )
-    version(
-        "1.9.2",
-        sha256="7c872ac9103a2aed0c5cd46057048a182f672191e194e0fd42794b0012e6e947",
-        deprecated=True,
-    )
-    version(
-        "1.8.0",
-        sha256="a4c3bca163db61206e15a2d820d9a64e888da5c72672448ae975c26768130b9d",
-        deprecated=True,
-    )
-    version(
-        "1.6.8",
-        sha256="fa8fd3f348a68b54ee8338d5c5cf1a3d99c10c0b6da804424a839239ee0d313d",
-        deprecated=True,
-    )
-    version(
-        "1.6.7",
-        sha256="6ca0a3bd3cda65034fa756f37fa05681d5a43441c1605408a58364f89c627970",
+        "1.39.0",
+        sha256="7ba0bfa3258b861db71e73d52f0915bfb8b3ac1099badacf69628307cab3b913",
         deprecated=True,
     )

-    depends_on("py-radical-utils@1.44:", type=("build", "run"), when="@1.47:")
-    depends_on("py-radical-saga@1.40:", type=("build", "run"), when="@1.47:")
-    depends_on("py-radical-gtod", type=("build", "run"), when="@1.14:")
+    depends_on("py-radical-utils@1.90:1.99", type=("build", "run"), when="@1.90:")
+    depends_on("py-radical-gtod@1.90:1.99", type=("build", "run"), when="@1.90:")
+
+    depends_on("py-radical-utils@1.44:1.52", type=("build", "run"), when="@1.47:1.52.1")
+    depends_on("py-radical-saga@1.40:", type=("build", "run"), when="@1.47")
+    depends_on("py-radical-gtod@:1.52", type=("build", "run"), when="@1.14:1.52.1")

     depends_on("py-radical-utils@1.39", type=("build", "run"), when="@1.39")
     depends_on("py-radical-saga@1.39", type=("build", "run"), when="@1.39")
     depends_on("py-radical-gtod@1.39", type=("build", "run"), when="@1.39")

-    depends_on("py-radical-utils@1.12:1.20", type=("build", "run"), when="@1.12:1.20")
-    depends_on("py-radical-saga@1.12:1.20", type=("build", "run"), when="@1.12:1.20")
-
-    depends_on("py-radical-utils@1.8.4:1.11", type=("build", "run"), when="@1.11")
-    depends_on("py-radical-saga@1.8:1.11", type=("build", "run"), when="@1.11")
-
-    depends_on("py-radical-utils@:1.8.3", type=("build", "run"), when="@:1.10")
-    depends_on("py-radical-saga@:1.7", type=("build", "run"), when="@:1.10")
-
     depends_on("py-pymongo@:3", type=("build", "run"), when="@:1.39")

-    depends_on("python@3.6:", type=("build", "run"))
+    depends_on("python@3.7:", type=("build", "run"), when="@1.48:")
+    depends_on("python@3.6:", type=("build", "run"), when="@:1.47")

+    depends_on("py-requests", type=("build", "run"), when="@1.90:")
+    depends_on("py-psij-python", type=("build", "run"), when="@1.48:")
     depends_on("py-dill", type=("build", "run"), when="@1.14:")
     depends_on("py-setproctitle", type=("build", "run"))
     depends_on("py-setuptools", type="build")
+
+    def url_for_version(self, version):
+        if version >= Version("1.49.3"):
+            return super().url_for_version(version)
+        url = self.url.replace("_", ".")
+        return spack.url.substitute_version(url, self.url_version(version))
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import spack.url
 from spack.package import *


@@ -13,81 +14,39 @@ class PyRadicalSaga(PythonPackage):

     homepage = "https://radical-cybertools.github.io"
     git = "https://github.com/radical-cybertools/radical.saga.git"
-    pypi = "radical.saga/radical.saga-1.47.0.tar.gz"
+    pypi = "radical_saga/radical_saga-1.90.0.tar.gz"

     maintainers("andre-merzky")

     license("MIT")

     version("develop", branch="devel")
-    version("1.47.0", sha256="fc9a8fc060e708852ce6c40b08a65111f8d72b9ad5f8afef9ceaa866c1351233")
-    version("1.39.0", sha256="0fea8103d3f96c821c977bcb55ff1c6a9844de727539b182dda4cbc2570df791")
+    version("1.90.0", sha256="55758339f58087477574ed598e5a34cb99d045a540a74ba9e11b34eead4af78d")

     version(
-        "1.20.0",
-        sha256="d85f3ed564d9eaf3ead2aa349c854e944ca459492ebf88542404106fce4204ab",
+        "1.47.0",
+        sha256="fc9a8fc060e708852ce6c40b08a65111f8d72b9ad5f8afef9ceaa866c1351233",
         deprecated=True,
     )
     version(
-        "1.18.0",
-        sha256="544d4ffafc0b311151724db371ee11e27744103068748962866351ce31ccb810",
-        deprecated=True,
-    )
-    version(
-        "1.17.0",
-        sha256="e48b42c232ac0ad53a410c1317746a5f15214fd3108fad773d098714fb4c40a0",
-        deprecated=True,
-    )
-    version(
-        "1.16.0",
-        sha256="d269e2e7043f05e8f1d45ca3d50be973857150d7928d53bedd6844f39b224786",
-        deprecated=True,
-    )
-    version(
-        "1.14.0",
-        sha256="337d8778bf392fd54845b1876de903c4c12f6fa938ef16220e1847561b66731a",
-        deprecated=True,
-    )
-    version(
-        "1.13.0",
-        sha256="90d8e875f48402deab87314ea5c08d591264fb576c461bd9663ac611fc2e547e",
-        deprecated=True,
-    )
-    version(
-        "1.12.0",
-        sha256="769c83bab95c0e3ef970da0fa6cb30878d7a31216ff8b542e894686357f7cb5b",
-        deprecated=True,
-    )
-    version(
-        "1.11.1",
-        sha256="edb1def63fadd192a4be4f508e9e65669745843e158ce27a965bf2f43d18b84d",
-        deprecated=True,
-    )
-    version(
-        "1.8.0",
-        sha256="6edf94897102a08dcb994f7f107a0e25e7f546a0a9488af3f8b92ceeeaaf58a6",
-        deprecated=True,
-    )
-    version(
-        "1.6.10",
-        sha256="8fe7e281e9f81234f34f5c7c7986871761e9e37230d2a874c65d18daeccd976a",
-        deprecated=True,
-    )
-    version(
-        "1.6.8",
-        sha256="d5e9f95a027087fb637cef065ff3af848e5902e403360189e36c9aa7c3f6f29b",
+        "1.39.0",
+        sha256="0fea8103d3f96c821c977bcb55ff1c6a9844de727539b182dda4cbc2570df791",
         deprecated=True,
     )

-    depends_on("py-radical-utils@1.40:", type=("build", "run"), when="@1.40:")
-
+    depends_on("py-radical-utils@1.90:1.99", type=("build", "run"), when="@1.90:")
+    depends_on("py-radical-utils@1.40:1.52", type=("build", "run"), when="@1.40:1.52")
     depends_on("py-radical-utils@1.39", type=("build", "run"), when="@1.39")

-    depends_on("py-radical-utils@1.12:1.20", type=("build", "run"), when="@1.12:1.20")
+    depends_on("python@3.7:", type=("build", "run"), when="@1.53:")
+    depends_on("python@3.6:", type=("build", "run"), when="@:1.52")

-    depends_on("py-radical-utils@:1.11", type=("build", "run"), when="@:1.11")
-
-    depends_on("python@3.6:", type=("build", "run"))
-    depends_on("py-apache-libcloud", type=("build", "run"))
+    depends_on("py-apache-libcloud", type=("build", "run"), when="@:1.60")
     depends_on("py-parse", type=("build", "run"))
     depends_on("py-setuptools", type="build")
+
+    def url_for_version(self, version):
+        if version >= Version("1.47.1"):
+            return super().url_for_version(version)
+        url = self.url.replace("_", ".")
+        return spack.url.substitute_version(url, self.url_version(version))
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import spack.url
 from spack.package import *


@@ -11,92 +12,29 @@ class PyRadicalUtils(PythonPackage):

     homepage = "https://radical-cybertools.github.io"
     git = "https://github.com/radical-cybertools/radical.utils.git"
-    pypi = "radical.utils/radical.utils-1.47.0.tar.gz"
+    pypi = "radical_utils/radical_utils-1.91.1.tar.gz"

     maintainers("andre-merzky")

     license("MIT")

     version("develop", branch="devel")
-    version("1.47.0", sha256="f85a4a452561dd018217f1ed38d97c9be96fa448437cfeb1b879121174fd5311")
-    version("1.39.0", sha256="fade87ee4c6ccf335d5e26d5158ce22ee891e4d4c576464274999ddf36dc4977")
+    version("1.91.1", sha256="5293f375f699161e451982b2e7668613c24e2562252f65e765ebbc83d8ae0118")

     version(
-        "1.20.0",
-        sha256="9b39dd616d70c387fb3f97d3510a506bac92c159b6482c3aebd3d11eeaeebcc9",
+        "1.47.0",
+        sha256="f85a4a452561dd018217f1ed38d97c9be96fa448437cfeb1b879121174fd5311",
         deprecated=True,
     )
     version(
-        "1.18.1",
-        sha256="5b3ab15417a1ef82f63f8a77763a177d6bc59b61a80823be0df8c0f7502d9b3e",
-        deprecated=True,
-    )
-    version(
-        "1.17.0",
-        sha256="ee3fec190e89522f648e191d2e380689842746f1eacda27772a9471215908cfe",
-        deprecated=True,
-    )
-    version(
-        "1.16.0",
-        sha256="6eddfba5c73e71c7c5ddeba6c8ebe5260616d66b26d1f7123613c3cd543d61e9",
-        deprecated=True,
-    )
-    version(
-        "1.15.0",
-        sha256="22e5028de75c0a471bfed587d437dded214625b150deaca0289474a3619d395b",
-        deprecated=True,
-    )
-    version(
-        "1.14.0",
-        sha256="f61f0e335bbdc51e4023458e7e6959551686ebf170adc5353220dcc83fd677c9",
-        deprecated=True,
-    )
-    version(
-        "1.13.0",
-        sha256="84c1cad8be988dad7fb2b8455d19a4fb0c979fab02c5b7a7b531a4ae8fe52580",
-        deprecated=True,
-    )
-    version(
-        "1.12.0",
-        sha256="1474dbe4d94cdf3e992e1711e10d73dffa352c1c29ff51d81c1686e5081e9398",
-        deprecated=True,
-    )
-    version(
-        "1.11.1",
-        sha256="4fec3f6d45d7309c891ab4f8aeda0257f06f9a8404ca87c7eb643cd8d7415804",
-        deprecated=True,
-    )
-    version(
-        "1.11.0",
-        sha256="81537c2a2f8a1a409b4a1aac67323c6b49cc994e2b70052425e2bc8d4622e2de",
-        deprecated=True,
-    )
-    version(
-        "1.9.1",
-        sha256="0837d75e7f9dcce5ba5ac63151ab1683d6ba9ab3954b076d1f170cc4a3cdb1b4",
-        deprecated=True,
-    )
-    version(
-        "1.8.4",
-        sha256="4777ba20e9f881bf3e73ad917638fdeca5a4b253d57ed7b321a07f670e3f737b",
-        deprecated=True,
-    )
-    version(
-        "1.8.0",
-        sha256="8582c65593f51d394fc263c6354ec5ad9cc7173369dcedfb2eef4f5e8146cf03",
-        deprecated=True,
-    )
-    version(
-        "1.6.7",
-        sha256="552f6c282f960ccd9d2401d686b0b3bfab35dfa94a26baeb2d3b4e45211f05a9",
+        "1.39.0",
+        sha256="fade87ee4c6ccf335d5e26d5158ce22ee891e4d4c576464274999ddf36dc4977",
         deprecated=True,
     )

-    depends_on("py-radical-gtod", type=("build", "run"), when="@:1.13")
+    depends_on("python@3.7:", type=("build", "run"), when="@1.53:")
+    depends_on("python@3.6:", type=("build", "run"), when="@:1.52")

     depends_on("py-pymongo@:3", type=("build", "run"), when="@:1.39")

-    depends_on("python@3.6:", type=("build", "run"))
     depends_on("py-colorama", type=("build", "run"))
     depends_on("py-msgpack", type=("build", "run"))
     depends_on("py-netifaces", type=("build", "run"))
@@ -108,3 +46,9 @@ class PyRadicalUtils(PythonPackage):
     depends_on("py-setuptools")
     # https://github.com/radical-cybertools/radical.utils/issues/403
     depends_on("py-setuptools@:69.2", when="@:1.51")
+
+    def url_for_version(self, version):
+        if version >= Version("1.48.1"):
+            return super().url_for_version(version)
+        url = self.url.replace("_", ".")
+        return spack.url.substitute_version(url, self.url_version(version))
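The same ``url_for_version`` override appears in all five radical packages; only the cutoff differs (1.48.1 for radical.entk and radical.utils, 1.47.1 for radical.gtod and radical.saga, 1.49.3 for radical.pilot), matching the first release each project published under the underscore-style sdist name.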
var/spack/repos/builtin/packages/py-sphinxcontrib-plantuml/package.py (new file)
@@ -0,0 +1,23 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PySphinxcontribPlantuml(PythonPackage):
    """PlantUML for Sphinx."""

    homepage = "https://github.com/sphinx-contrib/plantuml/"
    pypi = "sphinxcontrib-plantuml/sphinxcontrib-plantuml-0.30.tar.gz"

    maintainers("greenc-FNAL", "knoepfel", "marcpaterno")

    license("BSD-2-Clause", checked_by="greenc-FNAL")

    version("0.30", sha256="2a1266ca43bddf44640ae44107003df4490de2b3c3154a0d627cfb63e9a169bf")

    depends_on("py-setuptools", type="build")

    depends_on("plantuml", type=("build", "run"))
    depends_on("py-sphinx@1.6:", type=("build", "run"))
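With both packages installed, enabling the extension in a Sphinx project is a one-line ``conf.py`` change. A sketch, assuming the upstream extension name ``sphinxcontrib.plantuml`` and the ``plantuml`` wrapper from the package above being on ``PATH``:

# conf.py (hypothetical fragment, not part of this changeset)
extensions = ["sphinxcontrib.plantuml"]

# Point the extension at the wrapper installed by the plantuml package;
# PLANTUML_JAR_LOCATION is set by its setup_run_environment().
plantuml = "plantuml"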
@@ -34,6 +34,9 @@ class Seacas(CMakePackage):

     # ###################### Versions ##########################
     version("master", branch="master")
+    version(
+        "2025-02-27", sha256="3906fedf17e0a5f1c6f941cb94b56134f41abe4da89f4ec25766fc33b327f521"
+    )
     version(
         "2024-08-15", sha256="c85130b0dac5ab9a08dcb53c8ccff478122d72b08bd41d99c0adfddc5eb18a52"
     )
@@ -207,9 +207,10 @@ class Vtk(CMakePackage):
     depends_on("seacas+mpi", when="+mpi")
     depends_on("seacas~mpi", when="~mpi")
     depends_on("seacas@2021-05-12:2022-10-14", when="@9.1")
-    # vtk@9.2: need Ioss::Utils::get_debug_stream() which 2022-10-14 and later provide
-    depends_on("seacas@2022-10-14:", when="@9.2:9.3")
-    depends_on("seacas@2024-06-27:", when="@9.4:")
+    # vtk@9.2: need Ioss::Utils::get_debug_stream() which only 2022-10-14 provides,
+    # and to be safe against other issues, make them build with this version only:
+    depends_on("seacas@2022-10-14", when="@9.2:9.3")
+    depends_on("seacas@2024-06-27", when="@9.4:")

     depends_on("nlohmann-json", when="@9.2:")