Compare commits
67 Commits
dag-ordere...features/v

| Author | SHA1 | Date |
|---|---|---|
| | 3ed077fc39 | |
| | 847d172bb9 | |
| | 8b004c29e1 | |
| | 8b1731688b | |
| | 27775163ca | |
| | b1cf512d78 | |
| | 0bfd06d0b6 | |
| | 367bd4d670 | |
| | 8f359df2d3 | |
| | cc2ae9f270 | |
| | 75f1077b4b | |
| | b1e6507060 | |
| | 2981b4e5ee | |
| | 6847d73504 | |
| | eeba92e788 | |
| | 84917cfa79 | |
| | 14e327be23 | |
| | c329f7de33 | |
| | f686a90779 | |
| | 918bb63c3a | |
| | 8d0cbb9812 | |
| | 95a76de7d5 | |
| | 0b388ff930 | |
| | d25ac66a5d | |
| | 682ffd30ac | |
| | af3dba8db6 | |
| | 08a24b5e03 | |
| | 65fbc5a461 | |
| | 80f3888cc8 | |
| | 5845750a10 | |
| | c529a0fddf | |
| | d7265d7ddc | |
| | df0be87ada | |
| | af2aeb275f | |
| | 4fef0bac20 | |
| | b32a07bbe1 | |
| | 8709dbc232 | |
| | 54b9e3ce07 | |
| | d35c24a3ed | |
| | d4d200952e | |
| | 902a40bf72 | |
| | a2d51cd4db | |
| | f8e433689a | |
| | 5f8c09fd33 | |
| | 8eb4807615 | |
| | 047a481e48 | |
| | 051abfb894 | |
| | 0447ba1213 | |
| | ff3dbe9394 | |
| | 90d00f8a59 | |
| | fdc6bd3f1c | |
| | d17aaf8729 | |
| | ef6699b874 | |
| | f5418ac344 | |
| | 909a5b1d83 | |
| | 9a0884bfed | |
| | a9d5db572c | |
| | 16dbbb9b26 | |
| | 3695200a3c | |
| | a1b2ba412b | |
| | c81f1235a9 | |
| | eaa16338de | |
| | 115b6b2a51 | |
| | cd2d6a6397 | |
| | c5086a5d0e | |
| | b5fc217dc2 | |
| | a4b8753456 | |
@@ -116,7 +116,7 @@ creates a simple python file:

# FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers = ["github_user1", "github_user2"]
# maintainers("github_user1", "github_user2")

version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")

@@ -268,7 +268,7 @@ generates a boilerplate template for your package, and opens up the new

# FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers = ["github_user1", "github_user2"]
# maintainers("github_user1", "github_user2")

version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c")

@@ -319,14 +319,8 @@ The rest of the tasks you need to do are as follows:

#. Add a comma-separated list of maintainers.

The ``maintainers`` field is a list of GitHub accounts of people
who want to be notified any time the package is modified. When a
pull request is submitted that updates the package, these people
will be requested to review the PR. This is useful for developers
who maintain a Spack package for their own software, as well as
users who rely on a piece of software and want to ensure that the
package doesn't break. It also gives users a list of people to
contact for help when someone reports a build error with the package.
Add a list of Github accounts of people who want to be notified
any time the package is modified. See :ref:`package_maintainers`.

#. Add ``depends_on()`` calls for the package's dependencies.
@@ -497,6 +491,31 @@ some examples:

In general, you won't have to remember this naming convention because
:ref:`cmd-spack-create` and :ref:`cmd-spack-edit` handle the details for you.

.. _package_maintainers:

-----------
Maintainers
-----------

Each package in Spack may have one or more maintainers, i.e. one or more
GitHub accounts of people who want to be notified any time the package is
modified.

When a pull request is submitted that updates the package, these people will
be requested to review the PR. This is useful for developers who maintain a
Spack package for their own software, as well as users who rely on a piece of
software and want to ensure that the package doesn't break. It also gives users
a list of people to contact for help when someone reports a build error with
the package.

To add maintainers to a package, simply declare them with the ``maintainers`` directive:

.. code-block:: python

maintainers("user1", "user2")

The list of maintainers is additive, and includes all the accounts eventually declared in base classes.
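For illustration, a standalone sketch (plain Python, hypothetical class and helper names; in a real package only the ``maintainers(...)`` directive shown above is needed) of the additive behavior across base classes:

```python
# Sketch of the additive maintainers semantics: each maintainers(...) call
# merges new names with whatever the class already inherited, without
# mutating the base class list.
def add_maintainers(pkg_class, *names):
    inherited = getattr(pkg_class, "maintainers", [])
    # copy before merging so the base class list stays untouched
    pkg_class.maintainers = sorted(set(list(inherited) + list(names)))


class MyBaseLib:            # hypothetical base package
    pass


class MyLib(MyBaseLib):     # hypothetical subclass
    pass


add_maintainers(MyBaseLib, "user1")
add_maintainers(MyLib, "user2", "user3")

print(MyBaseLib.maintainers)  # ['user1']
print(MyLib.maintainers)      # ['user1', 'user2', 'user3'] -- additive across base classes
```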
-----------------
Trusted Downloads
-----------------
@@ -47,7 +47,7 @@

import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.relocate import utf8_paths_to_single_binary_regex
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which

@@ -1730,16 +1730,16 @@ def is_backup_file(file):

# For all buildcaches
# relocate the install prefixes in text files including dependencies
relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
relocate.relocate_text(text_names, prefix_to_prefix_text)

# relocate the install prefixes in binary files including dependencies
relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
else:
if old_spack_prefix != new_spack_prefix:
relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
relocate.relocate_text(text_names, prefix_to_prefix_text)


def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
@@ -120,6 +120,7 @@ def std_meson_args(self):

of package writers.
"""
# standard Meson arguments

std_meson_args = MesonBuilder.std_args(self.pkg)
std_meson_args += getattr(self, "meson_flag_args", [])
return std_meson_args

@@ -182,7 +183,10 @@ def meson_args(self):

def meson(self, pkg, spec, prefix):
"""Run ``meson`` in the build directory"""
options = [os.path.abspath(self.root_mesonlists_dir)]
options = []
if self.spec["meson"].satisfies("@0.64:"):
options.append("setup")
options.append(os.path.abspath(self.root_mesonlists_dir))
options += self.std_meson_args
options += self.meson_args()
with fs.working_dir(self.build_directory, create=True):
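As a side note, a small standalone sketch of the option-building change above (helper and argument names are hypothetical): Meson 0.64 and later expects an explicit ``setup`` subcommand before the source directory.

```python
# Sketch of the new option ordering; `supports_setup` stands in for
# self.spec["meson"].satisfies("@0.64:") in the real builder.
def meson_options(root_mesonlists_dir, supports_setup, std_args, extra_args):
    options = []
    if supports_setup:
        options.append("setup")  # meson >= 0.64: "meson setup <srcdir> ..."
    options.append(root_mesonlists_dir)
    options += list(std_args) + list(extra_args)
    return options


print(meson_options("/path/to/source", True, ["--prefix=/opt/pkg"], ["-Dbuildtype=release"]))
# ['setup', '/path/to/source', '--prefix=/opt/pkg', '-Dbuildtype=release']
```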
@@ -267,7 +267,7 @@ def update_external_dependencies(self, extendee_spec=None):

python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, ("build", "link", "run"))
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))

def get_external_python_for_prefix(self):
"""

@@ -138,7 +138,7 @@ class ROCmPackage(PackageBase):

depends_on("llvm-amdgpu", when="+rocm")
depends_on("hsa-rocr-dev", when="+rocm")
depends_on("hip", when="+rocm")
depends_on("hip +rocm", when="+rocm")

conflicts("^blt@:0.3.6", when="+rocm")
@@ -70,7 +70,7 @@ class {class_name}({base_class_name}):

# FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers = ["github_user1", "github_user2"]
# maintainers("github_user1", "github_user2")

{versions}
@@ -46,6 +46,14 @@ def setup_parser(subparser):

)


def shift(asp_function):
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
if not asp_function.args:
raise ValueError(f"Can't shift ASP function with no arguments: {str(asp_function)}")
first, *rest = asp_function.args
return asp.AspFunction(first, rest)


def compare_specs(a, b, to_string=False, color=None):
"""
Generate a comparison, including diffs (for each side) and an intersection.
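To make the new helper concrete, here is a self-contained sketch; a namedtuple stands in for the real ``asp.AspFunction`` (that stand-in is an assumption for illustration only):

```python
from collections import namedtuple

# Minimal stand-in for spack.solver.asp.AspFunction, just enough to show what
# shift() does with its arguments.
AspFunction = namedtuple("AspFunction", ["name", "args"])


def shift(asp_function):
    """Transforms attr("foo", "bar") into foo("bar")."""
    if not asp_function.args:
        raise ValueError(f"Can't shift ASP function with no arguments: {asp_function}")
    first, *rest = asp_function.args
    return AspFunction(first, tuple(rest))


print(shift(AspFunction("attr", ("node", "zlib"))))
# AspFunction(name='node', args=('zlib',))
```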
@@ -71,7 +79,7 @@ def compare_specs(a, b, to_string=False, color=None):

# get facts for specs, making sure to include build dependencies of concrete
# specs and to descend into dependency hashes so we include all facts.
a_facts = set(
func.shift()
shift(func)
for func in setup.spec_clauses(
a,
body=True,

@@ -81,7 +89,7 @@ def compare_specs(a, b, to_string=False, color=None):

if func.name == "attr"
)
b_facts = set(
func.shift()
shift(func)
for func in setup.spec_clauses(
b,
body=True,
@@ -39,12 +39,19 @@

compiler flags:
@g{cflags="flags"} cppflags, cflags, cxxflags,
fflags, ldflags, ldlibs
@g{cflags=="flags"} propagate flags to package dependencies
cppflags, cflags, cxxflags, fflags,
ldflags, ldlibs

variants:
@B{+variant} enable <variant>
@B{++variant} propagate enable <variant>
@r{-variant} or @r{~variant} disable <variant>
@r{--variant} or @r{~~variant} propagate disable <variant>
@B{variant=value} set non-boolean <variant> to <value>
@B{variant==value} propagate non-boolean <variant> to <value>
@B{variant=value1,value2,value3} set multi-value <variant> values
@B{variant==value1,value2,value3} propagate multi-value <variant> values

architecture variants:
@m{platform=platform} linux, darwin, cray, etc.

@@ -68,6 +75,8 @@

hdf5 @c{@1.8:} @g{%gcc} hdf5 1.8 or higher built with gcc
hdf5 @B{+mpi} hdf5 with mpi enabled
hdf5 @r{~mpi} hdf5 with mpi disabled
hdf5 @B{++mpi} hdf5 with mpi enabled and propagates
hdf5 @r{~~mpi} hdf5 with mpi disabled and propagates
hdf5 @B{+mpi} ^mpich hdf5 with mpi, using mpich
hdf5 @B{+mpi} ^openmpi@c{@1.7} hdf5 with mpi, using openmpi 1.7
boxlib @B{dim=2} boxlib built for 2 dimensions
@@ -97,41 +97,28 @@ def setup_parser(subparser):

def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, *_ = min(result.answers)

# dump the solutions as concretized specs
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(name) for name in result.criteria)
max_depth = max(len(v) for v in result.criteria.values() if isinstance(v, list))
maxlen = max(len(s[2]) for s in result.criteria)
color.cprint("@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " "))

header = "@*{"
header += "".join(f"{depth:<4}" for depth in range(max_depth))
header += "Criterion}"
color.cprint(header)

# make non-zero numbers red
def highlight(n, c):
return color.colorize(f"@{c}{{{n:<4}}}" if n > 0 else f"{n:<4}")

for i, (name, cost) in enumerate(result.criteria.items(), 1):
colored_name = name.replace("build:", "@c{build:}")
colored_name = colored_name.replace("reuse:", "@B{reuse:}")
colored_name = colored_name.replace("fixed:", "@G{fixed:}")
colored_name = color.colorize(colored_name)

if isinstance(cost, int):
print(highlight(cost, "G") + " " * (max_depth - 1) + colored_name)
else:
print(
"".join(highlight(c, "c" if "build:" in name else "B") for c in cost)
+ colored_name
fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
fmt
% (
i,
name,
"-" if build_cost is None else installed_cost,
installed_cost if build_cost is None else build_cost,
)

)
print()

# dump the solutions as concretized specs
if "solutions" in show:
for spec in result.specs:
# With -y, just print YAML to output.
@@ -162,7 +162,7 @@ def entries_to_specs(entries):

continue
parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, deptypes)
parent_spec._add_dependency(dep_spec, deptypes=deptypes)

return spec_dict
@@ -107,6 +107,14 @@

]


def reader(version):
reader_cls = {
Version("5"): spack.spec.SpecfileV1,
Version("6"): spack.spec.SpecfileV3,
}
return reader_cls[version]


def _now():
"""Returns the time since the epoch"""
return time.time()

@@ -674,7 +682,7 @@ def _write_to_file(self, stream):

except (TypeError, ValueError) as e:
raise sjson.SpackJSONError("error writing JSON database:", str(e))

def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
def _read_spec_from_dict(self, spec_reader, hash_key, installs, hash=ht.dag_hash):
"""Recursively construct a spec from a hash in a YAML database.

Does not do any locking.

@@ -692,7 +700,7 @@ def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):

spec_dict[hash.name] = hash_key

# Build spec from dict first.
spec = spack.spec.Spec.from_node_dict(spec_dict)
spec = spec_reader.from_node_dict(spec_dict)
return spec

def db_for_spec_hash(self, hash_key):

@@ -732,7 +740,7 @@ def query_local_by_spec_hash(self, hash_key):

with self.read_transaction():
return self._data.get(hash_key, None)

def _assign_dependencies(self, hash_key, installs, data):
def _assign_dependencies(self, spec_reader, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
spec = data[hash_key].spec

@@ -742,7 +750,7 @@ def _assign_dependencies(self, hash_key, installs, data):

spec_node_dict = spec_node_dict[spec.name]
if "dependencies" in spec_node_dict:
yaml_deps = spec_node_dict["dependencies"]
for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(yaml_deps):
for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
# It is important that we always check upstream installations
# in the same order, and that we always check the local
# installation first: if a downstream Spack installs a package

@@ -765,7 +773,7 @@ def _assign_dependencies(self, hash_key, installs, data):

tty.warn(msg)
continue

spec._add_dependency(child, dtypes)
spec._add_dependency(child, deptypes=dtypes)

def _read_from_file(self, filename):
"""Fill database from file, do not maintain old data.

@@ -797,6 +805,7 @@ def check(cond, msg):

# TODO: better version checking semantics.
version = Version(db["version"])
spec_reader = reader(version)
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:

@@ -832,7 +841,7 @@ def invalid_record(hash_key, error):

for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
spec = self._read_spec_from_dict(hash_key, installs)
spec = self._read_spec_from_dict(spec_reader, hash_key, installs)

# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.

@@ -848,7 +857,7 @@ def invalid_record(hash_key, error):

# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
self._assign_dependencies(hash_key, installs, data)
self._assign_dependencies(spec_reader, hash_key, installs, data)
except MissingDependenciesError:
raise
except Exception as e:

@@ -1167,7 +1176,7 @@ def _add(

for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, dep.deptypes)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
if not upstream:
record.ref_count += 1
@@ -54,6 +54,7 @@ class OpenMpi(Package):

"conflicts",
"depends_on",
"extends",
"maintainers",
"provides",
"patch",
"variant",

@@ -767,6 +768,22 @@ def build_system(*values, **kwargs):

)


@directive(dicts=())
def maintainers(*names: str):
"""Add a new maintainer directive, to specify maintainers in a declarative way.

Args:
names: GitHub username for the maintainer
"""

def _execute_maintainer(pkg):
maintainers_from_base = getattr(pkg, "maintainers", [])
# Here it is essential to copy, otherwise we might add to an empty list in the parent
pkg.maintainers = list(sorted(set(maintainers_from_base + list(names))))

return _execute_maintainer


class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
@@ -104,6 +104,15 @@ def default_manifest_yaml():

#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 4


READER_CLS = {
1: spack.spec.SpecfileV1,
2: spack.spec.SpecfileV1,
3: spack.spec.SpecfileV2,
4: spack.spec.SpecfileV3,
}


# Magic names
# The name of the standalone spec list in the manifest yaml
user_speclist_name = "specs"
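For orientation, a standalone sketch of how the lockfile version selects a reader and how unknown versions are rejected, mirroring the `_read_lockfile_dict` change further down (stub classes stand in for the real ``spack.spec.Specfile*`` readers):

```python
# Stub reader classes; in Spack these are spack.spec.SpecfileV1/V2/V3.
class SpecfileV1: ...
class SpecfileV2: ...
class SpecfileV3: ...

READER_CLS = {1: SpecfileV1, 2: SpecfileV1, 3: SpecfileV2, 4: SpecfileV3}


def reader_for(lockfile_version):
    try:
        return READER_CLS[lockfile_version]
    except KeyError:
        raise RuntimeError(f"cannot read environment lockfiles using the v{lockfile_version} format")


assert reader_for(4) is SpecfileV3
# reader_for(99) would raise RuntimeError for an unknown lockfile format
```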
@@ -1436,7 +1445,7 @@ def _concretize_separately(self, tests=False):

if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), deptype="test"
test_dependency.copy(), deptypes="test"
)

results = [

@@ -1942,7 +1951,7 @@ def _to_lockfile_dict(self):

"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": lockfile_format_version,
"specfile-version": spack.spec.specfile_format_version,
"specfile-version": spack.spec.SPECFILE_FORMAT_VERSION,
},
# users specs + hashes are the 'roots' of the environment
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],

@@ -1975,10 +1984,19 @@ def _read_lockfile_dict(self, d):

# Track specs by their DAG hash, allows handling DAG hash collisions
first_seen = {}
current_lockfile_format = d["_meta"]["lockfile-version"]
try:
reader = READER_CLS[current_lockfile_format]
except KeyError:
msg = (
f"Spack {spack.__version__} cannot read environment lockfiles using the "
f"v{current_lockfile_format} format"
)
raise RuntimeError(msg)

# First pass: Put each spec in the map ignoring dependencies
for lockfile_key, node_dict in json_specs_by_hash.items():
spec = Spec.from_node_dict(node_dict)
spec = reader.from_node_dict(node_dict)
if not spec._hash:
# in v1 lockfiles, the hash only occurs as a key
spec._hash = lockfile_key

@@ -1987,8 +2005,11 @@ def _read_lockfile_dict(self, d):

# Second pass: For each spec, get its dependencies from the node dict
# and add them to the spec
for lockfile_key, node_dict in json_specs_by_hash.items():
for _, dep_hash, deptypes, _ in Spec.dependencies_from_node_dict(node_dict):
specs_by_hash[lockfile_key]._add_dependency(specs_by_hash[dep_hash], deptypes)
name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], deptypes=deptypes
)

# Traverse the root specs one at a time in the order they appear.
# The first time we see each DAG hash, that's the one we want to
@@ -95,6 +95,22 @@ def _ensure_one_stage_entry(stage_path):

return os.path.join(stage_path, stage_entries[0])


def _filesummary(path, print_bytes=16):
try:
n = print_bytes
with open(path, "rb") as f:
size = os.fstat(f.fileno()).st_size
if size <= 2 * n:
short_contents = f.read(2 * n)
else:
short_contents = f.read(n)
f.seek(-n, 2)
short_contents += b"..." + f.read(n)
return size, short_contents
except OSError:
return 0, b""


def fetcher(cls):
"""Decorator used to register fetch strategies."""
all_strategies.append(cls)

@@ -500,9 +516,14 @@ def check(self):

checker = crypto.Checker(self.digest)
if not checker.check(self.archive_file):
# On failure, provide some information about the file size and
# contents, so that we can quickly see what the issue is (redirect
# was not followed, empty file, text instead of binary, ...)
size, contents = _filesummary(self.archive_file)
raise ChecksumError(
"%s checksum failed for %s" % (checker.hash_name, self.archive_file),
"Expected %s but got %s" % (self.digest, checker.sum),
f"{checker.hash_name} checksum failed for {self.archive_file}",
f"Expected {self.digest} but got {checker.sum}. "
f"File size = {size} bytes. Contents = {contents!r}",
)

@_needs_stage
@@ -90,11 +90,11 @@ def view_copy(src, dst, view, spec=None):

prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)

if spack.relocate.is_binary(dst):
spack.relocate.unsafe_relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
else:
prefix_to_projection[spack.store.layout.root] = view._root
prefix_to_projection[orig_sbang] = new_sbang
spack.relocate.unsafe_relocate_text(files=[dst], prefixes=prefix_to_projection)
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
stat = os.stat(src)
os.chown(dst, stat.st_uid, stat.st_gid)

@@ -283,7 +283,7 @@ def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spe

if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

root_spec._add_dependency(dependency, ())
root_spec._add_dependency(dependency, deptypes=())

else:
break
@@ -292,8 +292,8 @@ def from_json(stream, repository):

index.providers = _transform(
providers,
lambda vpkg, plist: (
spack.spec.Spec.from_node_dict(vpkg),
set(spack.spec.Spec.from_node_dict(p) for p in plist),
spack.spec.SpecfileV3.from_node_dict(vpkg),
set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
),
)
return index
@@ -4,7 +4,6 @@

# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import itertools
import multiprocessing.pool
import os
import re
import shutil

@@ -27,6 +26,8 @@

import spack.util.elf as elf
import spack.util.executable as executable

from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

is_macos = str(spack.platforms.real_host()) == "darwin"

@@ -46,49 +47,6 @@ def __init__(self, file_path, root_path):

)


class BinaryStringReplacementError(spack.error.SpackError):
def __init__(self, file_path, old_len, new_len):
"""The size of the file changed after binary path substitution

Args:
file_path (str): file with changing size
old_len (str): original length of the file
new_len (str): length of the file after substitution
"""
super(BinaryStringReplacementError, self).__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
"when it should have remanined the same." % (file_path, old_len, new_len)
)


class BinaryTextReplaceError(spack.error.SpackError):
def __init__(self, msg):
msg += (
" To fix this, compile with more padding "
"(config:install_tree:padded_length), or install to a shorter prefix."
)
super(BinaryTextReplaceError, self).__init__(msg)


class CannotGrowString(BinaryTextReplaceError):
def __init__(self, old, new):
msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
super(CannotGrowString, self).__init__(msg)


class CannotShrinkCString(BinaryTextReplaceError):
def __init__(self, old, new, full_old_string):
# Just interpolate binary string to not risk issues with invalid
# unicode, which would be really bad user experience: error in error.
# We have no clue if we actually deal with a real C-string nor what
# encoding it has.
msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
old, new, full_old_string
)
super(CannotShrinkCString, self).__init__(msg)


@memoized
def _patchelf():
"""Return the full path to the patchelf binary, if available, else None."""
@@ -450,108 +408,6 @@ def needs_text_relocation(m_type, m_subtype):

return m_type == "text"


def apply_binary_replacements(f, prefix_to_prefix, suffix_safety_size=7):
"""
Given a file opened in rb+ mode, apply the string replacements as
specified by an ordered dictionary of prefix to prefix mappings. This
method takes special care of null-terminated C-strings. C-string constants
are problematic because compilers and linkers optimize readonly strings for
space by aliasing those that share a common suffix (only suffix since all
of them are null terminated). See https://github.com/spack/spack/pull/31739
and https://github.com/spack/spack/pull/32253 for details. Our logic matches
the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
If no null terminator is found, we simply pad with leading /, assuming that
it's a long C-string; the full C-string after replacement has a large suffix
in common with its original value.
If there *is* a null terminator we can do the same as long as the replacement
has a sufficiently long common suffix with the original prefix.
As a last resort when the replacement does not have a long enough common suffix,
we can try to shorten the string, but this only works if the new length is
sufficiently short (typically the case when going from large padding -> normal path)
If the replacement string is longer, or all of the above fails, we error out.

Arguments:
f: file opened in rb+ mode
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new
suffix_safety_size (int): in case of null terminated strings, what size
of the suffix should remain to avoid aliasing issues?
"""
assert suffix_safety_size >= 0
assert f.tell() == 0

# Look for exact matches of our paths, and also look if there's a null terminator
# soon after (this covers the case where we search for /abc but match /abc/ with
# a trailing dir seperator).
regex = re.compile(
b"("
+ b"|".join(re.escape(p) for p in prefix_to_prefix.keys())
+ b")([^\0]{0,%d}\0)?" % suffix_safety_size
)

# We *could* read binary data in chunks to avoid loading all in memory,
# but it's nasty to deal with matches across boundaries, so let's stick to
# something simple.

for match in regex.finditer(f.read()):
# The matching prefix (old) and its replacement (new)
old = match.group(1)
new = prefix_to_prefix[old]

# Did we find a trailing null within a N + 1 bytes window after the prefix?
null_terminated = match.end(0) > match.end(1)

# Suffix string length, excluding the null byte
# Only makes sense if null_terminated
suffix_strlen = match.end(0) - match.end(1) - 1

# How many bytes are we shrinking our string?
bytes_shorter = len(old) - len(new)

# We can't make strings larger.
if bytes_shorter < 0:
raise CannotGrowString(old, new)

# If we don't know whether this is a null terminated C-string (we're looking
# only N + 1 bytes ahead), or if it is and we have a common suffix, we can
# simply pad with leading dir separators.
elif (
not null_terminated
or suffix_strlen >= suffix_safety_size # == is enough, but let's be defensive
or old[-suffix_safety_size + suffix_strlen :]
== new[-suffix_safety_size + suffix_strlen :]
):
replacement = b"/" * bytes_shorter + new

# If it *was* null terminated, all that matters is that we can leave N bytes
# of old suffix in place. Note that > is required since we also insert an
# additional null terminator.
elif bytes_shorter > suffix_safety_size:
replacement = new + match.group(2) # includes the trailing null

# Otherwise... we can't :(
else:
raise CannotShrinkCString(old, new, match.group()[:-1])

f.seek(match.start())
f.write(replacement)


def _replace_prefix_bin(filename, prefix_to_prefix):
"""Replace all the occurrences of the old prefix with a new prefix in binary
files. See :func:`~spack.relocate.apply_binary_replacements` for details.

Args:
filename (str): target binary file
byte_prefixes (OrderedDict): ordered dictionary where the keys are
bytes representing the old prefixes and the values are the new
prefixes (all bytes utf-8 encoded)
"""

with open(filename, "rb+") as f:
apply_binary_replacements(f, prefix_to_prefix)


def relocate_macho_binaries(
path_names,
old_layout_root,
@@ -800,120 +656,32 @@ def relocate_links(links, prefix_to_prefix):

symlink(new_target, link)


def utf8_path_to_binary_regex(prefix):
"""Create a (binary) regex that matches the input path in utf8"""
prefix_bytes = re.escape(prefix).encode("utf-8")
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)


def byte_strings_to_single_binary_regex(prefixes):
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)


def utf8_paths_to_single_binary_regex(prefixes):
"""Create a (binary) regex that matches any input path in utf8"""
return byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)


def _replace_prefix_text_file(file, regex, prefix_to_prefix):
"""Given a text file opened in rb+, substitute all old with new prefixes and write
in-place (file size may grow or shrink)."""

def replacement(match):
return match.group(1) + prefix_to_prefix[match.group(2)] + match.group(3)

data = file.read()
file.seek(0)
file.write(re.sub(regex, replacement, data))
file.truncate()


def _replace_prefix_text(filename, regex, prefix_to_prefix):
with open(filename, "rb+") as f:
_replace_prefix_text_file(f, regex, prefix_to_prefix)


def unsafe_relocate_text(files, prefixes, concurrency=32):
def relocate_text(files, prefixes):
"""Relocate text file from the original installation prefix to the
new prefix.

Relocation also affects the the path in Spack's sbang script.

Note: unsafe when files contains duplicates, such as repeated paths,
symlinks, hardlinks.

Args:
files (list): Text files to be relocated
prefixes (OrderedDict): String prefixes which need to be changed
concurrency (int): Preferred degree of parallelism
"""

# This now needs to be handled by the caller in all cases
# orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
# new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)

# Transform to binary string
prefix_to_prefix = OrderedDict(
(k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefixes.items()
)

# Create a regex of the form (pre check)(prefix 1|prefix 2|prefix 3)(post check).
regex = byte_strings_to_single_binary_regex(prefix_to_prefix.keys())

args = [(filename, regex, prefix_to_prefix) for filename in files]
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
tp.map(llnl.util.lang.star(_replace_prefix_text), args)
finally:
tp.terminate()
tp.join()
TextFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(files)


def unsafe_relocate_text_bin(binaries, prefixes, concurrency=32):
"""Replace null terminated path strings hard coded into binaries.
def relocate_text_bin(binaries, prefixes):
"""Replace null terminated path strings hard-coded into binaries.

The new install prefix must be shorter than the original one.

Note: unsafe when files contains duplicates, such as repeated paths,
symlinks, hardlinks.

Args:
binaries (list): binaries to be relocated
prefixes (OrderedDict): String prefixes which need to be changed.
concurrency (int): Desired degree of parallelism.

Raises:
BinaryTextReplaceError: when the new path is longer than the old path
spack.relocate_text.BinaryTextReplaceError: when the new path is longer than the old path
"""
byte_prefixes = collections.OrderedDict({})

for orig_prefix, new_prefix in prefixes.items():
if orig_prefix != new_prefix:
if isinstance(orig_prefix, bytes):
orig_bytes = orig_prefix
else:
orig_bytes = orig_prefix.encode("utf-8")
if isinstance(new_prefix, bytes):
new_bytes = new_prefix
else:
new_bytes = new_prefix.encode("utf-8")
byte_prefixes[orig_bytes] = new_bytes

# Do relocations on text in binaries that refers to the install tree
# multiprocesing.ThreadPool.map requires single argument
args = []

for binary in binaries:
args.append((binary, byte_prefixes))

tp = multiprocessing.pool.ThreadPool(processes=concurrency)

try:
tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
finally:
tp.terminate()
tp.join()
BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)


def is_relocatable(spec):
lib/spack/spack/relocate_text.py (new file, 288 lines)
@@ -0,0 +1,288 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This module contains pure-Python classes and functions for replacing
paths inside text files and binaries."""

import re
from collections import OrderedDict
from typing import Dict, Union

import spack.error

Prefix = Union[str, bytes]


def encode_path(p: Prefix) -> bytes:
return p if isinstance(p, bytes) else p.encode("utf-8")


def _prefix_to_prefix_as_bytes(prefix_to_prefix) -> Dict[bytes, bytes]:
return OrderedDict((encode_path(k), encode_path(v)) for (k, v) in prefix_to_prefix.items())


def utf8_path_to_binary_regex(prefix: str):
"""Create a binary regex that matches the input path in utf8"""
prefix_bytes = re.escape(prefix).encode("utf-8")
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)


def _byte_strings_to_single_binary_regex(prefixes):
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)


def utf8_paths_to_single_binary_regex(prefixes):
"""Create a (binary) regex that matches any input path in utf8"""
return _byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)


def filter_identity_mappings(prefix_to_prefix):
"""Drop mappings that are not changed."""
# NOTE: we don't guard against the following case:
# [/abc/def -> /abc/def, /abc -> /x] *will* be simplified to
# [/abc -> /x], meaning that after this simplification /abc/def will be
# mapped to /x/def instead of /abc/def. This should not be a problem.
return OrderedDict((k, v) for (k, v) in prefix_to_prefix.items() if k != v)


class PrefixReplacer:
"""Base class for applying a prefix to prefix map
to a list of binaries or text files.
Child classes implement _apply_to_file to do the
actual work, which is different when it comes to
binaries and text files."""

def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
"""
Arguments:

prefix_to_prefix (OrderedDict):

A ordered mapping from prefix to prefix. The order is
relevant to support substring fallbacks, for example
[("/first/sub", "/x"), ("/first", "/y")] will ensure
/first/sub is matched and replaced before /first.
"""
self.prefix_to_prefix = filter_identity_mappings(prefix_to_prefix)

@property
def is_noop(self) -> bool:
"""Returns true when the prefix to prefix map
is mapping everything to the same location (identity)
or there are no prefixes to replace."""
return not bool(self.prefix_to_prefix)

def apply(self, filenames: list):
if self.is_noop:
return
for filename in filenames:
self.apply_to_filename(filename)

def apply_to_filename(self, filename):
if self.is_noop:
return
with open(filename, "rb+") as f:
self.apply_to_file(f)

def apply_to_file(self, f):
if self.is_noop:
return
self._apply_to_file(f)


class TextFilePrefixReplacer(PrefixReplacer):
"""This class applies prefix to prefix mappings for relocation
on text files.

Note that UTF-8 encoding is assumed."""

def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
"""
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new.
"""
super().__init__(prefix_to_prefix)
# Single regex for all paths.
self.regex = _byte_strings_to_single_binary_regex(self.prefix_to_prefix.keys())

@classmethod
def from_strings_or_bytes(
cls, prefix_to_prefix: Dict[Prefix, Prefix]
) -> "TextFilePrefixReplacer":
"""Create a TextFilePrefixReplacer from an ordered prefix to prefix map."""
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix))

def _apply_to_file(self, f):
"""Text replacement implementation simply reads the entire file
in memory and applies the combined regex."""
replacement = lambda m: m.group(1) + self.prefix_to_prefix[m.group(2)] + m.group(3)
data = f.read()
new_data = re.sub(self.regex, replacement, data)
if id(data) == id(new_data):
return
f.seek(0)
f.write(new_data)
f.truncate()


class BinaryFilePrefixReplacer(PrefixReplacer):
def __init__(self, prefix_to_prefix, suffix_safety_size=7):
"""
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new
suffix_safety_size (int): in case of null terminated strings, what size
of the suffix should remain to avoid aliasing issues?
"""
assert suffix_safety_size >= 0
super().__init__(prefix_to_prefix)
self.suffix_safety_size = suffix_safety_size
self.regex = self.binary_text_regex(self.prefix_to_prefix.keys(), suffix_safety_size)

@classmethod
def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):
"""
Create a regex that looks for exact matches of prefixes, and also tries to
match a C-string type null terminator in a small lookahead window.

Arguments:
binary_prefixes (list): List of byte strings of prefixes to match
suffix_safety_size (int): Sizeof the lookahed for null-terminated string.

Returns: compiled regex
"""
return re.compile(
b"("
+ b"|".join(re.escape(p) for p in binary_prefixes)
+ b")([^\0]{0,%d}\0)?" % suffix_safety_size
)

@classmethod
def from_strings_or_bytes(
cls, prefix_to_prefix: Dict[Prefix, Prefix], suffix_safety_size: int = 7
) -> "BinaryFilePrefixReplacer":
"""Create a BinaryFilePrefixReplacer from an ordered prefix to prefix map.

Arguments:
prefix_to_prefix (OrderedDict): Ordered mapping of prefix to prefix.
suffix_safety_size (int): Number of bytes to retain at the end of a C-string
to avoid binary string-aliasing issues.
"""
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix), suffix_safety_size)

def _apply_to_file(self, f):
"""
Given a file opened in rb+ mode, apply the string replacements as
specified by an ordered dictionary of prefix to prefix mappings. This
method takes special care of null-terminated C-strings. C-string constants
are problematic because compilers and linkers optimize readonly strings for
space by aliasing those that share a common suffix (only suffix since all
of them are null terminated). See https://github.com/spack/spack/pull/31739
and https://github.com/spack/spack/pull/32253 for details. Our logic matches
the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
If no null terminator is found, we simply pad with leading /, assuming that
it's a long C-string; the full C-string after replacement has a large suffix
in common with its original value.
If there *is* a null terminator we can do the same as long as the replacement
has a sufficiently long common suffix with the original prefix.
As a last resort when the replacement does not have a long enough common suffix,
we can try to shorten the string, but this only works if the new length is
sufficiently short (typically the case when going from large padding -> normal path)
If the replacement string is longer, or all of the above fails, we error out.

Arguments:
f: file opened in rb+ mode
"""
assert f.tell() == 0

# We *could* read binary data in chunks to avoid loading all in memory,
# but it's nasty to deal with matches across boundaries, so let's stick to
# something simple.

for match in self.regex.finditer(f.read()):
# The matching prefix (old) and its replacement (new)
old = match.group(1)
new = self.prefix_to_prefix[old]

# Did we find a trailing null within a N + 1 bytes window after the prefix?
null_terminated = match.end(0) > match.end(1)

# Suffix string length, excluding the null byte
# Only makes sense if null_terminated
suffix_strlen = match.end(0) - match.end(1) - 1

# How many bytes are we shrinking our string?
bytes_shorter = len(old) - len(new)

# We can't make strings larger.
if bytes_shorter < 0:
raise CannotGrowString(old, new)

# If we don't know whether this is a null terminated C-string (we're looking
# only N + 1 bytes ahead), or if it is and we have a common suffix, we can
# simply pad with leading dir separators.
elif (
not null_terminated
or suffix_strlen >= self.suffix_safety_size # == is enough, but let's be defensive
or old[-self.suffix_safety_size + suffix_strlen :]
== new[-self.suffix_safety_size + suffix_strlen :]
):
replacement = b"/" * bytes_shorter + new

# If it *was* null terminated, all that matters is that we can leave N bytes
# of old suffix in place. Note that > is required since we also insert an
# additional null terminator.
elif bytes_shorter > self.suffix_safety_size:
replacement = new + match.group(2) # includes the trailing null

# Otherwise... we can't :(
else:
raise CannotShrinkCString(old, new, match.group()[:-1])

f.seek(match.start())
f.write(replacement)


class BinaryStringReplacementError(spack.error.SpackError):
def __init__(self, file_path, old_len, new_len):
"""The size of the file changed after binary path substitution

Args:
file_path (str): file with changing size
old_len (str): original length of the file
new_len (str): length of the file after substitution
"""
super(BinaryStringReplacementError, self).__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
"when it should have remanined the same." % (file_path, old_len, new_len)
)


class BinaryTextReplaceError(spack.error.SpackError):
def __init__(self, msg):
msg += (
" To fix this, compile with more padding "
"(config:install_tree:padded_length), or install to a shorter prefix."
)
super(BinaryTextReplaceError, self).__init__(msg)


class CannotGrowString(BinaryTextReplaceError):
def __init__(self, old, new):
msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
super(CannotGrowString, self).__init__(msg)


class CannotShrinkCString(BinaryTextReplaceError):
def __init__(self, old, new, full_old_string):
# Just interpolate binary string to not risk issues with invalid
# unicode, which would be really bad user experience: error in error.
# We have no clue if we actually deal with a real C-string nor what
# encoding it has.
msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
old, new, full_old_string
)
super(CannotShrinkCString, self).__init__(msg)
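For orientation, a minimal usage sketch of the new classes (the prefixes and file paths are hypothetical; the classmethod-plus-``apply`` pattern mirrors how ``relocate_text`` and ``relocate_text_bin`` call them above):

```python
from collections import OrderedDict

from spack.relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

# Hypothetical old -> new install prefixes; order matters so that longer,
# more specific prefixes are replaced before their parent directories.
prefixes = OrderedDict(
    [
        ("/old/spack/opt/zlib-1.2.13", "/new/spack/opt/zlib-1.2.13"),
        ("/old/spack", "/new/spack"),
    ]
)

# Text files: prefixes are rewritten in place (file size may change).
TextFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(
    ["/new/spack/opt/zlib-1.2.13/lib/pkgconfig/zlib.pc"]
)

# Binaries: null-terminated C-strings are patched; the new prefix must not be
# longer than the old one, otherwise CannotGrowString is raised.
BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(
    ["/new/spack/opt/zlib-1.2.13/lib/libz.so"]
)
```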
@@ -70,7 +70,7 @@ def rewire_node(spec, explicit):

for rel_path in manifest.get("text_to_relocate", [])
]
if text_to_relocate:
relocate.unsafe_relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)

bins_to_relocate = [
os.path.join(tempdir, spec.dag_hash(), rel_path)

@@ -97,7 +97,7 @@ def rewire_node(spec, explicit):

spec.build_spec.prefix,
spec.prefix,
)
relocate.unsafe_relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
# Copy package into place, except for spec.json (because spec.json
# describes the old spec and not the new spliced spec).
shutil.copytree(
@@ -13,7 +13,6 @@

import re
import types
import warnings
from typing import Dict, List, Tuple, Union

import archspec.cpu
@@ -125,87 +124,84 @@ def getter(node):

# The space of possible priorities for optimization targets
# is partitioned in the following ranges:
# +=============================================================+
# | Priority | Description |
# +=============================================================+
# | 10,000,000+ | Error conditions |
# +-------------+-----------------------------------------------+
# | 9,999,999 | |
# | ... | High-priority criteria |
# | 1,000,000 | |
# +-------------+-----------------------------------------------+
# | 999,999 | |
# | ... | Standard criteria for built packages |
# | 100,001 | |
# +-------------+-----------------------------------------------+
# | 100,000 | Number of packages being built |
# +-------------+-----------------------------------------------+
# | 99,999 | |
# | ... | Standard criteria for reused packages |
# | 0 | |
# +-------------+-----------------------------------------------+
#
# [0-100) Optimization criteria for software being reused
# [100-200) Fixed criteria that are higher priority than reuse, but lower than build
# [200-300) Optimization criteria for software being built
# [300-1000) High-priority fixed criteria
# [1000-inf) Error conditions
#
# Each optimization target is a minimization with optimal value 0.
#

#: High fixed priority offset for criteria that supersede all build criteria
high_fixed_priority_offset = 10_000_000
high_fixed_priority_offset = 300

#: Priority offset for "build" criteria (regular criterio shifted to
#: higher priority for specs we have to build)
build_priority_offset = 100_000
build_priority_offset = 200

#: max priority for an error
max_error_priority = 3
#: Priority offset of "fixed" criteria (those w/o build criteria)
fixed_priority_offset = 100


def build_criteria_names(
costs: List[int], opt_criteria: List["AspFunction"], max_depth: int
) -> Dict[str, Union[int, List[Tuple[int, int]]]]:
def build_criteria_names(costs, arg_tuples):
"""Construct an ordered mapping from criteria names to costs."""
# pull optimization criteria names out of the solution
priorities_names = []

# ensure names of all criteria are unique
names = {criterion.args[0] for criterion in opt_criteria}
assert len(names) == len(opt_criteria), "names of optimization criteria must be unique"
num_fixed = 0
num_high_fixed = 0
for args in arg_tuples:
priority, name = args[:2]
priority = int(priority)

# split opt criteria into two lists
fixed_criteria = [oc for oc in opt_criteria if oc.args[1] == "fixed"]
leveled_criteria = [oc for oc in opt_criteria if oc.args[1] == "leveled"]
# add the priority of this opt criterion and its name
priorities_names.append((priority, name))

# first non-error criterion
solve_index = max_error_priority + 1
# if the priority is less than fixed_priority_offset, then it
# has an associated build priority -- the same criterion but for
# nodes that we have to build.
if priority < fixed_priority_offset:
build_priority = priority + build_priority_offset
priorities_names.append((build_priority, name))
elif priority >= high_fixed_priority_offset:
num_high_fixed += 1
else:
num_fixed += 1

# compute without needing max_depth from solve
max_leveled_costs = (len(costs) - max_error_priority - 3) / 2
assert max_leveled_costs * 2 == len(costs) - max_error_priority - 3
assert max_leveled_costs % len(leveled_criteria) == 0
max_leveled_costs = int(max_leveled_costs)
# sort the criteria by priority
priorities_names = sorted(priorities_names, reverse=True)

n_leveled_costs = len(leveled_criteria) * (max_depth + 1)
# We only have opt-criterion values for non-error types
# error type criteria are excluded (they come first)
error_criteria = len(costs) - len(priorities_names)
costs = costs[error_criteria:]

build_index = solve_index + 1 + max_leveled_costs
fixed_costs = [costs[solve_index], costs[build_index]]
# split list into three parts: build criteria, fixed criteria, non-build criteria
num_criteria = len(priorities_names)
num_build = (num_criteria - num_fixed - num_high_fixed) // 2

build_costs = costs[solve_index + 1 : solve_index + 1 + n_leveled_costs]
reuse_costs = costs[build_index + 1 : build_index + 1 + n_leveled_costs]
assert len(build_costs) == len(reuse_costs) == n_leveled_costs
build_start_idx = num_high_fixed
fixed_start_idx = num_high_fixed + num_build
installed_start_idx = num_high_fixed + num_build + num_fixed

criteria = {}
high_fixed = priorities_names[:build_start_idx]
build = priorities_names[build_start_idx:fixed_start_idx]
fixed = priorities_names[fixed_start_idx:installed_start_idx]
installed = priorities_names[installed_start_idx:]

def add_fixed(criterion_idx, cost_idx):
name = fixed_criteria[criterion_idx].args[2]
criteria["fixed: " + name] = costs[cost_idx]
# mapping from priority to index in cost list
indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))

add_fixed(0, solve_index)
# make a list that has each name with its build and non-build costs
criteria = [(cost, None, name) for cost, (p, name) in zip(costs[:build_start_idx], high_fixed)]
criteria += [
(cost, None, name)
for cost, (p, name) in zip(costs[fixed_start_idx:installed_start_idx], fixed)
]

for i, fn in enumerate(leveled_criteria):
name = fn.args[2]
criteria["build: " + name] = build_costs[i :: len(leveled_criteria)]

add_fixed(1, build_index)

for i, fn in enumerate(leveled_criteria):
name = fn.args[2]
criteria["reuse: " + name] = reuse_costs[i :: len(leveled_criteria)]
for (i, name), (b, _) in zip(installed, build):
criteria.append((costs[indices[i]], costs[indices[b]], name))

return criteria
@@ -255,11 +251,7 @@ def _id(thing):
class AspFunction(AspObject):
def __init__(self, name, args=None):
self.name = name

def simplify(arg):
return arg if isinstance(arg, (str, bool, int)) else str(arg)

self.args = () if args is None else tuple(simplify(arg) for arg in args)
self.args = () if args is None else tuple(args)

def _cmp_key(self):
return (self.name, self.args)
@@ -294,29 +286,10 @@ def argify(arg):
elif isinstance(arg, int):
return clingo.Number(arg)
else:
return clingo.String(arg)
return clingo.String(str(arg))

return clingo.Function(self.name, [argify(arg) for arg in self.args], positive=positive)

@staticmethod
def from_symbol(symbol):
def deargify(arg):
if arg.type is clingo.SymbolType.Number:
return arg.number
elif arg.type is clingo.SymbolType.String and arg.string in ("True", "False"):
return arg.string == "True"
else:
return arg.string

return AspFunction(symbol.name, [deargify(arg) for arg in symbol.arguments])

def shift(self):
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
if not self.args:
raise ValueError(f"Can't shift ASP function with no arguments: {str(self)}")
first, *rest = self.args
return AspFunction(first, rest)

def __str__(self):
return "%s(%s)" % (self.name, ", ".join(str(_id(arg)) for arg in self.args))
|
||||
|
||||
@@ -521,8 +494,7 @@ def _compute_specs_from_answer_set(self):
|
||||
self._concrete_specs, self._unsolved_specs = [], []
|
||||
self._concrete_specs_by_input = {}
|
||||
best = min(self.answers)
|
||||
|
||||
opt, _, answer, _ = best
|
||||
opt, _, answer = best
|
||||
for input_spec in self.abstract_specs:
|
||||
key = input_spec.name
|
||||
if input_spec.virtual:
|
||||
@@ -594,6 +566,9 @@ def stringify(sym):
|
||||
if isinstance(sym, (list, tuple)):
|
||||
return tuple(stringify(a) for a in sym)
|
||||
|
||||
if str(sym.type) == "Function": # TODO GBB: Find appropriate test for this
|
||||
return tuple(stringify(a) for a in sym.arguments)
|
||||
|
||||
if clingo_cffi:
|
||||
# Clingo w/ CFFI will throw an exception on failure
|
||||
try:
|
||||
@@ -604,13 +579,13 @@ def stringify(sym):
return sym.string or str(sym)


def extract_functions(model, function_name):
"""Extract ASP functions with the given name from a model.
def extract_args(model, predicate_name):
"""Extract the arguments to predicates with the provided name from a model.

Pull out all the functions with name ``function_name`` from the model, and return them as
``AspFunction`` objects.
Pull out all the predicates with name ``predicate_name`` from the model, and return
their stringified arguments as tuples.
"""
return [AspFunction.from_symbol(sym) for sym in model if sym.name == function_name]
return [stringify(sym.arguments) for sym in model if sym.name == predicate_name]
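The replacement helper returns plain argument tuples rather than AspFunction objects. A self-contained sketch of the filter-by-name pattern, with mock symbols standing in for real clingo ones:

from collections import namedtuple

# Mock clingo symbols: real models hold clingo.Symbol objects, but the
# filtering-by-predicate-name pattern is the same.
Sym = namedtuple("Sym", ["name", "arguments"])

def extract_args(model, predicate_name):
    """Return the argument tuples of every symbol with the given name."""
    return [tuple(sym.arguments) for sym in model if sym.name == predicate_name]

model = [
    Sym("attr", ("node", "zlib")),
    Sym("attr", ("version", "zlib", "1.2.13")),
    Sym("error", ("1", "cannot satisfy constraint")),
]
print(extract_args(model, "attr"))
# [('node', 'zlib'), ('version', 'zlib', '1.2.13')]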
|
||||
|
||||
|
||||
class PyclingoDriver(object):
|
||||
@@ -709,10 +684,11 @@ def solve(self, setup, specs, reuse=None, output=None, control=None):
|
||||
self.control = control or default_clingo_control()
|
||||
# set up the problem -- this generates facts and rules
|
||||
self.assumptions = []
|
||||
with timer.measure("setup"):
|
||||
with self.control.backend() as backend:
|
||||
self.backend = backend
|
||||
setup.setup(self, specs, reuse=reuse)
|
||||
timer.start("setup")
|
||||
with self.control.backend() as backend:
|
||||
self.backend = backend
|
||||
setup.setup(self, specs, reuse=reuse)
|
||||
timer.stop("setup")
|
||||
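This hunk trades the timer.measure(...) context manager for explicit start/stop calls around the backend block. A toy timer supporting both styles, for illustration only (this is not Spack's actual timer API):

import time
from contextlib import contextmanager

class Timer:
    """Tiny named-phase timer supporting start/stop and a context manager."""
    def __init__(self):
        self._starts = {}
        self.durations = {}

    def start(self, name):
        self._starts[name] = time.perf_counter()

    def stop(self, name):
        self.durations[name] = time.perf_counter() - self._starts.pop(name)

    @contextmanager
    def measure(self, name):
        self.start(name)
        try:
            yield
        finally:
            self.stop(name)

timer = Timer()
with timer.measure("setup"):
    sum(range(100_000))         # stand-in for problem setup work
timer.start("load")
sum(range(100_000))             # stand-in for loading the ASP program
timer.stop("load")
print(sorted(timer.durations))  # ['load', 'setup']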
|
||||
timer.start("load")
|
||||
# read in the main ASP program and display logic -- these are
|
||||
@@ -758,8 +734,7 @@ def visit(node):
|
||||
cores = [] # unsatisfiable cores if they do not
|
||||
|
||||
def on_model(model):
|
||||
priorities = getattr(model, "priority", None)
|
||||
models.append((model.cost, priorities, model.symbols(shown=True, terms=True)))
|
||||
models.append((model.cost, model.symbols(shown=True, terms=True)))
|
||||
|
||||
solve_kwargs = {
|
||||
"assumptions": self.assumptions,
|
||||
@@ -780,28 +755,24 @@ def on_model(model):
|
||||
if result.satisfiable:
|
||||
# get the best model
|
||||
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
|
||||
min_cost, priorities, best_model = min(models)
|
||||
min_cost, best_model = min(models)
|
||||
|
||||
# first check for errors
|
||||
error_args = [fn.args for fn in extract_functions(best_model, "error")]
|
||||
error_args = extract_args(best_model, "error")
|
||||
errors = sorted((int(priority), msg, args) for priority, msg, *args in error_args)
|
||||
for _, msg, args in errors:
|
||||
self.handle_error(msg, *args)
|
||||
|
||||
# build specs from spec attributes in the model
|
||||
spec_attrs = extract_functions(best_model, "attr")
|
||||
with timer.measure("build"):
|
||||
answers = builder.build_specs(spec_attrs)
|
||||
spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
|
||||
answers = builder.build_specs(spec_attrs)
|
||||
|
||||
# add best spec to the results
|
||||
result.answers.append((list(min_cost), 0, answers, spec_attrs))
|
||||
result.answers.append((list(min_cost), 0, answers))
|
||||
|
||||
# get optimization criteria
|
||||
criteria = extract_functions(best_model, "opt_criterion")
|
||||
depths = extract_functions(best_model, "depth")
|
||||
max_depth = max(d.args[1] for d in depths)
|
||||
|
||||
result.criteria = build_criteria_names(min_cost, criteria, max_depth)
|
||||
criteria_args = extract_args(best_model, "opt_criterion")
|
||||
result.criteria = build_criteria_names(min_cost, criteria_args)
|
||||
|
||||
# record the number of models the solver considered
|
||||
result.nmodels = len(models)
|
||||
@@ -811,9 +782,9 @@ def on_model(model):
|
||||
|
||||
# print any unknown functions in the model
|
||||
for sym in best_model:
|
||||
if sym.name not in ("attr", "error", "opt_criterion", "depth", "const_max_depth"):
|
||||
if sym.name not in ("attr", "error", "opt_criterion"):
|
||||
tty.debug(
|
||||
"UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
|
||||
"UNKNOWN SYMBOL: %s%s" % (sym.name, stringify(sym.arguments))
|
||||
)
|
||||
|
||||
elif cores:
|
||||
@@ -1441,23 +1412,23 @@ def spec_clauses(self, *args, **kwargs):
|
||||
def _spec_clauses(
|
||||
self,
|
||||
spec,
|
||||
body: bool = False,
|
||||
transitive: bool = True,
|
||||
expand_hashes: bool = False,
|
||||
concrete_build_deps: bool = False,
|
||||
deptype: Union[str, Tuple[str, ...]] = "all",
|
||||
body=False,
|
||||
transitive=True,
|
||||
expand_hashes=False,
|
||||
concrete_build_deps=False,
|
||||
):
|
||||
"""Return a list of clauses for a spec mandates are true.
|
||||
|
||||
Arguments:
|
||||
spec: the spec to analyze
|
||||
body: if True, generate clauses to be used in rule bodies
|
||||
spec (spack.spec.Spec): the spec to analyze
|
||||
body (bool): if True, generate clauses to be used in rule bodies
|
||||
(final values) instead of rule heads (setters).
|
||||
transitive: if False, don't generate clauses from dependencies.
|
||||
expand_hashes: If transitive and True, descend into hashes of concrete specs.
|
||||
concrete_build_deps: if False, do not include pure build deps
|
||||
transitive (bool): if False, don't generate clauses from
|
||||
dependencies (default True)
|
||||
expand_hashes (bool): if True, descend into hashes of concrete specs
|
||||
(default False)
|
||||
concrete_build_deps (bool): if False, do not include pure build deps
|
||||
of concrete specs (as they have no effect on runtime constraints)
|
||||
deptype: dependency types to follow when transitive (default "all").
|
||||
|
||||
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
|
||||
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
|
||||
@@ -1585,7 +1556,7 @@ class Body(object):
|
||||
# add all clauses from dependencies
|
||||
if transitive:
|
||||
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
|
||||
for dspec in spec.edges_to_dependencies(deptype=deptype):
|
||||
for dspec in spec.edges_to_dependencies():
|
||||
dep = dspec.spec
|
||||
|
||||
if spec.concrete:
|
||||
@@ -1617,13 +1588,12 @@ class Body(object):
|
||||
body=body,
|
||||
expand_hashes=expand_hashes,
|
||||
concrete_build_deps=concrete_build_deps,
|
||||
deptype=deptype,
|
||||
)
|
||||
)
|
||||
|
||||
return clauses
|
||||
|
||||
def build_version_dict(self, possible_pkgs):
|
||||
def build_version_dict(self):
|
||||
"""Declare any versions in specs not declared in packages."""
|
||||
self.declared_versions = collections.defaultdict(list)
|
||||
self.possible_versions = collections.defaultdict(set)
|
||||
@@ -1631,7 +1601,7 @@ def build_version_dict(self, possible_pkgs):
|
||||
|
||||
packages_yaml = spack.config.get("packages")
|
||||
packages_yaml = _normalize_packages_yaml(packages_yaml)
|
||||
for pkg_name in possible_pkgs:
|
||||
for pkg_name in self.pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
|
||||
# All the versions from the corresponding package.py file. Since concepts
|
||||
@@ -2002,11 +1972,11 @@ def define_variant_values(self):
|
||||
for pkg, variant, value in self.variant_values_from_specs:
|
||||
self.gen.fact(fn.variant_possible_value(pkg, variant, value))
|
||||
|
||||
def _facts_from_concrete_spec(self, spec, possible):
|
||||
def _facts_from_concrete_spec(self, spec):
|
||||
# tell the solver about any installed packages that could
|
||||
# be dependencies (don't tell it about the others)
|
||||
h = spec.dag_hash()
|
||||
if spec.name in possible and h not in self.seen_hashes:
|
||||
if spec.name in self.pkgs and h not in self.seen_hashes:
|
||||
self.reusable_and_possible[h] = spec
|
||||
try:
|
||||
# Only consider installed packages for repo we know
|
||||
@@ -2036,12 +2006,12 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
# add the hash to the one seen so far
|
||||
self.seen_hashes.add(h)
|
||||
|
||||
def define_concrete_input_specs(self, specs, possible):
|
||||
def define_concrete_input_specs(self, specs):
|
||||
# any concrete specs in the input spec list
|
||||
for input_spec in specs:
|
||||
for spec in input_spec.traverse():
|
||||
if spec.concrete:
|
||||
self._facts_from_concrete_spec(spec, possible)
|
||||
self._facts_from_concrete_spec(spec)
|
||||
|
||||
def setup(self, driver, specs, reuse=None):
|
||||
"""Generate an ASP program with relevant constraints for specs.
|
||||
@@ -2062,20 +2032,28 @@ def setup(self, driver, specs, reuse=None):
|
||||
|
||||
# get list of all possible dependencies
|
||||
self.possible_virtuals = set(x.name for x in specs if x.virtual)
|
||||
possible = spack.package_base.possible_dependencies(
|
||||
*specs, virtuals=self.possible_virtuals, deptype=spack.dependency.all_deptypes
|
||||
self.pkgs = set(
|
||||
spack.package_base.possible_dependencies(
|
||||
*specs, virtuals=self.possible_virtuals, deptype=spack.dependency.all_deptypes
|
||||
)
|
||||
)
|
||||
|
||||
# TODO GBB: This only gets packages that can only appear as pure build deps,
|
||||
# need to rethink it to get packages that can appear as link/run or as pure build deps
|
||||
self.build_pkgs = self.pkgs - set(
|
||||
spack.package_base.possible_dependencies(
|
||||
*specs, virtuals=self.possible_virtuals, deptype=("link", "run")
|
||||
)
|
||||
)
|
||||
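build_pkgs is computed as the set of packages reachable through any dependency type minus those reachable through link/run edges alone; as the TODO notes, a package reachable both ways is not counted as build-only. A toy version of that reachability difference over a made-up graph:

# Toy dependency graph: package -> list of (dependency, deptypes).
deps = {
    "app":    [("libfoo", {"build", "link"}), ("cmake", {"build"})],
    "libfoo": [("zlib", {"build", "link"}), ("pkgconf", {"build"})],
    "cmake":  [], "zlib": [], "pkgconf": [],
}

def reachable(roots, allowed):
    """All packages reachable from roots following only edges in `allowed`."""
    seen, stack = set(roots), list(roots)
    while stack:
        for child, types in deps[stack.pop()]:
            if types & allowed and child not in seen:
                seen.add(child)
                stack.append(child)
    return seen

all_pkgs = reachable({"app"}, {"build", "link", "run"})
link_run = reachable({"app"}, {"link", "run"})
print(sorted(all_pkgs - link_run))  # ['cmake', 'pkgconf'] -- pure build-only deps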
|
||||
# Fail if we already know an unreachable node is requested
|
||||
for spec in specs:
|
||||
missing_deps = [
|
||||
str(d) for d in spec.traverse() if d.name not in possible and not d.virtual
|
||||
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
|
||||
]
|
||||
if missing_deps:
|
||||
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
|
||||
|
||||
self.pkgs = set(possible)
|
||||
|
||||
# driver is used by all the functions below to add facts and
|
||||
# rules to generate an ASP program.
|
||||
self.gen = driver
|
||||
@@ -2099,18 +2077,18 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.possible_compilers = self.generate_possible_compilers(specs)
|
||||
|
||||
# traverse all specs and packages to build dict of possible versions
|
||||
self.build_version_dict(possible)
|
||||
self.build_version_dict()
|
||||
self.add_concrete_versions_from_specs(specs, version_provenance.spec)
|
||||
self.add_concrete_versions_from_specs(dev_specs, version_provenance.dev_spec)
|
||||
|
||||
self.gen.h1("Concrete input spec definitions")
|
||||
self.define_concrete_input_specs(specs, possible)
|
||||
self.define_concrete_input_specs(specs)
|
||||
|
||||
if reuse:
|
||||
self.gen.h1("Reusable specs")
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
for reusable_spec in reuse:
|
||||
self._facts_from_concrete_spec(reusable_spec, possible)
|
||||
self._facts_from_concrete_spec(reusable_spec)
|
||||
|
||||
self.gen.h1("General Constraints")
|
||||
self.available_compilers()
|
||||
@@ -2136,6 +2114,10 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.preferred_variants(pkg)
|
||||
self.target_preferences(pkg)
|
||||
|
||||
self.gen.h1("Package Alternates")
|
||||
for pkg in sorted(self.pkgs): # TODO GBB: Can we cleverly reduce the size of this?
|
||||
self.gen.fact(fn.name_mangled(pkg, 3))
|
||||
|
||||
# Inject dev_path from environment
|
||||
for ds in dev_specs:
|
||||
self.condition(spack.spec.Spec(ds.name), ds, msg="%s is a develop spec" % ds.name)
|
||||
@@ -2209,8 +2191,10 @@ def hash(self, pkg, h):
|
||||
self._specs[pkg] = self._hash_lookup[h]
|
||||
|
||||
def node(self, pkg):
|
||||
print(pkg)
|
||||
name = pkg[0] if isinstance(pkg, tuple) else pkg
|
||||
if pkg not in self._specs:
|
||||
self._specs[pkg] = spack.spec.Spec(pkg)
|
||||
self._specs[pkg] = spack.spec.Spec(name)
|
||||
|
||||
def _arch(self, pkg):
|
||||
arch = self._specs[pkg].architecture
|
||||
@@ -2292,7 +2276,7 @@ def depends_on(self, pkg, dep, type):
|
||||
assert len(dependencies) < 2, msg
|
||||
|
||||
if not dependencies:
|
||||
self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
|
||||
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
|
||||
else:
|
||||
# TODO: This assumes that each solve unifies dependencies
|
||||
dependencies[0].add_type(type)
|
||||
@@ -2349,7 +2333,7 @@ def deprecated(self, pkg, version):
|
||||
tty.warn(msg.format(pkg, version))
|
||||
|
||||
@staticmethod
|
||||
def sort_fn(function):
|
||||
def sort_fn(function_tuple):
|
||||
"""Ensure attributes are evaluated in the correct order.
|
||||
|
||||
hash attributes are handled first, since they imply entire concrete specs
|
||||
@@ -2359,7 +2343,7 @@ def sort_fn(function):
|
||||
the concrete specs on which they depend because all nodes are fully constructed before we
|
||||
consider which ones are external.
|
||||
"""
|
||||
name = function.args[0]
|
||||
name = function_tuple[0]
|
||||
if name == "hash":
|
||||
return (-5, 0)
|
||||
elif name == "node":
|
||||
@@ -2373,24 +2357,23 @@ def sort_fn(function):
|
||||
else:
|
||||
return (-1, 0)
|
||||
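The sort key guarantees that whole-spec attributes (hashes, then nodes) are applied before attributes that assume those objects already exist. A sketch of sorting attribute tuples with such a key; the middle priority values are assumptions, since the hunk elides them:

# Illustrative only: "hash" first, then "node", then everything else, matching
# the shape of sort_fn above; the intermediate priorities are assumed.
def sort_key(function_tuple):
    name = function_tuple[0]
    if name == "hash":
        return (-5, 0)
    elif name == "node":
        return (-4, 0)   # assumed value -- the real ordering has more cases
    else:
        return (-1, 0)

attrs = [
    ("version", ("zlib", "1.2.13")),
    ("node", ("zlib",)),
    ("hash", ("zlib", "abc123")),
]
print([name for name, _ in sorted(attrs, key=sort_key)])
# ['hash', 'node', 'version']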
|
||||
def build_specs(self, functions):
|
||||
def build_specs(self, function_tuples):
|
||||
# Functions don't seem to be in particular order in output. Sort
|
||||
# them here so that directives that build objects (like node and
|
||||
# node_compiler) are called in the right order.
|
||||
self.functions = sorted(set(functions), key=self.sort_fn)
|
||||
self.function_tuples = sorted(set(function_tuples), key=self.sort_fn)
|
||||
|
||||
self._specs = {}
|
||||
for attr in self.functions:
|
||||
fn = attr.shift() # attr("foo", "bar") -> foo("bar")
|
||||
|
||||
if SpecBuilder.ignored_attributes.match(fn.name):
|
||||
for name, args in self.function_tuples:
|
||||
if SpecBuilder.ignored_attributes.match(name):
|
||||
continue
|
||||
|
||||
action = getattr(self, fn.name, None)
|
||||
action = getattr(self, name, None)
|
||||
|
||||
# print out unknown actions so we can display them for debugging
|
||||
if not action:
|
||||
tty.debug(f"UNKNOWN SYMBOL: {attr}")
|
||||
msg = 'UNKNOWN SYMBOL: attr("%s", %s)' % (name, ", ".join(str(a) for a in args))
|
||||
tty.debug(msg)
|
||||
continue
|
||||
|
||||
msg = (
|
||||
@@ -2402,9 +2385,10 @@ def build_specs(self, functions):
|
||||
# ignore predicates on virtual packages, as they're used for
|
||||
# solving but don't construct anything. Do not ignore error
|
||||
# predicates on virtual packages.
|
||||
if fn.name != "error":
|
||||
pkg = fn.args[0]
|
||||
if spack.repo.path.is_virtual(pkg):
|
||||
if name != "error":
|
||||
pkg = args[0]
|
||||
pkg_name = pkg[0] if isinstance(pkg, tuple) else pkg
|
||||
if spack.repo.path.is_virtual(pkg_name):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
@@ -2413,7 +2397,7 @@ def build_specs(self, functions):
|
||||
if spec and spec.concrete:
|
||||
continue
|
||||
|
||||
action(*fn.args)
|
||||
action(*args)
|
||||
|
||||
# namespace assignment is done after the fact, as it is not
|
||||
# currently part of the solve
|
||||
|
||||
@@ -23,17 +23,9 @@ literal_not_solved(ID) :- not literal_solved(ID), literal(ID).
|
||||
% in better reporting for users. See #30669 for details.
|
||||
1 { literal_solved(ID) : literal(ID) }.
|
||||
|
||||
% priority ranges for optimization criteria
|
||||
% note that clingo's weight_t is int32_t, so the max priority we can use is 2,147,483,647
|
||||
#const max_error_priority = 3.
|
||||
#const error_prio = 10000000.
|
||||
#const solve_prio = 1000000.
|
||||
#const build_prio = 100000. % n_nodes x depth_offset x max levels needs to be less than this
|
||||
#const depth_offset = 100. % depth_offset-1 is the max id for leveled criteria
|
||||
|
||||
opt_criterion(solve_prio, "fixed", "number of input specs not concretized").
|
||||
#minimize{ 0@solve_prio: #true }.
|
||||
#minimize{ 1@solve_prio,ID : literal_not_solved(ID) }.
|
||||
opt_criterion(300, "number of input specs not concretized").
|
||||
#minimize{ 0@300: #true }.
|
||||
#minimize { 1@300,ID : literal_not_solved(ID) }.
|
||||
|
||||
% Map constraint on the literal ID to the correct PSID
|
||||
attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
|
||||
@@ -48,6 +40,167 @@ attr(Name, A1, A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), literal_
|
||||
#defined literal/5.
|
||||
#defined literal/6.
|
||||
|
||||
%--------------------------------------------------
|
||||
% Map package attributes to mangled names
|
||||
%--------------------------------------------------
|
||||
|
||||
%package_attr((Name, N), A1) :- package_attr(Name, A1), name_mangled(name, M, N)
|
||||
%package_attr((Name, N), A1, A2) :- package_attr(Name, A1, A2), name_mangled(name, M, N)
|
||||
%package_attr((Name, N), A1, A2, A3) :- package_attr(Name, A1, A2, A3), name_mangled(name, M, N)
|
||||
%package_attr((Name, N), A1, A2, A3, A4) :- package_attr(Name, A1, A2, A3, A4), name_mangled(name, M, N)
|
||||
|
||||
package_alternate(Name, (Name, N)) :- name_mangled(Name, M), N=1..M.
|
||||
package_alternate(Package, Package) :- name_mangled(Package, _). %TODO GBB: stand in for "name exists"
|
||||
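These two rules expand every mangled package into its alternates (Name, 1) .. (Name, M) plus the plain name itself, driven by the name_mangled(pkg, 3) facts emitted in setup(). The same expansion written out in Python, as a sketch:

def package_alternates(name, copies):
    """Return the plain name plus (name, 1)..(name, copies) alternates."""
    return [name] + [(name, n) for n in range(1, copies + 1)]

# setup() above emits name_mangled(pkg, 3) for every possible package.
print(package_alternates("zlib", 3))
# ['zlib', ('zlib', 1), ('zlib', 2), ('zlib', 3)]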
|
||||
version_declared((Package, N), Version, Weight, Origin)
|
||||
:- version_declared(Package, Version, Weight, Origin),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
version_equivalent((Package, N), Version, RefVersion)
|
||||
:- version_equivalent(Package, Version, RefVersion),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
deprecated_version((Package, N), Version)
|
||||
:- deprecated_version(Package, Version),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
conflict((Package, N), (Trigger, N), (Constraint, N), Msg)
|
||||
:- conflict(Package, Trigger, Constraint, Msg),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
node_compiler_preference((Package, N), Compiler, CompilerVersion, Weight)
|
||||
:- node_compiler_preference(Package, Compiler, CompilerVersion, Weight),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant((Package, N), Variant)
|
||||
:- variant(Package, Variant),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_condition((Condition, N), (Package, N), Variant)
|
||||
:- variant_condition(Condition, Package, Variant),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_single_value((Package, N), Name)
|
||||
:- variant_single_value(Package, Name),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_default_value_from_package_py((Package, N), Variant, Value)
|
||||
:- variant_default_value_from_package_py(Package, Variant, Value),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_value_from_disjoint_sets((Package, N), Variant, Value, SetID)
|
||||
:- variant_value_from_disjoint_sets(Package, Variant, Value, SetID),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_possible_value((Package, N), Variant, Value)
|
||||
:- variant_possible_value(Package, Variant, Value),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_sticky((Package, N), Variant)
|
||||
:- variant_sticky(Package, Variant),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
pkg_provider_preference((Package, N), Virtual, Provider, Weight)
|
||||
:- pkg_provider_preference(Package, Virtual, Provider, Weight),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
condition((Condition, N))
|
||||
:- condition(Condition),
|
||||
condition_requirement((Condition, N), _, _).
|
||||
condition((Condition, N))
|
||||
:- condition(Condition),
|
||||
condition_requirement((Condition, N), _, _, _).
|
||||
condition((Condition, N))
|
||||
:- condition(Condition),
|
||||
condition_requirement((Condition, N), _, _, _, _).
|
||||
condition((Condition, N))
|
||||
:- condition(Condition),
|
||||
condition_requirement((Condition, N), _, _, _, _).
|
||||
|
||||
condition_requirement((Condition, N), Name, (Package, N))
|
||||
:- condition_requirement(Condition, Name, Package),
|
||||
package_alternate(Package, (Package, N)).
|
||||
condition_requirement((Condition, N), Name, (Package, N), A2)
|
||||
:- condition_requirement(Condition, Name, Package, A2),
|
||||
package_alternate(Package, (Package, N)).
|
||||
condition_requirement((Condition, N), Name, (Package, N), A2, A3)
|
||||
:- condition_requirement(Condition, Name, Package, A2, A3),
|
||||
package_alternate(Package, (Package, N)).
|
||||
condition_requirement((Condition, N), Name, (Package, N), A2, A3, A4)
|
||||
:- condition_requirement(Condition, Name, Package, A2, A3, A4),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
imposed_constraint((Condition, N), Name, (Package, N))
|
||||
:- imposed_constraint(Condition, Name, Package),
|
||||
package_alternate(Package, (Package, N)).
|
||||
imposed_constraint((Condition, N), Name, (Package, N), A2)
|
||||
:- imposed_constraint(Condition, Name, Package, A2),
|
||||
package_alternate(Package, (Package, N)).
|
||||
imposed_constraint((Condition, N), Name, (Package, N), A2, A3)
|
||||
:- imposed_constraint(Condition, Name, Package, A2, A3),
|
||||
package_alternate(Package, (Package, N)).
|
||||
imposed_constraint((Condition, N), Name, (Package, N), A2, A3, A4)
|
||||
:- imposed_constraint(Condition, Name, Package, A2, A3, A4),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
possible_provider((Package, N), Virtual)
|
||||
:- possible_provider(Package, Virtual),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
provider_condition((Condition, N), (Package, N), Virtual)
|
||||
:- provider_condition(Condition, Package, Virtual),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
dependency_condition((Condition, N), (Package, N), Dependency)
|
||||
:- dependency_condition(Condition, Package, Dependency),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
dependency_type((Condition, N), Type)
|
||||
:- dependency_condition((Condition, N), (_, N), _),
|
||||
dependency_type(Condition, Type).
|
||||
|
||||
% Do we need one for default_provider_preference?
|
||||
|
||||
requirement_group((Package, N), RGroupID)
|
||||
:- requirement_group(Package, RGroupID),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
requirement_policy((Package, N), RGroupID, Policy)
|
||||
:- requirement_policy(Package, RGroupID, Policy),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
requirement_group_member((MemberID, N), (Package, N), RGroupID)
|
||||
:- requirement_group_member(MemberID, Package, RGroupID),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
requirement_has_weight((MemberID, N), Weight)
|
||||
:- requirement_has_weight(MemberID, Weight),
|
||||
requirement_group_member((MemberID, N), (_, N), _).
|
||||
|
||||
buildable_false((Package, N))
|
||||
:- buildable_false(Package),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
possible_external((Condition, N), (Package, N), Index)
|
||||
:- possible_external(Condition, Package, Index),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
variant_default_value_from_packages_yaml((Package, N), Variant, Value)
|
||||
:- variant_default_value_from_packages_yaml(Package, Variant, Value),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
target_weight((Package, N), Target, Weight)
|
||||
:- target_weight(Package, Target, Weight),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
version_satisfies((Package, N), Constraint, Version)
|
||||
:- version_satisfies(Package, Constraint, Version),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
installed_hash((Package, N), Hash)
|
||||
:- installed_hash(Package, Hash),
|
||||
package_alternate(Package, (Package, N)).
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Version semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
@@ -73,8 +226,7 @@ version_declared(Package, Version, Weight) :- version_declared(Package, Version,
|
||||
version_declared(Package, Version) :- version_declared(Package, Version, _).
|
||||
|
||||
% a spec with a git hash version is equivalent to one with the same matched version
|
||||
version_satisfies(Package, Constraint, HashVersion) :-
|
||||
version_satisfies(Package, Constraint, EquivalentVersion),
|
||||
version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package, Constraint, EquivalentVersion),
|
||||
version_equivalent(Package, HashVersion, EquivalentVersion).
|
||||
#defined version_equivalent/3.
|
||||
|
||||
@@ -143,22 +295,22 @@ possible_version_weight(Package, Weight)
|
||||
% versions, virtual nodes with version constraints require this rule to be
|
||||
% able to choose versions
|
||||
{ attr("version", Package, Version) : version_satisfies(Package, Constraint, Version) }
|
||||
:- attr("node_version_satisfies", Package, Constraint).
|
||||
:- attr("node_version_satisfies", Package, Constraint),
|
||||
attr("node", Package).
|
||||
|
||||
% If there is at least one version that satisfies the constraint, impose a lower
|
||||
% bound on the choice rule to avoid false positives with the error below
|
||||
1 { attr("version", Package, Version) : version_satisfies(Package, Constraint, Version) }
|
||||
:- attr("node_version_satisfies", Package, Constraint),
|
||||
version_satisfies(Package, Constraint, _).
|
||||
version_satisfies(Package, Constraint, _),
|
||||
attr("node", Package).
|
||||
|
||||
% More specific error message if the version cannot satisfy some constraint
|
||||
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
|
||||
error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
|
||||
:- attr("node_version_satisfies", Package, Constraint),
|
||||
C = #count{
|
||||
Version
|
||||
: attr("version", Package, Version), version_satisfies(Package, Constraint, Version)
|
||||
},
|
||||
attr("node", Package),
|
||||
C = #count{ Version : attr("version", Package, Version), version_satisfies(Package, Constraint, Version)},
|
||||
C < 1.
|
||||
|
||||
attr("node_version_satisfies", Package, Constraint)
|
||||
@@ -252,6 +404,19 @@ do_not_impose(ID) :-
|
||||
% They're only triggered if the associated dependency condition holds.
|
||||
attr("depends_on", Package, Dependency, Type)
|
||||
:- dependency_holds(Package, Dependency, Type),
|
||||
Type != "build",
|
||||
not virtual(Dependency).
|
||||
|
||||
attr("depends_on", Package, Dependency, "build")
|
||||
:- dependency_holds(Package, Dependency, "build"),
|
||||
dependency_holds(Package, Dependency, Type),
|
||||
Type != "build",
|
||||
not virtual(Dependency).
|
||||
|
||||
1 { attr("depends_on", Package, DepMangled, "build") : package_alternate(Dependency, DepMangled) } 1
|
||||
:- dependency_holds(Package, Dependency, "build"),
|
||||
not dependency_holds(Package, Dependency, "link"),
|
||||
not dependency_holds(Package, Dependency, "run"),
|
||||
not virtual(Dependency).
|
||||
|
||||
% every root must be a node
|
||||
@@ -301,9 +466,24 @@ error(0, Msg) :- attr("node", Package),
|
||||
% provider for that virtual then it depends on the provider
|
||||
attr("depends_on", Package, Provider, Type)
|
||||
:- dependency_holds(Package, Virtual, Type),
|
||||
Type != "build",
|
||||
provider(Provider, Virtual),
|
||||
not external(Package).
|
||||
|
||||
attr("depends_on", Package, Provider, "build")
|
||||
:- dependency_holds(Package, Virtual, "build"),
|
||||
dependency_holds(Package, Virtual, Type),
|
||||
Type != "build",
|
||||
provider(Provider, Virtual),
|
||||
not external(Package).
|
||||
|
||||
1 { attr("depends_on", Package, ProviderMangled, "build") : package_alternate(Provider, ProviderMangled) } 1
|
||||
:- dependency_holds(Package, Virtual, "build"),
|
||||
not dependency_holds(Package, Virtual, "link"),
|
||||
not dependency_holds(Package, Virtual, "run"),
|
||||
provider(Provider, Virtual),
|
||||
not external(Package).
|
||||
|
||||
% dependencies on virtuals also imply that the virtual is a virtual node
|
||||
attr("virtual_node", Virtual)
|
||||
:- dependency_holds(Package, Virtual, Type),
|
||||
@@ -1089,11 +1269,11 @@ build(Package) :- not attr("hash", Package, _), attr("node", Package).
|
||||
% 200+ Shifted priorities for build nodes; correspond to priorities 0 - 99.
|
||||
% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
|
||||
% 0 - 99 Priorities for non-built nodes.
|
||||
build_priority(Package, build_prio) :- build(Package), attr("node", Package), optimize_for_reuse().
|
||||
build_priority(Package, 0) :- not build(Package), attr("node", Package), optimize_for_reuse().
|
||||
build_priority(Package, 200) :- build(Package), attr("node", Package), optimize_for_reuse().
|
||||
build_priority(Package, 0) :- not build(Package), attr("node", Package), optimize_for_reuse().
|
||||
|
||||
% don't adjust build priorities if reuse is not enabled
|
||||
build_priority(Package, build_prio) :- attr("node", Package), not optimize_for_reuse().
|
||||
build_priority(Package, 0) :- attr("node", Package), not optimize_for_reuse().
|
||||
|
||||
% don't assign versions from installed packages unless reuse is enabled
|
||||
% NOTE: that "installed" means the declared version was only included because
|
||||
@@ -1113,56 +1293,22 @@ build_priority(Package, build_prio) :- attr("node", Package), not optimize_for_r
|
||||
|
||||
#defined installed_hash/2.
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Calculate min depth of nodes in the DAG
|
||||
% We use this to optimize nodes closer to roots with higher precedence.
|
||||
%-----------------------------------------------------------------------------
|
||||
#const max_depth = 4.
|
||||
|
||||
% roots have depth 0.
|
||||
depth(Package, 0) :- attr("root", Package).
|
||||
|
||||
%depth(Package, D+1) :- depth(Dependent, D), depends_on(Dependent, Package), D < max_depth.
|
||||
|
||||
%parent_depth(Package, D) :-
|
||||
% depends_on(Dependent, Package),
|
||||
% depth(Dependent, D),
|
||||
% D < max_depth - 1.
|
||||
|
||||
%depth(Package, M+1) :-
|
||||
% M = #min{ D: parent_depth(Package, D); max_depth - 1 },
|
||||
% attr("node", Package).
|
||||
|
||||
% other nodes' depth is the minimum depth of any dependent plus one.
|
||||
depth(Package, N + 1) :-
|
||||
N = #min{
|
||||
D: depends_on(Dependent, Package),
|
||||
depth(Dependent, D),
|
||||
D < max_depth;
|
||||
max_depth - 1
|
||||
},
|
||||
N = 0..max_depth - 1,
|
||||
not attr("root", Package),
|
||||
attr("node", Package).
|
||||
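The rules above give roots depth 0 and every other node one more than the minimum depth of its dependents, capped by max_depth. A small Python equivalent over a toy DAG, handy for sanity-checking the encoding (package names are made up):

from collections import deque

def node_depths(roots, depends_on, max_depth=4):
    """Min distance from any root, capped at max_depth (mirrors the ASP rules)."""
    depth = {r: 0 for r in roots}
    queue = deque(roots)
    while queue:
        parent = queue.popleft()
        for child in depends_on.get(parent, ()):
            d = min(depth[parent] + 1, max_depth)
            if child not in depth or d < depth[child]:
                depth[child] = d
                queue.append(child)
    return depth

depends_on = {"app": ["libfoo"], "libfoo": ["zlib"], "zlib": []}
print(node_depths(["app"], depends_on))  # {'app': 0, 'libfoo': 1, 'zlib': 2}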
|
||||
|
||||
%-----------------------------------------------------------------
|
||||
% Optimization to avoid errors
|
||||
%-----------------------------------------------------------------
|
||||
% Some errors are handled as rules instead of constraints because
|
||||
% it allows us to explain why something failed. Here we optimize
|
||||
% HEAVILY against the facts generated by those rules.
|
||||
#minimize{ 0@1000: #true}.
|
||||
#minimize{ 0@1001: #true}.
|
||||
#minimize{ 0@1002: #true}.
|
||||
|
||||
% ensure that error costs are always in the solution.
|
||||
#minimize{ 0@error_prio + (0..max_error_priority): #true}.
|
||||
|
||||
% TODO: why 1000 and not just 1? 1000 seems unnecessary since priorities are lexicographic.
|
||||
#minimize{ 1000@error_prio+Priority,Msg: error(Priority, Msg) }.
|
||||
#minimize{ 1000@error_prio+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
|
||||
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
|
||||
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
|
||||
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
|
||||
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
|
||||
#minimize{ 1000@1000+Priority,Msg: error(Priority, Msg) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% How to optimize the spec (high to low priority)
|
||||
@@ -1172,157 +1318,202 @@ depth(Package, N + 1) :-
|
||||
% 2. a `#minimize{ 0@2 : #true }.` statement that ensures the criterion
|
||||
% is displayed (clingo doesn't display sums over empty sets by default)
|
||||
|
||||
% Ensure that values are returned by clingo for every distinct optimization criterion.
|
||||
% Some criteria are "fixed" and have only one bucket. Others are summed into multiple
|
||||
% buckets -- per build priority and per depth in the graph.
|
||||
% If we don't do this, it's very hard to read the sums back. We use `0@...` because
|
||||
% it doesn't affect the sums -- it just ensure that clingo returns them.
|
||||
|
||||
% "fixed" criteria have one bucket -- their priority.
|
||||
#minimize{ 0@N: opt_criterion(N, "fixed", _) }.
|
||||
|
||||
% "leveled" criteria sum into a bucket per depth in the graph, per build priority
|
||||
#minimize{
|
||||
0@(((max_depth - D - 1) * depth_offset) + N + build_prio)
|
||||
: opt_criterion(N, "leveled", _), depth(_, D)
|
||||
}.
|
||||
#minimize{
|
||||
0@(((max_depth - D - 1) * depth_offset) + N)
|
||||
: opt_criterion(N, "leveled", _), depth(_, D)
|
||||
}.
|
||||
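Each leveled criterion is therefore summed into a bucket whose clingo priority combines the criterion id, how close the node is to a root, and the build offset for to-be-built nodes. A tiny calculator for that bucket value, using the constants declared earlier in this file:

# Constants from the program above.
MAX_DEPTH = 4
DEPTH_OFFSET = 100      # depth_offset
BUILD_PRIO = 100_000    # build_prio

def leveled_priority(criterion_id, depth, built):
    """Priority bucket for one leveled criterion at one depth level."""
    prio = ((MAX_DEPTH - depth - 1) * DEPTH_OFFSET) + criterion_id
    return prio + (BUILD_PRIO if built else 0)

# "version badness" is criterion 55: shallower nodes land in higher buckets,
# and to-be-built nodes outrank reused ones by build_prio.
print(leveled_priority(55, depth=0, built=True))   # 100355
print(leveled_priority(55, depth=3, built=False))  # 55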
|
||||
% Try hard to reuse installed packages (i.e., minimize the number built)
|
||||
opt_criterion(build_prio, "fixed", "number of packages to build (vs. reuse)").
|
||||
#minimize { 1@build_prio,Package : build(Package), optimize_for_reuse() }.
|
||||
opt_criterion(100, "number of packages to build (vs. reuse)").
|
||||
#minimize { 0@100: #true }.
|
||||
#minimize { 1@100,Package : build(Package), optimize_for_reuse() }.
|
||||
#defined optimize_for_reuse/0.
|
||||
|
||||
#minimize { 0@99: #true }.
|
||||
#minimize { PSID@99,Package,PSID : attr("node", (Package, PSID)) }.
|
||||
|
||||
% A condition group specifies one or more specs that must be satisfied.
|
||||
% Specs declared first are preferred, so we assign increasing weights and
|
||||
% minimize the weights.
|
||||
opt_criterion(65, "leveled", "requirement weight").
|
||||
#minimize{
|
||||
Weight@(65 + ((max_depth - D - 1) * depth_offset) + Priority), Package
|
||||
opt_criterion(75, "requirement weight").
|
||||
#minimize{ 0@275: #true }.
|
||||
#minimize{ 0@75: #true }.
|
||||
#minimize {
|
||||
Weight@75+Priority
|
||||
: requirement_weight(Package, Weight),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(60, "leveled", "deprecated versions used").
|
||||
% Minimize the number of deprecated versions being used
|
||||
opt_criterion(73, "deprecated versions used").
|
||||
#minimize{ 0@273: #true }.
|
||||
#minimize{ 0@73: #true }.
|
||||
#minimize{
|
||||
1@(60 + ((max_depth - D - 1) * depth_offset) + Priority), Package
|
||||
1@73+Priority,Package
|
||||
: attr("deprecated", Package, _),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(55, "leveled", "version badness").
|
||||
#minimize{
|
||||
Weight@(55 + ((max_depth - D - 1) * depth_offset) + Priority), Package
|
||||
: version_weight(Package, Weight),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
% Minimize the:
|
||||
% 1. Version weight
|
||||
% 2. Number of variants with a non default value, if not set
|
||||
% for the root package.
|
||||
opt_criterion(70, "version weight").
|
||||
#minimize{ 0@270: #true }.
|
||||
#minimize{ 0@70: #true }.
|
||||
#minimize {
|
||||
Weight@70+Priority
|
||||
: attr("root", Package), version_weight(Package, Weight),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(50, "leveled", "number of non-default variants").
|
||||
#minimize{
|
||||
1@(50 + ((max_depth - D - 1) * depth_offset) + Priority), Package, Variant, Value
|
||||
opt_criterion(65, "number of non-default variants (roots)").
|
||||
#minimize{ 0@265: #true }.
|
||||
#minimize{ 0@65: #true }.
|
||||
#minimize {
|
||||
1@65+Priority,Package,Variant,Value
|
||||
: variant_not_default(Package, Variant, Value),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
attr("root", Package),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(45, "leveled", "preferred providers").
|
||||
opt_criterion(60, "preferred providers for roots").
|
||||
#minimize{ 0@260: #true }.
|
||||
#minimize{ 0@60: #true }.
|
||||
#minimize{
|
||||
Weight@(45 + ((max_depth - D - 1) * depth_offset) + Priority), Provider, Virtual
|
||||
Weight@60+Priority,Provider,Virtual
|
||||
: provider_weight(Provider, Virtual, Weight),
|
||||
build_priority(Provider, Priority),
|
||||
depth(Package, D)
|
||||
attr("root", Provider),
|
||||
build_priority(Provider, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(40, "leveled", "default values of variants not being used").
|
||||
opt_criterion(55, "default values of variants not being used (roots)").
|
||||
#minimize{ 0@255: #true }.
|
||||
#minimize{ 0@55: #true }.
|
||||
#minimize{
|
||||
1@(40 + ((max_depth - D - 1) * depth_offset) + Priority), Package, Variant, Value
|
||||
1@55+Priority,Package,Variant,Value
|
||||
: variant_default_not_used(Package, Variant, Value),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
attr("root", Package),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(35, "leveled", "compiler mismatches (not from CLI)").
|
||||
#minimize{
|
||||
1@(35 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
|
||||
: compiler_mismatch(Dependent, Package),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
% Try to use default variants or variants that have been set
|
||||
opt_criterion(50, "number of non-default variants (non-roots)").
|
||||
#minimize{ 0@250: #true }.
|
||||
#minimize{ 0@50: #true }.
|
||||
#minimize {
|
||||
1@50+Priority,Package,Variant,Value
|
||||
: variant_not_default(Package, Variant, Value),
|
||||
not attr("root", Package),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(30, "leveled", "compiler mismatches (from CLI)").
|
||||
% Minimize the weights of the providers, i.e. use as much as
|
||||
% possible the most preferred providers
|
||||
opt_criterion(45, "preferred providers (non-roots)").
|
||||
#minimize{ 0@245: #true }.
|
||||
#minimize{ 0@45: #true }.
|
||||
#minimize{
|
||||
1@(30 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
|
||||
: compiler_mismatch_required(Dependent, Package),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
Weight@45+Priority,Provider,Virtual
|
||||
: provider_weight(Provider, Virtual, Weight), not attr("root", Provider),
|
||||
build_priority(Provider, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(25, "leveled", "OS mismatches").
|
||||
% Try to minimize the number of compiler mismatches in the DAG.
|
||||
opt_criterion(40, "compiler mismatches that are not from CLI").
|
||||
#minimize{ 0@240: #true }.
|
||||
#minimize{ 0@40: #true }.
|
||||
#minimize{
|
||||
1@(25 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
|
||||
: node_os_mismatch(Dependent, Package),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
1@40+Priority,Package,Dependency
|
||||
: compiler_mismatch(Package, Dependency),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(20, "leveled", "non-preferred compilers").
|
||||
opt_criterion(39, "compiler mismatches that are not from CLI").
|
||||
#minimize{ 0@239: #true }.
|
||||
#minimize{ 0@39: #true }.
|
||||
#minimize{
|
||||
Weight@(20 + ((max_depth - D - 1) * depth_offset) + Priority), Package
|
||||
: compiler_weight(Package, Weight),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
1@39+Priority,Package,Dependency
|
||||
: compiler_mismatch_required(Package, Dependency),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(15, "leveled", "non-preferred OS's").
|
||||
% Try to minimize the number of compiler mismatches in the DAG.
|
||||
opt_criterion(35, "OS mismatches").
|
||||
#minimize{ 0@235: #true }.
|
||||
#minimize{ 0@35: #true }.
|
||||
#minimize{
|
||||
Weight@(15 + ((max_depth - D - 1) * depth_offset) + Priority), Package
|
||||
1@35+Priority,Package,Dependency
|
||||
: node_os_mismatch(Package, Dependency),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(30, "non-preferred OS's").
|
||||
#minimize{ 0@230: #true }.
|
||||
#minimize{ 0@30: #true }.
|
||||
#minimize{
|
||||
Weight@30+Priority,Package
|
||||
: node_os_weight(Package, Weight),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
% Choose more recent versions for nodes
|
||||
opt_criterion(25, "version badness").
|
||||
#minimize{ 0@225: #true }.
|
||||
#minimize{ 0@25: #true }.
|
||||
#minimize{
|
||||
Weight@25+Priority,Package
|
||||
: version_weight(Package, Weight),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
% Try to use all the default values of variants
|
||||
opt_criterion(20, "default values of variants not being used (non-roots)").
|
||||
#minimize{ 0@220: #true }.
|
||||
#minimize{ 0@20: #true }.
|
||||
#minimize{
|
||||
1@20+Priority,Package,Variant,Value
|
||||
: variant_default_not_used(Package, Variant, Value),
|
||||
not attr("root", Package),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
% Try to use preferred compilers
|
||||
opt_criterion(15, "non-preferred compilers").
|
||||
#minimize{ 0@215: #true }.
|
||||
#minimize{ 0@15: #true }.
|
||||
#minimize{
|
||||
Weight@15+Priority,Package
|
||||
: compiler_weight(Package, Weight),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
% Minimize the number of mismatches for targets in the DAG, try
|
||||
% to select the preferred target.
|
||||
opt_criterion(10, "leveled", "target mismatches").
|
||||
opt_criterion(10, "target mismatches").
|
||||
#minimize{ 0@210: #true }.
|
||||
#minimize{ 0@10: #true }.
|
||||
#minimize{
|
||||
1@(10 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
|
||||
: node_target_mismatch(Dependent, Package),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
1@10+Priority,Package,Dependency
|
||||
: node_target_mismatch(Package, Dependency),
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
opt_criterion(5, "leveled", "non-preferred targets").
|
||||
opt_criterion(5, "non-preferred targets").
|
||||
#minimize{ 0@205: #true }.
|
||||
#minimize{ 0@5: #true }.
|
||||
#minimize{
|
||||
Weight@(5 + ((max_depth - D - 1) * depth_offset) + Priority), Package
|
||||
Weight@5+Priority,Package
|
||||
: node_target_weight(Package, Weight),
|
||||
build_priority(Package, Priority),
|
||||
depth(Package, D)
|
||||
build_priority(Package, Priority)
|
||||
}.
|
||||
|
||||
%-----------------
|
||||
% Domain heuristic
|
||||
%-----------------
|
||||
|
||||
#heuristic attr("version", Package, Version) : version_declared(Package, Version, 0), attr("node", Package). [10, true]
|
||||
|
||||
|
||||
#heuristic version_weight(Package, 0) : version_declared(Package, Version, 0), attr("node", Package). [10, true]
|
||||
#heuristic attr("node_target", Package, Target) : package_target_weight(Target, Package, 0), attr("node", Package). [10, true]
|
||||
#heuristic node_target_weight(Package, 0) : attr("node", Package). [10, true]
|
||||
#heuristic attr("variant_value", Package, Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", Package). [10, true]
|
||||
|
||||
%#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
|
||||
%#heuristic attr("node", Package) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
|
||||
%#heuristic attr("node_os", Package, OS) : buildable_os(OS). [10, true]
|
||||
%#heuristic attr("node_target", Dependency, Target): depends_on(Package, Dependency), attr("node_target", Package, Target). [20, true]
|
||||
%#heuristic attr("node_os", Dependency, OS): depends_on(Package, Dependency), attr("node_os", Package, OS). [20, true]
|
||||
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
|
||||
#heuristic attr("node", Package) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
|
||||
#heuristic attr("node_os", Package, OS) : buildable_os(OS). [10, true]
|
||||
|
||||
%-----------
|
||||
% Notes
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
#show attr/4.
|
||||
|
||||
% names of optimization criteria
|
||||
#show opt_criterion/3.
|
||||
#show opt_criterion/2.
|
||||
|
||||
% error types
|
||||
#show error/2.
|
||||
@@ -25,16 +25,16 @@
|
||||
#show error/6.
|
||||
#show error/7.
|
||||
|
||||
% depths
|
||||
#show depth/2.
|
||||
%#show parent_depth/2.
|
||||
|
||||
% debug
|
||||
|
||||
%#show depends_on/2.
|
||||
|
||||
%node(Package) :- attr("node", Package).
|
||||
%#show node/1.
|
||||
|
||||
%version(Package, Version) :- attr("version", Package, Version).
|
||||
%#show version/2.
|
||||
#show provider/2.
|
||||
#show virtual_condition_holds/2.
|
||||
#show provider_condition/3.
|
||||
#show possible_provider/2.
|
||||
#show dependency_condition/3.
|
||||
#show condition_holds/1.
|
||||
#show condition_requirement/3.
|
||||
#show condition_requirement/4.
|
||||
#show condition_requirement/5.
|
||||
#show condition_requirement/6.
|
||||
#show dependency_holds/3.
|
||||
#show dependency_type/2.
|
||||
@@ -168,7 +168,7 @@
|
||||
)
|
||||
|
||||
#: specfile format version. Must increase monotonically
|
||||
specfile_format_version = 3
|
||||
SPECFILE_FORMAT_VERSION = 3
|
||||
|
||||
|
||||
def colorize_spec(spec):
|
||||
@@ -1529,14 +1529,14 @@ def _set_compiler(self, compiler):
|
||||
)
|
||||
self.compiler = compiler
|
||||
|
||||
def _add_dependency(self, spec, deptypes):
|
||||
def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
"""Called by the parser to add another spec as a dependency."""
|
||||
if spec.name not in self._dependencies:
|
||||
self.add_dependency_edge(spec, deptypes)
|
||||
self.add_dependency_edge(spec, deptypes=deptypes)
|
||||
return
|
||||
|
||||
# Keep the intersection of constraints when a dependency is added
|
||||
# multiple times. Currently we only allow identical edge types.
|
||||
# multiple times. Currently, we only allow identical edge types.
|
||||
orig = self._dependencies[spec.name]
|
||||
try:
|
||||
dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
|
||||
@@ -1550,34 +1550,39 @@ def _add_dependency(self, spec, deptypes):
|
||||
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
|
||||
)
|
||||
|
||||
def add_dependency_edge(self, dependency_spec, deptype):
|
||||
def add_dependency_edge(
|
||||
self,
|
||||
dependency_spec: "Spec",
|
||||
*,
|
||||
deptypes: dp.DependencyArgument,
|
||||
):
|
||||
"""Add a dependency edge to this spec.
|
||||
|
||||
Args:
|
||||
dependency_spec (Spec): spec of the dependency
|
||||
deptype (str or tuple): dependency types
|
||||
dependency_spec: spec of the dependency
|
||||
deptypes: dependency types for this edge
|
||||
"""
|
||||
deptype = dp.canonical_deptype(deptype)
|
||||
deptypes = dp.canonical_deptype(deptypes)
|
||||
|
||||
# Check if we need to update edges that are already present
|
||||
selected = self._dependencies.select(child=dependency_spec.name)
|
||||
for edge in selected:
|
||||
if any(d in edge.deptypes for d in deptype):
|
||||
if any(d in edge.deptypes for d in deptypes):
|
||||
msg = (
|
||||
'cannot add a dependency on "{0.spec}" of {1} type '
|
||||
'when the "{0.parent}" has the edge {0!s} already'
|
||||
)
|
||||
raise spack.error.SpecError(msg.format(edge, deptype))
|
||||
raise spack.error.SpecError(msg.format(edge, deptypes))
|
||||
|
||||
for edge in selected:
|
||||
if id(dependency_spec) == id(edge.spec):
|
||||
# If we are here, it means the edge object was previously added to
|
||||
# both the parent and the child. When we update this object they'll
|
||||
# both see the deptype modification.
|
||||
edge.add_type(deptype)
|
||||
edge.add_type(deptypes)
|
||||
return
|
||||
|
||||
edge = DependencySpec(self, dependency_spec, deptypes=deptype)
|
||||
edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
|
||||
self._dependencies.add(edge)
|
||||
dependency_spec._dependents.add(edge)
|
||||
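add_dependency_edge first canonicalizes the deptypes argument and refuses to add an edge whose types overlap an existing edge to the same child. A compact sketch of that normalize-then-check logic; the helper below is illustrative and is not Spack's canonical_deptype:

ALL_DEPTYPES = ("build", "link", "run", "test")

def canonicalize(deptypes):
    """Normalize a deptype argument to a sorted tuple (sketch of the idea only)."""
    if deptypes == "all":
        return ALL_DEPTYPES
    if isinstance(deptypes, str):
        deptypes = (deptypes,)
    return tuple(sorted(deptypes))

def check_new_edge(existing_types, new_deptypes):
    """Refuse to add an edge whose types overlap an existing edge, as above."""
    new_deptypes = canonicalize(new_deptypes)
    if any(t in existing_types for t in new_deptypes):
        raise ValueError(f"edge with types {new_deptypes} overlaps {existing_types}")
    return new_deptypes

print(check_new_edge(("link",), "run"))  # ('run',)
print(canonicalize("all"))               # ('build', 'link', 'run', 'test')
# check_new_edge(("link",), ("build", "link"))  -> would raise ValueError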
|
||||
@@ -2027,7 +2032,7 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
node_list.append(node)
|
||||
hash_set.add(node_hash)
|
||||
|
||||
meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
|
||||
meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
|
||||
inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
|
||||
spec_dict = syaml.syaml_dict([("spec", inner_dict)])
|
||||
return spec_dict
|
||||
@@ -2063,137 +2068,13 @@ def to_json(self, stream=None, hash=ht.dag_hash):
|
||||
|
||||
@staticmethod
|
||||
def from_specfile(path):
|
||||
"""Construct a spec from aJSON or YAML spec file path"""
|
||||
"""Construct a spec from a JSON or YAML spec file path"""
|
||||
with open(path, "r") as fd:
|
||||
file_content = fd.read()
|
||||
if path.endswith(".json"):
|
||||
return Spec.from_json(file_content)
|
||||
return Spec.from_yaml(file_content)
|
||||
|
||||
@staticmethod
|
||||
def from_node_dict(node):
|
||||
spec = Spec()
|
||||
if "name" in node.keys():
|
||||
# New format
|
||||
name = node["name"]
|
||||
else:
|
||||
# Old format
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
for h in ht.hashes:
|
||||
setattr(spec, h.attr, node.get(h.name, None))
|
||||
|
||||
spec.name = name
|
||||
spec.namespace = node.get("namespace", None)
|
||||
|
||||
if "version" in node or "versions" in node:
|
||||
spec.versions = vn.VersionList.from_dict(node)
|
||||
|
||||
if "arch" in node:
|
||||
spec.architecture = ArchSpec.from_dict(node)
|
||||
|
||||
if "compiler" in node:
|
||||
spec.compiler = CompilerSpec.from_dict(node)
|
||||
else:
|
||||
spec.compiler = None
|
||||
|
||||
if "parameters" in node:
|
||||
for name, values in node["parameters"].items():
|
||||
if name in _valid_compiler_flags:
|
||||
spec.compiler_flags[name] = []
|
||||
for val in values:
|
||||
spec.compiler_flags.add_flag(name, val, False)
|
||||
else:
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
|
||||
elif "variants" in node:
|
||||
for name, value in node["variants"].items():
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
|
||||
for name in FlagMap.valid_compiler_flags():
|
||||
spec.compiler_flags[name] = []
|
||||
|
||||
spec.external_path = None
|
||||
spec.external_modules = None
|
||||
if "external" in node:
|
||||
# This conditional is needed because sometimes this function is
|
||||
# called with a node already constructed that contains a 'versions'
|
||||
# and 'external' field. Related to virtual packages provider
|
||||
# indexes.
|
||||
if node["external"]:
|
||||
spec.external_path = node["external"]["path"]
|
||||
spec.external_modules = node["external"]["module"]
|
||||
if spec.external_modules is False:
|
||||
spec.external_modules = None
|
||||
spec.extra_attributes = node["external"].get(
|
||||
"extra_attributes", syaml.syaml_dict()
|
||||
)
|
||||
|
||||
# specs read in are concrete unless marked abstract
|
||||
spec._concrete = node.get("concrete", True)
|
||||
|
||||
if "patches" in node:
|
||||
patches = node["patches"]
|
||||
if len(patches) > 0:
|
||||
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
|
||||
mvar.value = patches
|
||||
# FIXME: Monkey patches mvar to store patches order
|
||||
mvar._patches_in_order_of_appearance = patches
|
||||
|
||||
# Don't read dependencies here; from_dict() is used by
|
||||
# from_yaml() and from_json() to read the root *and* each dependency
|
||||
# spec.
|
||||
|
||||
return spec
|
||||
|
||||
@staticmethod
|
||||
def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
|
||||
build_spec_dict = node["build_spec"]
|
||||
return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
|
||||
|
||||
@staticmethod
|
||||
def dependencies_from_node_dict(node):
|
||||
if "name" in node.keys():
|
||||
# New format
|
||||
name = node["name"]
|
||||
else:
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
if "dependencies" not in node:
|
||||
return
|
||||
for t in Spec.read_yaml_dep_specs(node["dependencies"]):
|
||||
yield t
|
||||
|
||||
@staticmethod
|
||||
def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
|
||||
"""Read the DependencySpec portion of a YAML-formatted Spec.
|
||||
This needs to be backward-compatible with older spack spec
|
||||
formats so that reindex will work on old specs/databases.
|
||||
"""
|
||||
dep_iter = deps.items() if isinstance(deps, dict) else deps
|
||||
for dep in dep_iter:
|
||||
if isinstance(dep, tuple):
|
||||
dep_name, elt = dep
|
||||
else:
|
||||
elt = dep
|
||||
dep_name = dep["name"]
|
||||
if isinstance(elt, str):
|
||||
# original format, elt is just the dependency hash.
|
||||
dep_hash, deptypes = elt, ["build", "link"]
|
||||
elif isinstance(elt, tuple):
|
||||
# original deptypes format: (used tuples, not future-proof)
|
||||
dep_hash, deptypes = elt
|
||||
elif isinstance(elt, dict):
|
||||
# new format: elements of dependency spec are keyed.
|
||||
for h in ht.hashes:
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes = elt[h.name], elt["type"]
|
||||
hash_type = h.name
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
|
||||
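read_yaml_dep_specs has to accept three generations of dependency entries: a bare hash string, an old (hash, deptypes) tuple, and the current keyed dict. A compact sketch of that dispatch with a made-up hash, ignoring the multi-hash-type lookup the real code performs:

def parse_dep_entry(elt, hash_name="hash"):
    """Return (dep_hash, deptypes) from any of the three historical formats."""
    if isinstance(elt, str):            # original format: just the hash
        return elt, ["build", "link"]
    if isinstance(elt, tuple):          # old tuple format: (hash, deptypes)
        return elt
    if isinstance(elt, dict):           # current format: keyed fields
        return elt[hash_name], elt["type"]
    raise ValueError("Couldn't parse dependency spec.")

print(parse_dep_entry("abc123"))
print(parse_dep_entry(("abc123", ["link"])))
print(parse_dep_entry({"hash": "abc123", "type": ["build", "link"]}))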
|
||||
@staticmethod
|
||||
def override(init_spec, change_spec):
|
||||
# TODO: this doesn't account for the case where the changed spec
|
||||
@@ -2367,7 +2248,7 @@ def spec_and_dependency_types(s):
|
||||
dag_node, dependency_types = spec_and_dependency_types(s)
|
||||
|
||||
dependency_spec = spec_builder({dag_node: s_dependencies})
|
||||
spec._add_dependency(dependency_spec, dependency_types)
|
||||
spec._add_dependency(dependency_spec, deptypes=dependency_types)
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2380,54 +2261,14 @@ def from_dict(data):
|
||||
Args:
|
||||
data: a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
if isinstance(data["spec"], list): # Legacy specfile format
|
||||
return _spec_from_old_dict(data)
|
||||
# Legacy specfile format
|
||||
if isinstance(data["spec"], list):
|
||||
return SpecfileV1.load(data)
|
||||
|
||||
# Current specfile format
|
||||
nodes = data["spec"]["nodes"]
|
||||
hash_type = None
|
||||
any_deps = False
|
||||
|
||||
# Pass 0: Determine hash type
|
||||
for node in nodes:
|
||||
if "dependencies" in node.keys():
|
||||
any_deps = True
|
||||
for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
|
||||
if dhash_type:
|
||||
hash_type = dhash_type
|
||||
break
|
||||
|
||||
if not any_deps: # If we never see a dependency...
|
||||
hash_type = ht.dag_hash.name
|
||||
elif not hash_type: # Seen a dependency, still don't know hash_type
|
||||
raise spack.error.SpecError(
|
||||
"Spec dictionary contains malformed " "dependencies. Old format?"
|
||||
)
|
||||
|
||||
hash_dict = {}
|
||||
root_spec_hash = None
|
||||
|
||||
# Pass 1: Create a single lookup dictionary by hash
|
||||
for i, node in enumerate(nodes):
|
||||
node_hash = node[hash_type]
|
||||
node_spec = Spec.from_node_dict(node)
|
||||
hash_dict[node_hash] = node
|
||||
hash_dict[node_hash]["node_spec"] = node_spec
|
||||
if i == 0:
|
||||
root_spec_hash = node_hash
|
||||
if not root_spec_hash:
|
||||
raise spack.error.SpecError("Spec dictionary contains no nodes.")
|
||||
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node["node_spec"]
|
||||
for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]["node_spec"]
|
||||
|
||||
return hash_dict[root_spec_hash]["node_spec"]
|
||||
specfile_version = int(data["spec"]["_meta"]["version"])
|
||||
if specfile_version == 2:
|
||||
return SpecfileV2.load(data)
|
||||
return SpecfileV3.load(data)
|
||||
|
||||
@staticmethod
|
||||
def from_yaml(stream):
|
||||
@@ -2583,7 +2424,7 @@ def _replace_with(self, concrete):
|
||||
|
||||
# add the replacement, unless it is already a dep of dependent.
|
||||
if concrete.name not in dependent._dependencies:
|
||||
dependent._add_dependency(concrete, deptypes)
|
||||
dependent._add_dependency(concrete, deptypes=deptypes)
|
||||
|
||||
def _expand_virtual_packages(self, concretizer):
|
||||
"""Find virtual packages in this spec, replace them with providers,
|
||||
@@ -2928,7 +2769,7 @@ def _new_concretize(self, tests=False):
|
||||
result.raise_if_unsat()
|
||||
|
||||
# take the best answer
|
||||
opt, i, answer, _ = min(result.answers)
|
||||
opt, i, answer = min(result.answers)
|
||||
name = self.name
|
||||
# TODO: Consolidate this code with similar code in solve.py
|
||||
if self.virtual:
|
||||
@@ -3254,7 +3095,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
# Add merged spec to my deps and recurse
|
||||
spec_dependency = spec_deps[dep.name]
|
||||
if dep.name not in self._dependencies:
|
||||
self._add_dependency(spec_dependency, dependency.type)
|
||||
self._add_dependency(spec_dependency, deptypes=dependency.type)
|
||||
|
||||
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
|
||||
return changed
|
||||
@@ -3585,7 +3426,7 @@ def _constrain_dependencies(self, other):
|
||||
dep_spec_copy = other._get_dependency(name)
|
||||
dep_copy = dep_spec_copy.spec
|
||||
deptypes = dep_spec_copy.deptypes
|
||||
self._add_dependency(dep_copy.copy(), deptypes)
|
||||
self._add_dependency(dep_copy.copy(), deptypes=deptypes)
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
@@ -3898,7 +3739,7 @@ def spid(spec):
|
||||
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
|
||||
|
||||
new_specs[spid(edge.parent)].add_dependency_edge(
|
||||
new_specs[spid(edge.spec)], edge.deptypes
|
||||
new_specs[spid(edge.spec)], deptypes=edge.deptypes
|
||||
)
|
||||
|
||||
def copy(self, deps=True, **kwargs):
|
||||
@@ -4813,12 +4654,12 @@ def from_self(name, transitive):
|
||||
if name in self_nodes:
|
||||
for edge in self[name].edges_to_dependencies():
|
||||
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
|
||||
nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
|
||||
nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
|
||||
if any(dep not in self_nodes for dep in self[name]._dependencies):
|
||||
nodes[name].build_spec = self[name].build_spec
|
||||
else:
|
||||
for edge in other[name].edges_to_dependencies():
|
||||
nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
|
||||
nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
|
||||
if any(dep not in other_nodes for dep in other[name]._dependencies):
|
||||
nodes[name].build_spec = other[name].build_spec
|
||||
|
||||
@@ -4891,40 +4732,252 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
|
||||
# Update with additional constraints from other spec
|
||||
for name in current_spec_constraint.direct_dep_difference(merged_spec):
|
||||
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
|
||||
merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
|
||||
merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
|
||||
|
||||
return merged_spec
|
||||
|
||||
|
||||
def _spec_from_old_dict(data):
|
||||
"""Construct a spec from JSON/YAML using the format version 1.
|
||||
Note: Version 1 format has no notion of a build_spec, and names are
|
||||
guaranteed to be unique.
|
||||
class SpecfileReaderBase:
|
||||
@classmethod
|
||||
def from_node_dict(cls, node):
|
||||
spec = Spec()
|
||||
|
||||
Parameters:
|
||||
data -- a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
nodes = data["spec"]
|
||||
name, node = cls.name_and_data(node)
|
||||
for h in ht.hashes:
|
||||
setattr(spec, h.attr, node.get(h.name, None))
|
||||
|
||||
# Read nodes out of list. Root spec is the first element;
|
||||
# dependencies are the following elements.
|
||||
dep_list = [Spec.from_node_dict(node) for node in nodes]
|
||||
if not dep_list:
|
||||
raise spack.error.SpecError("YAML spec contains no nodes.")
|
||||
deps = dict((spec.name, spec) for spec in dep_list)
|
||||
spec = dep_list[0]
|
||||
spec.name = name
|
||||
spec.namespace = node.get("namespace", None)
|
||||
|
||||
for node in nodes:
|
||||
# get dependency dict from the node.
|
||||
if "version" in node or "versions" in node:
|
||||
spec.versions = vn.VersionList.from_dict(node)
|
||||
|
||||
if "arch" in node:
|
||||
spec.architecture = ArchSpec.from_dict(node)
|
||||
|
||||
if "compiler" in node:
|
||||
spec.compiler = CompilerSpec.from_dict(node)
|
||||
else:
|
||||
spec.compiler = None
|
||||
|
||||
for name, values in node.get("parameters", {}).items():
|
||||
if name in _valid_compiler_flags:
|
||||
spec.compiler_flags[name] = []
|
||||
for val in values:
|
||||
spec.compiler_flags.add_flag(name, val, False)
|
||||
else:
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
|
||||
|
||||
spec.external_path = None
|
||||
spec.external_modules = None
|
||||
if "external" in node:
|
||||
# This conditional is needed because sometimes this function is
|
||||
# called with a node already constructed that contains a 'versions'
|
||||
# and 'external' field. Related to virtual packages provider
|
||||
# indexes.
|
||||
if node["external"]:
|
||||
spec.external_path = node["external"]["path"]
|
||||
spec.external_modules = node["external"]["module"]
|
||||
if spec.external_modules is False:
|
||||
spec.external_modules = None
|
||||
spec.extra_attributes = node["external"].get(
|
||||
"extra_attributes", syaml.syaml_dict()
|
||||
)
|
||||
|
||||
# specs read in are concrete unless marked abstract
|
||||
spec._concrete = node.get("concrete", True)
|
||||
|
||||
if "patches" in node:
|
||||
patches = node["patches"]
|
||||
if len(patches) > 0:
|
||||
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
|
||||
mvar.value = patches
|
||||
# FIXME: Monkey patches mvar to store patches order
|
||||
mvar._patches_in_order_of_appearance = patches
|
||||
|
||||
# Don't read dependencies here; from_dict() is used by
|
||||
# from_yaml() and from_json() to read the root *and* each dependency
|
||||
# spec.
|
||||
|
||||
return spec
|
||||
|
||||
@classmethod
|
||||
def _load(cls, data):
|
||||
"""Construct a spec from JSON/YAML using the format version 2.
|
||||
|
||||
This format is used in Spack v0.17, was introduced in
|
||||
https://github.com/spack/spack/pull/22845
|
||||
|
||||
Args:
|
||||
data: a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
# Current specfile format
|
||||
nodes = data["spec"]["nodes"]
|
||||
hash_type = None
|
||||
any_deps = False
|
||||
|
||||
# Pass 0: Determine hash type
|
||||
for node in nodes:
|
||||
for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
|
||||
any_deps = True
|
||||
if dhash_type:
|
||||
hash_type = dhash_type
|
||||
break
|
||||
|
||||
if not any_deps: # If we never see a dependency...
|
||||
hash_type = ht.dag_hash.name
|
||||
elif not hash_type: # Seen a dependency, still don't know hash_type
|
||||
raise spack.error.SpecError(
|
||||
"Spec dictionary contains malformed dependencies. Old format?"
|
||||
)
|
||||
|
||||
hash_dict = {}
|
||||
root_spec_hash = None
|
||||
|
||||
# Pass 1: Create a single lookup dictionary by hash
|
||||
for i, node in enumerate(nodes):
|
||||
node_hash = node[hash_type]
|
||||
node_spec = cls.from_node_dict(node)
|
||||
hash_dict[node_hash] = node
|
||||
hash_dict[node_hash]["node_spec"] = node_spec
|
||||
if i == 0:
|
||||
root_spec_hash = node_hash
|
||||
|
||||
if not root_spec_hash:
|
||||
raise spack.error.SpecError("Spec dictionary contains no nodes.")
|
||||
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node["node_spec"]
|
||||
for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]["node_spec"]
|
||||
|
||||
return hash_dict[root_spec_hash]["node_spec"]
|
||||
|
||||
|
||||
class SpecfileV1(SpecfileReaderBase):
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
"""Construct a spec from JSON/YAML using the format version 1.
|
||||
|
||||
Note: Version 1 format has no notion of a build_spec, and names are
|
||||
guaranteed to be unique. This function is guaranteed to read specs as
old as v0.10; it has not been checked against older formats.
|
||||
|
||||
Args:
|
||||
data: a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
nodes = data["spec"]
|
||||
|
||||
# Read nodes out of list. Root spec is the first element;
|
||||
# dependencies are the following elements.
|
||||
dep_list = [cls.from_node_dict(node) for node in nodes]
|
||||
if not dep_list:
|
||||
raise spack.error.SpecError("specfile contains no nodes.")
|
||||
|
||||
deps = {spec.name: spec for spec in dep_list}
|
||||
result = dep_list[0]
|
||||
|
||||
for node in nodes:
|
||||
# get dependency dict from the node.
|
||||
name, data = cls.name_and_data(node)
|
||||
for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes)
|
||||
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def name_and_data(cls, node):
|
||||
name = next(iter(node))
|
||||
node = node[name]
|
||||
return name, node
|
||||
|
||||
if "dependencies" not in node[name]:
|
||||
continue
|
||||
@classmethod
|
||||
def dependencies_from_node_dict(cls, node):
|
||||
if "dependencies" not in node:
|
||||
return []
|
||||
|
||||
for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
|
||||
deps[name]._add_dependency(deps[dname], dtypes)
|
||||
for t in cls.read_specfile_dep_specs(node["dependencies"]):
|
||||
yield t
|
||||
|
||||
return spec
|
||||
@classmethod
|
||||
def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
"""Read the DependencySpec portion of a YAML-formatted Spec.
|
||||
This needs to be backward-compatible with older spack spec
|
||||
formats so that reindex will work on old specs/databases.
|
||||
"""
|
||||
for dep_name, elt in deps.items():
|
||||
if isinstance(elt, dict):
|
||||
for h in ht.hashes:
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes = elt[h.name], elt["type"]
|
||||
hash_type = h.name
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
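For orientation, a hedged sketch of the v1 dependency mapping this reader accepts and how it is consumed. The hash value is made up, the import location reflects the classes added in this diff, and it assumes the default DAG hash is stored under the key "hash":

    from spack.spec import SpecfileV1

    # v1 layout: dependencies are keyed by name; each entry carries a hash and deptypes.
    deps_v1 = {"zlib": {"hash": "abc123abc123abc123abc123abc123ab", "type": ["build", "link"]}}

    for name, dhash, dtypes, hash_type in SpecfileV1.read_specfile_dep_specs(deps_v1):
        print(name, dhash, dtypes, hash_type)  # e.g. zlib abc123... ['build', 'link'] hash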
class SpecfileV2(SpecfileReaderBase):
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
result = cls._load(data)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def name_and_data(cls, node):
|
||||
return node["name"], node
|
||||
|
||||
@classmethod
|
||||
def dependencies_from_node_dict(cls, node):
|
||||
return cls.read_specfile_dep_specs(node.get("dependencies", []))
|
||||
|
||||
@classmethod
|
||||
def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
"""Read the DependencySpec portion of a YAML-formatted Spec.
|
||||
This needs to be backward-compatible with older spack spec
|
||||
formats so that reindex will work on old specs/databases.
|
||||
"""
|
||||
if not isinstance(deps, list):
|
||||
raise spack.error.SpecError("Spec dictionary contains malformed dependencies")
|
||||
|
||||
result = []
|
||||
for dep in deps:
|
||||
elt = dep
|
||||
dep_name = dep["name"]
|
||||
if isinstance(elt, dict):
|
||||
# new format: elements of dependency spec are keyed.
|
||||
for h in ht.hashes:
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type))
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def extract_info_from_dep(cls, elt, hash):
|
||||
dep_hash, deptypes = elt[hash.name], elt["type"]
|
||||
hash_type = hash.name
|
||||
virtuals = []
|
||||
return dep_hash, deptypes, hash_type, virtuals
|
||||
|
||||
@classmethod
|
||||
def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
|
||||
build_spec_dict = node["build_spec"]
|
||||
return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
|
||||
|
||||
|
||||
class SpecfileV3(SpecfileV2):
|
||||
pass
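SpecfileV3 reuses the v2 reader; together with the from_dict() hunk earlier in this diff, format selection reduces to a small dispatch. A condensed sketch for orientation only (the authoritative logic is the from_dict hunk above):

    def load_specfile(data):
        if isinstance(data["spec"], list):      # legacy list-of-nodes layout
            return SpecfileV1.load(data)
        version = int(data["spec"]["_meta"]["version"])
        if version == 2:
            return SpecfileV2.load(data)
        return SpecfileV3.load(data)            # v3 currently shares the v2 reader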
class LazySpecCache(collections.defaultdict):
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import itertools as it
|
||||
import itertools
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
@@ -14,7 +14,7 @@
|
||||
import spack.ci as ci
|
||||
import spack.ci_needs_workaround as cinw
|
||||
import spack.ci_optimization as ci_opt
|
||||
import spack.config as cfg
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.paths as spack_paths
|
||||
@@ -23,12 +23,16 @@
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
||||
|
||||
@pytest.fixture
def repro_dir(tmp_path):
    result = tmp_path / "repro_dir"
    result.mkdir()
    with fs.working_dir(str(tmp_path)):
        yield result
|
||||
|
||||
|
||||
def test_urlencode_string():
|
||||
s = "Spack Test Project"
|
||||
|
||||
s_enc = ci._url_encode_string(s)
|
||||
|
||||
assert s_enc == "Spack+Test+Project"
|
||||
assert ci._url_encode_string("Spack Test Project") == "Spack+Test+Project"
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
|
||||
@@ -54,16 +58,16 @@ def assert_present(config):
|
||||
"install_missing_compilers" in config and config["install_missing_compilers"] is True
|
||||
)
|
||||
|
||||
original_config = cfg.get("config")
|
||||
original_config = spack.config.get("config")
|
||||
assert_missing(original_config)
|
||||
|
||||
ci.configure_compilers("FIND_ANY", scope="site")
|
||||
|
||||
second_config = cfg.get("config")
|
||||
second_config = spack.config.get("config")
|
||||
assert_missing(second_config)
|
||||
|
||||
ci.configure_compilers("INSTALL_MISSING")
|
||||
last_config = cfg.get("config")
|
||||
last_config = spack.config.get("config")
|
||||
assert_present(last_config)
|
||||
|
||||
|
||||
@@ -380,7 +384,7 @@ def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
|
||||
use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
|
||||
)
|
||||
|
||||
for opt, deps in it.product(*(((False, True),) * 2)):
|
||||
for opt, deps in itertools.product(*(((False, True),) * 2)):
|
||||
# neither optimizing nor converting needs->dependencies
|
||||
if not (opt or deps):
|
||||
# therefore, nothing to test
|
||||
@@ -453,33 +457,24 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_pack
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
def test_ci_process_command(tmpdir):
|
||||
repro_dir = tmpdir.join("repro_dir").strpath
|
||||
os.makedirs(repro_dir)
|
||||
result = ci.process_command("help", [], repro_dir)
|
||||
|
||||
assert os.path.exists(fs.join_path(repro_dir, "help.sh"))
|
||||
assert not result
|
||||
def test_ci_process_command(repro_dir):
|
||||
result = ci.process_command("help", commands=[], repro_dir=str(repro_dir))
|
||||
help_sh = repro_dir / "help.sh"
|
||||
assert help_sh.exists() and not result
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
def test_ci_process_command_fail(tmpdir, monkeypatch):
|
||||
import subprocess
|
||||
|
||||
err = "subprocess wait exception"
|
||||
def test_ci_process_command_fail(repro_dir, monkeypatch):
|
||||
msg = "subprocess wait exception"
|
||||
|
||||
def _fail(self, args):
|
||||
raise RuntimeError(err)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
monkeypatch.setattr(subprocess.Popen, "__init__", _fail)
|
||||
|
||||
repro_dir = tmpdir.join("repro_dir").strpath
|
||||
os.makedirs(repro_dir)
|
||||
|
||||
with pytest.raises(RuntimeError, match=err):
|
||||
ci.process_command("help", [], repro_dir)
|
||||
with pytest.raises(RuntimeError, match=msg):
|
||||
ci.process_command("help", [], str(repro_dir))
|
||||
|
||||
|
||||
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
|
||||
@@ -513,16 +508,15 @@ def test_ci_run_standalone_tests_missing_requirements(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
def test_ci_run_standalone_tests_not_installed_junit(
|
||||
tmpdir, working_env, default_mock_concretization, mock_test_stage, capfd
|
||||
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
|
||||
):
|
||||
log_file = tmpdir.join("junit.xml").strpath
|
||||
log_file = tmp_path / "junit.xml"
|
||||
args = {
|
||||
"log_file": log_file,
|
||||
"log_file": str(log_file),
|
||||
"job_spec": default_mock_concretization("printing-package"),
|
||||
"repro_dir": tmpdir.join("repro_dir").strpath,
|
||||
"repro_dir": str(repro_dir),
|
||||
"fail_fast": True,
|
||||
}
|
||||
os.makedirs(args["repro_dir"])
|
||||
|
||||
ci.run_standalone_tests(**args)
|
||||
err = capfd.readouterr()[1]
|
||||
@@ -534,16 +528,15 @@ def test_ci_run_standalone_tests_not_installed_junit(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
def test_ci_run_standalone_tests_not_installed_cdash(
|
||||
tmpdir, working_env, default_mock_concretization, mock_test_stage, capfd
|
||||
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
|
||||
):
|
||||
"""Test run_standalone_tests with cdash and related options."""
|
||||
log_file = tmpdir.join("junit.xml").strpath
|
||||
log_file = tmp_path / "junit.xml"
|
||||
args = {
|
||||
"log_file": log_file,
|
||||
"log_file": str(log_file),
|
||||
"job_spec": default_mock_concretization("printing-package"),
|
||||
"repro_dir": tmpdir.join("repro_dir").strpath,
|
||||
"repro_dir": str(repro_dir),
|
||||
}
|
||||
os.makedirs(args["repro_dir"])
|
||||
|
||||
# Cover when CDash handler provided (with the log file as well)
|
||||
ci_cdash = {
|
||||
@@ -564,9 +557,9 @@ def test_ci_run_standalone_tests_not_installed_cdash(
|
||||
assert "0 passed of 0" in out
|
||||
|
||||
# copy test results (though none)
|
||||
artifacts_dir = tmpdir.join("artifacts")
|
||||
fs.mkdirp(artifacts_dir.strpath)
|
||||
handler.copy_test_results(tmpdir.strpath, artifacts_dir.strpath)
|
||||
artifacts_dir = tmp_path / "artifacts"
|
||||
artifacts_dir.mkdir()
|
||||
handler.copy_test_results(str(tmp_path), str(artifacts_dir))
|
||||
err = capfd.readouterr()[1]
|
||||
assert "Unable to copy files" in err
|
||||
assert "No such file or directory" in err
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
|
||||
maintainers = spack.main.SpackCommand("maintainers")
|
||||
|
||||
MAINTAINED_PACKAGES = ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]
|
||||
|
||||
|
||||
def split(output):
|
||||
"""Split command line output into an array."""
|
||||
@@ -23,14 +25,12 @@ def split(output):
|
||||
|
||||
def test_maintained(mock_packages):
|
||||
out = split(maintainers("--maintained"))
|
||||
assert out == ["maintainers-1", "maintainers-2"]
|
||||
assert out == MAINTAINED_PACKAGES
|
||||
|
||||
|
||||
def test_unmaintained(mock_packages):
|
||||
out = split(maintainers("--unmaintained"))
|
||||
assert out == sorted(
|
||||
set(spack.repo.all_package_names()) - set(["maintainers-1", "maintainers-2"])
|
||||
)
|
||||
assert out == sorted(set(spack.repo.all_package_names()) - set(MAINTAINED_PACKAGES))
|
||||
|
||||
|
||||
def test_all(mock_packages, capfd):
|
||||
@@ -43,6 +43,14 @@ def test_all(mock_packages, capfd):
|
||||
"maintainers-2:",
|
||||
"user2,",
|
||||
"user3",
|
||||
"maintainers-3:",
|
||||
"user0,",
|
||||
"user1,",
|
||||
"user2,",
|
||||
"user3",
|
||||
"py-extension1:",
|
||||
"user1,",
|
||||
"user2",
|
||||
]
|
||||
|
||||
with capfd.disabled():
|
||||
@@ -58,23 +66,34 @@ def test_all_by_user(mock_packages, capfd):
|
||||
with capfd.disabled():
|
||||
out = split(maintainers("--all", "--by-user"))
|
||||
assert out == [
|
||||
"user0:",
|
||||
"maintainers-3",
|
||||
"user1:",
|
||||
"maintainers-1",
|
||||
"maintainers-1,",
|
||||
"maintainers-3,",
|
||||
"py-extension1",
|
||||
"user2:",
|
||||
"maintainers-1,",
|
||||
"maintainers-2",
|
||||
"maintainers-2,",
|
||||
"maintainers-3,",
|
||||
"py-extension1",
|
||||
"user3:",
|
||||
"maintainers-2",
|
||||
"maintainers-2,",
|
||||
"maintainers-3",
|
||||
]
|
||||
|
||||
with capfd.disabled():
|
||||
out = split(maintainers("--all", "--by-user", "user1", "user2"))
|
||||
assert out == [
|
||||
"user1:",
|
||||
"maintainers-1",
|
||||
"maintainers-1,",
|
||||
"maintainers-3,",
|
||||
"py-extension1",
|
||||
"user2:",
|
||||
"maintainers-1,",
|
||||
"maintainers-2",
|
||||
"maintainers-2,",
|
||||
"maintainers-3,",
|
||||
"py-extension1",
|
||||
]
|
||||
|
||||
|
||||
@@ -116,16 +135,16 @@ def test_maintainers_list_fails(mock_packages, capfd):
|
||||
def test_maintainers_list_by_user(mock_packages, capfd):
|
||||
with capfd.disabled():
|
||||
out = split(maintainers("--by-user", "user1"))
|
||||
assert out == ["maintainers-1"]
|
||||
assert out == ["maintainers-1", "maintainers-3", "py-extension1"]
|
||||
|
||||
with capfd.disabled():
|
||||
out = split(maintainers("--by-user", "user1", "user2"))
|
||||
assert out == ["maintainers-1", "maintainers-2"]
|
||||
assert out == ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]
|
||||
|
||||
with capfd.disabled():
|
||||
out = split(maintainers("--by-user", "user2"))
|
||||
assert out == ["maintainers-1", "maintainers-2"]
|
||||
assert out == ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]
|
||||
|
||||
with capfd.disabled():
|
||||
out = split(maintainers("--by-user", "user3"))
|
||||
assert out == ["maintainers-2"]
|
||||
assert out == ["maintainers-2", "maintainers-3"]
|
||||
|
||||
@@ -258,7 +258,7 @@ def test_has_test_method_fails(capsys):
|
||||
assert "is not a class" in captured
|
||||
|
||||
|
||||
def test_read_old_results(mock_test_stage):
|
||||
def test_read_old_results(mock_packages, mock_test_stage):
|
||||
"""Take test data generated before the switch to full hash everywhere
|
||||
and make sure we can still read it in"""
|
||||
# Test data was generated with:
|
||||
|
||||
@@ -1779,8 +1779,8 @@ def test_version_weight_and_provenance(self):
|
||||
num_specs = len(list(result_spec.traverse()))
|
||||
|
||||
criteria = [
|
||||
(None, num_specs - 1, "number of packages to build (vs. reuse)"),
|
||||
(2, 0, "NON-ROOTS: version badness"),
|
||||
(num_specs - 1, None, "number of packages to build (vs. reuse)"),
|
||||
(2, 0, "version badness"),
|
||||
]
|
||||
|
||||
for criterion in criteria:
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
from llnl.util.filesystem import getuid, join_path, mkdirp, touch, touchp
|
||||
|
||||
import spack.config
|
||||
import spack.directory_layout
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
import spack.package_base
|
||||
@@ -28,6 +29,7 @@
|
||||
import spack.schema.mirrors
|
||||
import spack.schema.packages
|
||||
import spack.schema.repos
|
||||
import spack.store
|
||||
import spack.util.path as spack_path
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
||||
@@ -451,9 +453,9 @@ def test_substitute_date(mock_low_high_config):
|
||||
assert date.today().strftime("%Y-%m-%d") in new_path
|
||||
|
||||
|
||||
PAD_STRING = spack.util.path.SPACK_PATH_PADDING_CHARS
|
||||
MAX_PATH_LEN = spack.util.path.get_system_path_max()
|
||||
MAX_PADDED_LEN = MAX_PATH_LEN - spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH
|
||||
PAD_STRING = spack_path.SPACK_PATH_PADDING_CHARS
|
||||
MAX_PATH_LEN = spack_path.get_system_path_max()
|
||||
MAX_PADDED_LEN = MAX_PATH_LEN - spack_path.SPACK_MAX_INSTALL_PATH_LENGTH
|
||||
reps = [PAD_STRING for _ in range((MAX_PADDED_LEN // len(PAD_STRING) + 1) + 2)]
|
||||
full_padded_string = os.path.join(os.sep + "path", os.sep.join(reps))[:MAX_PADDED_LEN]
|
||||
|
||||
|
||||
BIN lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz — Normal file (binary file not shown)
BIN lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz — Normal file (binary file not shown)
BIN lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz — Normal file (binary file not shown)
BIN lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz — Normal file (binary file not shown)
@@ -68,3 +68,19 @@ def test_error_on_anonymous_dependency(config, mock_packages):
|
||||
pkg = spack.repo.path.get_pkg_class("a")
|
||||
with pytest.raises(spack.directives.DependencyError):
|
||||
spack.directives._depends_on(pkg, "@4.5")
|
||||
|
||||
|
||||
@pytest.mark.regression("34879")
|
||||
@pytest.mark.parametrize(
|
||||
"package_name,expected_maintainers",
|
||||
[
|
||||
("maintainers-1", ["user1", "user2"]),
|
||||
# Reset from PythonPackage
|
||||
("py-extension1", ["user1", "user2"]),
|
||||
# Extends maintainers-1
|
||||
("maintainers-3", ["user0", "user1", "user2", "user3"]),
|
||||
],
|
||||
)
|
||||
def test_maintainer_directive(config, mock_packages, package_name, expected_maintainers):
|
||||
pkg_cls = spack.repo.path.get_pkg_class(package_name)
|
||||
assert pkg_cls.maintainers == expected_maintainers
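The expected lists encode the behavior under test: maintainers declared with the maintainers() directive accumulate through the package class hierarchy. A hedged, illustrative mock package consistent with the maintainers-3 case (class and user names mirror the test data; the inheritance wiring is assumed, not copied from the mock repository):

    from spack.package import *

    class Maintainers3(Maintainers1):  # assumes Maintainers1 declares user1 and user2
        """Illustrative mock package whose maintainers extend those of its base class."""

        maintainers("user0", "user3")
        # effective list seen by the test: ["user0", "user1", "user2", "user3"]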
@@ -5,7 +5,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from spack.fetch_strategy import from_url_scheme
|
||||
from spack import fetch_strategy
|
||||
|
||||
|
||||
def test_fetchstrategy_bad_url_scheme():
|
||||
@@ -13,4 +13,14 @@ def test_fetchstrategy_bad_url_scheme():
|
||||
unsupported scheme fails as expected."""
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
fetcher = from_url_scheme("bogus-scheme://example.com/a/b/c") # noqa: F841
|
||||
fetcher = fetch_strategy.from_url_scheme("bogus-scheme://example.com/a/b/c") # noqa: F841
|
||||
|
||||
|
||||
def test_filesummary(tmpdir):
|
||||
p = str(tmpdir.join("xyz"))
|
||||
with open(p, "wb") as f:
|
||||
f.write(b"abcdefghijklmnopqrstuvwxyz")
|
||||
|
||||
assert fetch_strategy._filesummary(p, print_bytes=8) == (26, b"abcdefgh...stuvwxyz")
|
||||
assert fetch_strategy._filesummary(p, print_bytes=13) == (26, b"abcdefghijklmnopqrstuvwxyz")
|
||||
assert fetch_strategy._filesummary(p, print_bytes=100) == (26, b"abcdefghijklmnopqrstuvwxyz")
|
||||
|
||||
@@ -36,7 +36,7 @@
|
||||
needs_binary_relocation,
|
||||
needs_text_relocation,
|
||||
relocate_links,
|
||||
unsafe_relocate_text,
|
||||
relocate_text,
|
||||
)
|
||||
from spack.spec import Spec
|
||||
|
||||
@@ -190,7 +190,7 @@ def test_buildcache(mock_archive, tmpdir):
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("install_mockery")
|
||||
def test_unsafe_relocate_text(tmpdir):
|
||||
def test_relocate_text(tmpdir):
|
||||
spec = Spec("trivial-install-test-package")
|
||||
spec.concretize()
|
||||
with tmpdir.as_cwd():
|
||||
@@ -203,7 +203,7 @@ def test_unsafe_relocate_text(tmpdir):
|
||||
filenames = [filename]
|
||||
new_dir = "/opt/rh/devtoolset/"
|
||||
# Singleton dict doesn't matter if Ordered
|
||||
unsafe_relocate_text(filenames, {old_dir: new_dir})
|
||||
relocate_text(filenames, {old_dir: new_dir})
|
||||
with open(filename, "r") as script:
|
||||
for line in script:
|
||||
assert new_dir in line
|
||||
|
||||
@@ -2,13 +2,11 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import io
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -18,11 +16,11 @@
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.relocate
|
||||
import spack.relocate_text as relocate_text
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.tengine
|
||||
import spack.util.executable
|
||||
from spack.relocate import utf8_path_to_binary_regex, utf8_paths_to_single_binary_regex
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")
|
||||
|
||||
@@ -269,7 +267,7 @@ def test_set_elf_rpaths_warning(mock_patchelf):
|
||||
|
||||
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
|
||||
@skip_unless_linux
|
||||
def test_replace_prefix_bin(binary_with_rpaths, prefix_like):
|
||||
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
|
||||
prefix = "/usr/" + prefix_like
|
||||
prefix_bytes = prefix.encode("utf-8")
|
||||
new_prefix = "/foo/" + prefix_like
|
||||
@@ -278,7 +276,7 @@ def test_replace_prefix_bin(binary_with_rpaths, prefix_like):
|
||||
executable = binary_with_rpaths(rpaths=[prefix + "/lib", prefix + "/lib64"])
|
||||
|
||||
# Relocate the RPATHs
|
||||
spack.relocate._replace_prefix_bin(str(executable), {prefix_bytes: new_prefix_bytes})
|
||||
spack.relocate.relocate_text_bin([str(executable)], {prefix_bytes: new_prefix_bytes})
|
||||
|
||||
# Some compilers add rpaths so ensure changes included in final result
|
||||
assert "%s/lib:%s/lib64" % (new_prefix, new_prefix) in rpaths_for(executable)
|
||||
@@ -349,7 +347,7 @@ def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpd
|
||||
|
||||
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
|
||||
@skip_unless_linux
|
||||
def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir):
|
||||
def test_relocate_text_bin_with_message(binary_with_rpaths, copy_binary, prefix_tmpdir):
|
||||
orig_binary = binary_with_rpaths(
|
||||
rpaths=[
|
||||
str(prefix_tmpdir.mkdir("lib")),
|
||||
@@ -368,7 +366,7 @@ def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir):
|
||||
orig_path_bytes = str(orig_binary.dirpath()).encode("utf-8")
|
||||
new_path_bytes = str(new_binary.dirpath()).encode("utf-8")
|
||||
|
||||
spack.relocate.unsafe_relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})
|
||||
spack.relocate.relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})
|
||||
|
||||
# Check original directory is not there anymore and it was
|
||||
# substituted with the new one
|
||||
@@ -382,8 +380,8 @@ def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):
|
||||
fpath = str(tmpdir.join("fakebin"))
|
||||
with open(fpath, "w") as f:
|
||||
f.write("/short")
|
||||
with pytest.raises(spack.relocate.BinaryTextReplaceError):
|
||||
spack.relocate.unsafe_relocate_text_bin([fpath], {short_prefix: long_prefix})
|
||||
with pytest.raises(relocate_text.BinaryTextReplaceError):
|
||||
spack.relocate.relocate_text_bin([fpath], {short_prefix: long_prefix})
|
||||
|
||||
|
||||
@pytest.mark.requires_executables("install_name_tool", "file", "cc")
|
||||
@@ -438,227 +436,3 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):
|
||||
# (this is a corner case for GCC installation)
|
||||
(root, filename) = make_object_file()
|
||||
assert not fixup_rpath(root, filename)
|
||||
|
||||
|
||||
def test_text_relocation_regex_is_safe():
|
||||
# Test whether prefix regex is properly escaped
|
||||
string = b"This does not match /a/, but this does: /[a-z]/."
|
||||
assert utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/"
|
||||
|
||||
|
||||
def test_utf8_paths_to_single_binary_regex():
|
||||
regex = utf8_paths_to_single_binary_regex(["/first/path", "/second/path", "/safe/[a-z]"])
|
||||
# Match nothing
|
||||
assert not regex.search(b"text /neither/first/path text /the/second/path text")
|
||||
|
||||
# Match first
|
||||
string = b"contains both /first/path/subdir and /second/path/sub"
|
||||
assert regex.search(string).group(0) == b"/first/path/subdir"
|
||||
|
||||
# Match second
|
||||
string = b"contains both /not/first/path/subdir but /second/path/subdir"
|
||||
assert regex.search(string).group(0) == b"/second/path/subdir"
|
||||
|
||||
# Match "unsafe" dir name
|
||||
string = b"don't match /safe/a/path but do match /safe/[a-z]/file"
|
||||
assert regex.search(string).group(0) == b"/safe/[a-z]/file"
|
||||
|
||||
|
||||
def test_ordered_replacement():
|
||||
# This tests whether binary text replacement respects order, so that
|
||||
# a long package prefix is replaced before a shorter sub-prefix like
|
||||
# the root of the spack store (as a fallback).
|
||||
def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
|
||||
f = io.BytesIO(before)
|
||||
spack.relocate.apply_binary_replacements(f, OrderedDict(prefix_map), suffix_safety_size)
|
||||
f.seek(0)
|
||||
assert f.read() == after
|
||||
|
||||
# The case of having a non-null terminated common suffix.
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
(b"/old-spack/opt", b"/sec/spack/opt"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
|
||||
b"Binary with /////////first/specific-package and /sec/spack/opt",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# The case of having a direct null terminated common suffix.
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
(b"/old-spack/opt", b"/sec/spack/opt"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
|
||||
b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Testing the order of operations (not null terminated, long enough common suffix)
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt", b"/s/spack/opt"),
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
|
||||
b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Testing the order of operations (null terminated, long enough common suffix)
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt", b"/s/spack/opt"),
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
|
||||
b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix long enough
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix too short, but
|
||||
# shortening is enough to spare more than 7 bytes of old suffix.
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data", # ckage/sub = 9 bytes
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix too short,
|
||||
# shortening leaves exactly 7 suffix bytes untouched, amazing!
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data", # age/sub = 7 bytes
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix too short,
|
||||
# shortening doesn't leave space for 7 bytes, sad!
|
||||
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
||||
b"/old-spack/opt/specific-package",
|
||||
b"/snacks/specific-XXXXXge",
|
||||
b"/old-spack/opt/specific-package/sub",
|
||||
)
|
||||
with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
# expect failure!
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Check that it works when changing suffix_safety_size.
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
|
||||
suffix_safety_size=6,
|
||||
)
|
||||
|
||||
# Finally check the case of no shortening but a long enough common suffix.
|
||||
replace_and_expect(
|
||||
[(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
|
||||
b"Binary with pkg-gwixwaalgczp6/config\0 data",
|
||||
b"Binary with pkg-zkesfralgczp6/config\0 data",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Too short matching suffix, identical string length
|
||||
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
||||
b"pkg-gwixwaxlgczp6",
|
||||
b"pkg-zkesfrzlgczp6",
|
||||
b"pkg-gwixwaxlgczp6",
|
||||
)
|
||||
with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
|
||||
replace_and_expect(
|
||||
[(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
|
||||
b"Binary with pkg-gwixwaxlgczp6\0 data",
|
||||
# expect failure
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Finally, make sure that the regex is not greedily finding the LAST null byte
|
||||
# it should find the first null byte in the window. In this test we put one null
|
||||
# at a distance where we cant keep a long enough suffix, and one where we can,
|
||||
# so we should expect failure when the first null is used.
|
||||
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
||||
b"pkg-abcdef",
|
||||
b"pkg-xyzabc",
|
||||
b"pkg-abcdef",
|
||||
)
|
||||
with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
|
||||
replace_and_expect(
|
||||
[(b"pkg-abcdef", b"pkg-xyzabc")],
|
||||
b"Binary with pkg-abcdef\0/xx\0", # def\0/xx is 7 bytes.
|
||||
# expect failure
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
|
||||
def test_inplace_text_replacement():
|
||||
def replace_and_expect(prefix_to_prefix, before: bytes, after: bytes):
|
||||
f = io.BytesIO(before)
|
||||
prefix_to_prefix = OrderedDict(prefix_to_prefix)
|
||||
regex = spack.relocate.byte_strings_to_single_binary_regex(prefix_to_prefix.keys())
|
||||
spack.relocate._replace_prefix_text_file(f, regex, prefix_to_prefix)
|
||||
f.seek(0)
|
||||
assert f.read() == after
|
||||
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/first/prefix", b"/first-replacement/prefix"),
|
||||
(b"/second/prefix", b"/second-replacement/prefix"),
|
||||
],
|
||||
b"Example: /first/prefix/subdir and /second/prefix/subdir",
|
||||
b"Example: /first-replacement/prefix/subdir and /second-replacement/prefix/subdir",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/replace/in/order", b"/first"),
|
||||
(b"/replace/in", b"/second"),
|
||||
(b"/replace", b"/third"),
|
||||
],
|
||||
b"/replace/in/order/x /replace/in/y /replace/z",
|
||||
b"/first/x /second/y /third/z",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/replace", b"/third"),
|
||||
(b"/replace/in", b"/second"),
|
||||
(b"/replace/in/order", b"/first"),
|
||||
],
|
||||
b"/replace/in/order/x /replace/in/y /replace/z",
|
||||
b"/third/in/order/x /third/in/y /third/z",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[(b"/my/prefix", b"/replacement")],
|
||||
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
|
||||
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[(b"/my/prefix", b"/replacement")],
|
||||
b"Install path: /my/prefix.",
|
||||
b"Install path: /replacement.",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[(b"/my/prefix", b"/replacement")],
|
||||
b"#!/my/prefix",
|
||||
b"#!/replacement",
|
||||
)
|
||||
|
||||
lib/spack/spack/test/relocate_text.py — 247 lines, Normal file
@@ -0,0 +1,247 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import io
|
||||
from collections import OrderedDict
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.relocate_text as relocate_text
|
||||
|
||||
|
||||
def test_text_relocation_regex_is_safe():
|
||||
# Test whether prefix regex is properly escaped
|
||||
string = b"This does not match /a/, but this does: /[a-z]/."
|
||||
assert relocate_text.utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/"
|
||||
|
||||
|
||||
def test_utf8_paths_to_single_binary_regex():
|
||||
regex = relocate_text.utf8_paths_to_single_binary_regex(
|
||||
["/first/path", "/second/path", "/safe/[a-z]"]
|
||||
)
|
||||
# Match nothing
|
||||
assert not regex.search(b"text /neither/first/path text /the/second/path text")
|
||||
|
||||
# Match first
|
||||
string = b"contains both /first/path/subdir and /second/path/sub"
|
||||
assert regex.search(string).group(0) == b"/first/path/subdir"
|
||||
|
||||
# Match second
|
||||
string = b"contains both /not/first/path/subdir but /second/path/subdir"
|
||||
assert regex.search(string).group(0) == b"/second/path/subdir"
|
||||
|
||||
# Match "unsafe" dir name
|
||||
string = b"don't match /safe/a/path but do match /safe/[a-z]/file"
|
||||
assert regex.search(string).group(0) == b"/safe/[a-z]/file"
|
||||
|
||||
|
||||
def test_ordered_replacement():
|
||||
# This tests whether binary text replacement respects order, so that
|
||||
# a long package prefix is replaced before a shorter sub-prefix like
|
||||
# the root of the spack store (as a fallback).
|
||||
def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
|
||||
f = io.BytesIO(before)
|
||||
relocater = relocate_text.BinaryFilePrefixReplacer(
|
||||
OrderedDict(prefix_map), suffix_safety_size
|
||||
)
|
||||
relocater.apply_to_file(f)
|
||||
f.seek(0)
|
||||
assert f.read() == after
|
||||
|
||||
# The case of having a non-null terminated common suffix.
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
(b"/old-spack/opt", b"/sec/spack/opt"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
|
||||
b"Binary with /////////first/specific-package and /sec/spack/opt",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# The case of having a direct null terminated common suffix.
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
(b"/old-spack/opt", b"/sec/spack/opt"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
|
||||
b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Testing the order of operations (not null terminated, long enough common suffix)
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt", b"/s/spack/opt"),
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
|
||||
b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Testing the order of operations (null terminated, long enough common suffix)
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/old-spack/opt", b"/s/spack/opt"),
|
||||
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
|
||||
],
|
||||
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
|
||||
b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix long enough
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix too short, but
|
||||
# shortening is enough to spare more than 7 bytes of old suffix.
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data", # ckage/sub = 9 bytes
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix too short,
|
||||
# shortening leaves exactly 7 suffix bytes untouched, amazing!
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data", # age/sub = 7 bytes
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Null terminated within the lookahead window, common suffix too short,
|
||||
# shortening doesn't leave space for 7 bytes, sad!
|
||||
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
||||
b"/old-spack/opt/specific-package",
|
||||
b"/snacks/specific-XXXXXge",
|
||||
b"/old-spack/opt/specific-package/sub",
|
||||
)
|
||||
with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
# expect failure!
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Check that it works when changing suffix_safety_size.
|
||||
replace_and_expect(
|
||||
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
|
||||
b"Binary with /old-spack/opt/specific-package/sub\0 data",
|
||||
b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
|
||||
suffix_safety_size=6,
|
||||
)
|
||||
|
||||
# Finally check the case of no shortening but a long enough common suffix.
|
||||
replace_and_expect(
|
||||
[(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
|
||||
b"Binary with pkg-gwixwaalgczp6/config\0 data",
|
||||
b"Binary with pkg-zkesfralgczp6/config\0 data",
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Too short matching suffix, identical string length
|
||||
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
||||
b"pkg-gwixwaxlgczp6",
|
||||
b"pkg-zkesfrzlgczp6",
|
||||
b"pkg-gwixwaxlgczp6",
|
||||
)
|
||||
with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
|
||||
replace_and_expect(
|
||||
[(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
|
||||
b"Binary with pkg-gwixwaxlgczp6\0 data",
|
||||
# expect failure
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
# Finally, make sure that the regex is not greedily finding the LAST null byte
|
||||
# it should find the first null byte in the window. In this test we put one null
|
||||
# at a distance where we cant keep a long enough suffix, and one where we can,
|
||||
# so we should expect failure when the first null is used.
|
||||
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
||||
b"pkg-abcdef",
|
||||
b"pkg-xyzabc",
|
||||
b"pkg-abcdef",
|
||||
)
|
||||
with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
|
||||
replace_and_expect(
|
||||
[(b"pkg-abcdef", b"pkg-xyzabc")],
|
||||
b"Binary with pkg-abcdef\0/xx\0", # def\0/xx is 7 bytes.
|
||||
# expect failure
|
||||
suffix_safety_size=7,
|
||||
)
|
||||
|
||||
|
||||
def test_inplace_text_replacement():
|
||||
def replace_and_expect(prefix_to_prefix, before: bytes, after: bytes):
|
||||
f = io.BytesIO(before)
|
||||
replacer = relocate_text.TextFilePrefixReplacer(OrderedDict(prefix_to_prefix))
|
||||
replacer.apply_to_file(f)
|
||||
f.seek(0)
|
||||
assert f.read() == after
|
||||
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/first/prefix", b"/first-replacement/prefix"),
|
||||
(b"/second/prefix", b"/second-replacement/prefix"),
|
||||
],
|
||||
b"Example: /first/prefix/subdir and /second/prefix/subdir",
|
||||
b"Example: /first-replacement/prefix/subdir and /second-replacement/prefix/subdir",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/replace/in/order", b"/first"),
|
||||
(b"/replace/in", b"/second"),
|
||||
(b"/replace", b"/third"),
|
||||
],
|
||||
b"/replace/in/order/x /replace/in/y /replace/z",
|
||||
b"/first/x /second/y /third/z",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[
|
||||
(b"/replace", b"/third"),
|
||||
(b"/replace/in", b"/second"),
|
||||
(b"/replace/in/order", b"/first"),
|
||||
],
|
||||
b"/replace/in/order/x /replace/in/y /replace/z",
|
||||
b"/third/in/order/x /third/in/y /third/z",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[(b"/my/prefix", b"/replacement")],
|
||||
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
|
||||
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[(b"/my/prefix", b"/replacement")],
|
||||
b"Install path: /my/prefix.",
|
||||
b"Install path: /replacement.",
|
||||
)
|
||||
|
||||
replace_and_expect(
|
||||
[(b"/my/prefix", b"/replacement")],
|
||||
b"#!/my/prefix",
|
||||
b"#!/replacement",
|
||||
)
|
||||
|
||||
|
||||
def test_relocate_text_filters_redundant_entries():
|
||||
# Test that we're filtering identical old / new paths, since that's a waste.
|
||||
mapping = OrderedDict([("/hello", "/hello"), ("/world", "/world")])
|
||||
replacer_1 = relocate_text.BinaryFilePrefixReplacer.from_strings_or_bytes(mapping)
|
||||
replacer_2 = relocate_text.TextFilePrefixReplacer.from_strings_or_bytes(mapping)
|
||||
assert not replacer_1.prefix_to_prefix
|
||||
assert not replacer_2.prefix_to_prefix
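For orientation, a small usage sketch of the replacer API these tests drive; the constructor and apply_to_file signatures are taken from the tests above, while the paths and payload are illustrative:

    import io
    from collections import OrderedDict

    import spack.relocate_text as relocate_text

    # Same-length prefixes with a long common suffix, so no C-string shrinking is needed.
    mapping = OrderedDict([(b"/old/prefix", b"/new/prefix")])
    replacer = relocate_text.BinaryFilePrefixReplacer(mapping, 7)  # suffix_safety_size=7

    buf = io.BytesIO(b"payload /old/prefix/lib\0 payload")
    replacer.apply_to_file(buf)
    buf.seek(0)
    assert b"/new/prefix/lib" in buf.read()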
@@ -1,72 +0,0 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import itertools
|
||||
import pytest
|
||||
|
||||
import spack.spec
|
||||
import spack.solver.asp as asp
|
||||
import spack.store
|
||||
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(
|
||||
spack.config.get("config:concretizer") == "original", reason="requires new concretizer"
|
||||
),
|
||||
pytest.mark.usefixtures("mutable_config", "mock_packages"),
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def reusable_specs(mock_packages):
|
||||
reusable_specs = []
|
||||
for spec in ["mpich", "openmpi", "zmpi"]:
|
||||
reusable_specs.extend(s for s in spack.spec.Spec(spec).concretized().traverse(root=True))
|
||||
return list(sorted(set(reusable_specs)))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"root,reuse",
|
||||
itertools.product(
|
||||
("mpileaks ^mpich", "mpileaks ^openmpi", "mpileaks ^zmpi", "patch"),
|
||||
(True, False),
|
||||
),
|
||||
)
|
||||
def test_all_facts_in_solve(database, root, reuse, reusable_specs):
|
||||
reusable_specs = reusable_specs if reuse else []
|
||||
|
||||
solver = spack.solver.asp.Solver()
|
||||
setup = spack.solver.asp.SpackSolverSetup()
|
||||
result, _, _ = solver.driver.solve(setup, [spack.spec.Spec(root)], reuse=reusable_specs)
|
||||
|
||||
*_, result_attrs = result.answers[0]
|
||||
result_attrs = set(result_attrs)
|
||||
|
||||
def remove_hashes(attrs):
|
||||
return []
|
||||
|
||||
for spec in result.specs:
|
||||
# check only link and run deps if reusing.
|
||||
deptype = ("link", "run") if reuse else "all"
|
||||
|
||||
# get all facts about the spec and filter out just the "attr" ones.
|
||||
attrs = setup.spec_clauses(spec, deptype=deptype, body=True, expand_hashes=True)
|
||||
|
||||
# only consider attr() functions, not other displayed atoms
|
||||
# don't consider any DAG/package hashes, as they are added after solving
|
||||
attrs = set(attr for attr in attrs if attr.name == "attr" and "hash" not in attr.args[0])
|
||||
|
||||
# make sure all facts from the solver are in the actual solution.
|
||||
diff = attrs - result_attrs
|
||||
|
||||
# this is a current bug in the solver: we don't manage dependency patches
|
||||
# properly, and with reuse it can grab something w/o the right patch.
|
||||
# See https://github.com/spack/spack/issues/32497
|
||||
# TODO: Remove this XFAIL when #32497 is fixed.
|
||||
patches = [a for a in diff if a.args[0] == "variant_value" and a.args[2] == "patches"]
|
||||
if diff and not (diff - set(patches)):
|
||||
pytest.xfail("Bug in new concretizer with patch constraints. See #32497.")
|
||||
|
||||
assert not diff
|
||||
@@ -125,7 +125,7 @@ def _mock_installed(self):
# use the installed C. It should *not* force A to use the installed D
# *if* we're doing a fresh installation.
a_spec = Spec(a)
a_spec._add_dependency(c_spec, ("build", "link"))
a_spec._add_dependency(c_spec, deptypes=("build", "link"))
a_spec.concretize()
assert spack.version.Version("2") == a_spec[c][d].version
assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))

a_spec = Spec("a")
a_spec._add_dependency(b_spec, ("build", "link"))
a_spec._add_dependency(b_spec, deptypes=("build", "link"))
a_spec.concretize()

assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -992,9 +992,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
link_run_spec = Spec("c@1.0").concretized()
build_spec = Spec("c@2.0").concretized()

root.add_dependency_edge(link_run_spec, deptype="link")
root.add_dependency_edge(link_run_spec, deptype="run")
root.add_dependency_edge(build_spec, deptype="build")
root.add_dependency_edge(link_run_spec, deptypes="link")
root.add_dependency_edge(link_run_spec, deptypes="run")
root.add_dependency_edge(build_spec, deptypes="build")

# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
@@ -1020,7 +1020,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
root = Spec("b@2.0").concretized()
bootstrap = Spec("b@1.0").concretized()

root.add_dependency_edge(bootstrap, deptype="build")
root.add_dependency_edge(bootstrap, deptypes="build")

assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == "b"
@@ -1039,7 +1039,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
bootstrap = Spec("b@1.0").concretized()

for current_deptype in ("build", "link", "run"):
root.add_dependency_edge(bootstrap, deptype=current_deptype)
root.add_dependency_edge(bootstrap, deptypes=current_deptype)

# Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1
@@ -1066,9 +1066,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
c1 = Spec("b@1.0").concretized()
c2 = Spec("b@2.0").concretized()

p.add_dependency_edge(c1, deptype=c1_deptypes)
p.add_dependency_edge(c1, deptypes=c1_deptypes)
with pytest.raises(spack.error.SpackError):
p.add_dependency_edge(c2, deptype=c2_deptypes)
p.add_dependency_edge(c2, deptypes=c2_deptypes)


@pytest.mark.regression("33499")
@@ -1087,16 +1087,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1")

root.add_dependency_edge(a1, deptype=("build", "run", "test"))
root.add_dependency_edge(a1, deptypes=("build", "run", "test"))

# unique package as a dep of a build/run/test type dep.
a1.add_dependency_edge(a2, deptype="all")
a1.add_dependency_edge(z3_flavor_1, deptype="all")
a1.add_dependency_edge(a2, deptypes="all")
a1.add_dependency_edge(z3_flavor_1, deptypes="all")

# chain of link type deps root -> z1 -> z2 -> z3
root.add_dependency_edge(z1, deptype="link")
z1.add_dependency_edge(z2, deptype="link")
z2.add_dependency_edge(z3_flavor_2, deptype="link")
root.add_dependency_edge(z1, deptypes="link")
z1.add_dependency_edge(z2, deptypes="link")
z2.add_dependency_edge(z3_flavor_2, deptypes="link")

# Indexing should prefer the link-type dep.
assert "through_z1" in root["z3"].variants

@@ -1108,7 +1108,7 @@ def test_error_message_unknown_variant(self):
def test_satisfies_dependencies_ordered(self):
d = Spec("zmpi ^fake")
s = Spec("mpileaks")
s._add_dependency(d, ())
s._add_dependency(d, deptypes=())
assert s.satisfies("mpileaks ^zmpi ^fake", strict=True)

@pytest.mark.parametrize("transitive", [True, False])
@@ -1156,7 +1156,9 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):

def test_malformed_spec_dict():
with pytest.raises(SpecError, match="malformed"):
Spec.from_dict({"spec": {"nodes": [{"dependencies": {"name": "foo"}}]}})
Spec.from_dict(
{"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
)


def test_spec_dict_hashless_dep():
@@ -1164,9 +1166,10 @@ def test_spec_dict_hashless_dep():
Spec.from_dict(
{
"spec": {
"_meta": {"version": 2},
"nodes": [
{"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}
]
],
}
}
)
@@ -1252,7 +1255,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):

# add it to an abstract spec as a dependency
top = Spec("dt-diamond")
top.add_dependency_edge(bottom, ())
top.add_dependency_edge(bottom, deptypes=())

# concretize with the already-concrete dependency
top.concretize()

@@ -13,7 +13,9 @@
import ast
import collections
import collections.abc
import gzip
import inspect
import json
import os

import pytest
@@ -507,3 +509,33 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
("version", "1.2.11"),
]
)


@pytest.mark.parametrize(
"specfile,expected_hash,reader_cls",
[
# First version supporting JSON format for specs
("specfiles/hdf5.v013.json.gz", "vglgw4reavn65vx5d4dlqn6rjywnq76d", spack.spec.SpecfileV1),
# Introduces full hash in the format, still has 3 hashes
("specfiles/hdf5.v016.json.gz", "stp45yvzte43xdauknaj3auxlxb4xvzs", spack.spec.SpecfileV1),
# Introduces "build_specs", see https://github.com/spack/spack/pull/22845
("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
# Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):
fullpath = os.path.join(spack.paths.test_path, "data", specfile)
with gzip.open(fullpath, "rt", encoding="utf-8") as f:
data = json.load(f)

s1 = Spec.from_dict(data)
s2 = reader_cls.load(data)

assert s2.dag_hash() == expected_hash
assert s1.dag_hash() == s2.dag_hash()
assert s1 == s2
assert Spec.from_json(s2.to_json()).dag_hash() == s2.dag_hash()

openmpi_edges = s2.edges_to_dependencies(name="openmpi")
assert len(openmpi_edges) == 1

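A minimal sketch (added for illustration, not part of the diff) of the round trip the new test performs: read a gzipped JSON specfile from the test data directory, rebuild the Spec with Spec.from_dict, and check its DAG hash. The file name and expected hash below are the ones listed in the parametrization above.

    import gzip
    import json
    import os

    import spack.paths
    import spack.spec

    # Load one of the fixture specfiles the same way test_load_json_specfiles does.
    path = os.path.join(spack.paths.test_path, "data", "specfiles/hdf5.v019.json.gz")
    with gzip.open(path, "rt", encoding="utf-8") as f:
        data = json.load(f)

    spec = spack.spec.Spec.from_dict(data)
    assert spec.dag_hash() == "iulacrbz7o5v5sbj7njbkyank3juh6d3"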
@@ -18,8 +18,8 @@ def create_dag(nodes, edges):
dict: mapping from package name to abstract Spec with proper deps.
"""
specs = {name: Spec(name) for name in nodes}
for parent, child, deptype in edges:
specs[parent].add_dependency_edge(specs[child], deptype)
for parent, child, deptypes in edges:
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
return specs

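The hunks above all make the same mechanical change: dependency types are now passed to _add_dependency and add_dependency_edge through a deptypes= keyword instead of the old positional argument or deptype= keyword. A minimal sketch of the new call style (illustration only, using the mock packages exercised by these tests):

    import spack.spec

    root = spack.spec.Spec("b@2.0").concretized()
    bootstrap = spack.spec.Spec("b@1.0").concretized()

    # The dependency type is now given as a keyword argument.
    root.add_dependency_edge(bootstrap, deptypes="build")
    assert root.dependencies()[0].name == "b"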
@@ -12,6 +12,6 @@ class Maintainers1(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/maintainers-1.0.tar.gz"

maintainers = ["user1", "user2"]
maintainers("user1", "user2")

version("1.0", "0123456789abcdef0123456789abcdef")

@@ -0,0 +1,17 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
from spack.pkg.builtin.mock.maintainers_1 import Maintainers1


class Maintainers3(Maintainers1):
"""A second package with a maintainers field."""

homepage = "http://www.example.com"
url = "http://www.example.com/maintainers2-1.0.tar.gz"

maintainers("user0", "user3")

version("1.0", "0123456789abcdef0123456789abcdef")
@@ -12,6 +12,4 @@ class Openmpi(Package):
variant("internal-hwloc", default=False)
variant("fabrics", values=any_combination_of("psm", "mxm"))

provides("mpi")

depends_on("hwloc", when="~internal-hwloc")

@@ -13,8 +13,7 @@ class PyExtension1(PythonPackage):
homepage = "http://www.example.com"
url = "http://www.example.com/extension1-1.0.tar.gz"

# Override settings in base class
maintainers = []
maintainers = ["user1", "user2"]

version("1.0", "00000000000000000000000000000110")
version("2.0", "00000000000000000000000000000120")

@@ -278,7 +278,7 @@ class Acts(CMakePackage, CudaPackage):
depends_on("python", when="+python")
depends_on("python@3.8:", when="+python @19.11:19")
depends_on("python@3.8:", when="+python @21:")
depends_on("py-onnx-runtime", when="+onnx")
depends_on("py-onnxruntime", when="+onnx")
depends_on("py-pybind11 @2.6.2:", when="+python @18:")
depends_on("py-pytest", when="+python +unit_tests")
depends_on("root @6.10: cxxstd=14", when="+tgeo @:0.8.0")

var/spack/repos/builtin/packages/amqp-cpp/package.py (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class AmqpCpp(CMakePackage):
|
||||
"""AMQP-CPP is a C++ library for communicating with a RabbitMQ message
|
||||
broker. The library can be used to parse incoming data from, and generate
|
||||
frames to, a RabbitMQ server."""
|
||||
|
||||
homepage = "https://github.com/CopernicaMarketingSoftware/AMQP-CPP"
|
||||
git = "https://github.com/CopernicaMarketingSoftware/AMQP-CPP.git"
|
||||
url = "https://github.com/CopernicaMarketingSoftware/AMQP-CPP/archive/refs/tags/v4.3.19.tar.gz"
|
||||
|
||||
maintainers = ["lpottier"]
|
||||
|
||||
version("4.3.19", sha256="ca29bb349c498948576a4604bed5fd3c27d87240b271a4441ccf04ba3797b31d")
|
||||
|
||||
variant(
|
||||
"tcp",
|
||||
default=False,
|
||||
description="Build TCP module. TCP module is supported for Linux only.",
|
||||
)
|
||||
variant("shared", default=True, description="Build as a shared library (static by default)")
|
||||
|
||||
conflicts("tcp", when="platform=darwin", msg="TCP module requires Linux")
|
||||
|
||||
depends_on("cmake@3.5:", type="build")
|
||||
depends_on("openssl@1.1.1:", when="+tcp", type=("build", "link", "run"))
|
||||
|
||||
def cmake_args(self):
|
||||
args = [
|
||||
self.define_from_variant("AMQP-CPP_LINUX_TCP", "tcp"),
|
||||
self.define_from_variant("AMQP-CPP_BUILD_SHARED", "shared"),
|
||||
]
|
||||
return args
|
||||
@@ -23,8 +23,10 @@ class AppleGl(Package):
|
||||
# Only supported on 'platform=darwin' and compiler=apple-clang
|
||||
conflicts("platform=linux")
|
||||
conflicts("platform=cray")
|
||||
conflicts("platform=windows")
|
||||
conflicts("%gcc")
|
||||
conflicts("%clang")
|
||||
conflicts("%msvc")
|
||||
|
||||
phases = []
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -20,6 +20,8 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
|
||||
executables = ["^nm$", "^readelf$"]
|
||||
|
||||
version("2.40", sha256="f8298eb153a4b37d112e945aa5cb2850040bcf26a3ea65b5a715c83afe05e48a")
|
||||
version("2.39", sha256="da24a84fef220102dd24042df06fdea851c2614a5377f86effa28f33b7b16148")
|
||||
version("2.38", sha256="070ec71cf077a6a58e0b959f05a09a35015378c2d8a51e90f3aeabfe30590ef8")
|
||||
version("2.37", sha256="67fc1a4030d08ee877a4867d3dcab35828148f87e1fd05da6db585ed5a166bd4")
|
||||
version("2.36.1", sha256="5b4bd2e79e30ce8db0abd76dd2c2eae14a94ce212cfc59d3c37d23e24bc6d7a3")
|
||||
@@ -30,6 +32,7 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
version("2.33.1", sha256="0cb4843da15a65a953907c96bad658283f3c4419d6bcc56bf2789db16306adb2")
|
||||
version("2.32", sha256="de38b15c902eb2725eac6af21183a5f34ea4634cb0bcef19612b50e5ed31072d")
|
||||
version("2.31.1", sha256="ffcc382695bf947da6135e7436b8ed52d991cf270db897190f19d6f9838564d0")
|
||||
version("2.30", sha256="efeade848067e9a03f1918b1da0d37aaffa0b0127a06b5e9236229851d9d0c09")
|
||||
version("2.29.1", sha256="1509dff41369fb70aed23682351b663b56db894034773e6dbf7d5d6071fc55cc")
|
||||
version("2.28", sha256="6297433ee120b11b4b0a1c8f3512d7d73501753142ab9e2daa13c5a3edd32a72")
|
||||
version("2.27", sha256="369737ce51587f92466041a97ab7d2358c6d9e1b6490b3940eb09fb0a9a6ac88")
|
||||
@@ -45,9 +48,15 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
# --disable-ld flag
|
||||
variant("gold", default=False, when="+ld", description="build the gold linker")
|
||||
variant("libiberty", default=False, description="Also install libiberty.")
|
||||
variant("nls", default=True, description="Enable Native Language Support")
|
||||
variant("nls", default=False, description="Enable Native Language Support")
|
||||
variant("headers", default=False, description="Install extra headers (e.g. ELF)")
|
||||
variant("lto", default=False, description="Enable lto.")
|
||||
variant(
|
||||
"pgo",
|
||||
default=False,
|
||||
description="Build with profile-guided optimization (slow)",
|
||||
when="@2.37:",
|
||||
)
|
||||
variant("ld", default=False, description="Enable ld.")
|
||||
# When you build binutils with ~ld and +gas and load it in your PATH, you
|
||||
# may end up with incompatibilities between a potentially older system ld
|
||||
@@ -57,6 +66,7 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
# when compiling with debug symbols on gcc.
|
||||
variant("gas", default=False, when="+ld", description="Enable as assembler.")
|
||||
variant("interwork", default=False, description="Enable interwork.")
|
||||
variant("gprofng", default=False, description="Enable gprofng.", when="@2.39:")
|
||||
variant(
|
||||
"libs",
|
||||
default="shared,static",
|
||||
@@ -76,16 +86,26 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
depends_on("diffutils", type="build")
|
||||
depends_on("gettext", when="+nls")
|
||||
|
||||
# PGO runs tests, which requires `runtest` from dejagnu
|
||||
depends_on("dejagnu", when="+pgo", type="build")
|
||||
|
||||
# Prior to 2.30, gold did not distribute the generated files and
|
||||
# thus needs bison, even for a one-time build.
|
||||
depends_on("m4", type="build", when="@:2.29 +gold")
|
||||
depends_on("bison", type="build", when="@:2.29 +gold")
|
||||
|
||||
# 2.34:2.38 needs makeinfo due to a bug, see:
|
||||
# 2.34:2.40 needs makeinfo due to a bug, see:
|
||||
# https://sourceware.org/bugzilla/show_bug.cgi?id=25491
|
||||
depends_on("texinfo", type="build", when="@2.34:2.38")
|
||||
# https://sourceware.org/bugzilla/show_bug.cgi?id=28909
|
||||
depends_on("texinfo", type="build", when="@2.34:2.40")
|
||||
|
||||
# gprofng requires bison
|
||||
depends_on("bison@3.0.4:", type="build", when="+gprofng")
|
||||
|
||||
conflicts("+gold", when="platform=darwin", msg="Binutils cannot build linkers on macOS")
|
||||
conflicts(
|
||||
"~lto", when="+pgo", msg="Profile-guided optimization enables link-time optimization"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def determine_version(cls, exe):
|
||||
@@ -95,6 +115,13 @@ def determine_version(cls, exe):
|
||||
|
||||
def flag_handler(self, name, flags):
|
||||
spec = self.spec
|
||||
|
||||
# Set -O3 -g0 by default when using gcc or clang, since it improves performance
|
||||
# a bit and significantly reduces install size
|
||||
if name in ("cflags", "cxxflags") and self.compiler.name in ("gcc", "clang"):
|
||||
flags.insert(0, "-g0")
|
||||
flags.insert(0, "-O3")
|
||||
|
||||
# Use a separate variable for injecting flags. This way, installing
|
||||
# `binutils cflags='-O2'` will still work as expected.
|
||||
iflags = []
|
||||
@@ -105,10 +132,8 @@ def flag_handler(self, name, flags):
|
||||
):
|
||||
iflags.append("-Wno-narrowing")
|
||||
elif name == "cflags":
|
||||
if spec.satisfies("@:2.34 %gcc@10:"):
|
||||
if spec.satisfies("@:2.34 %gcc@10:") or spec.satisfies("%cce"):
|
||||
iflags.append("-fcommon")
|
||||
if spec.satisfies("%cce") or spec.satisfies("@2.38 %gcc"):
|
||||
iflags.extend([self.compiler.cc_pic_flag, "-fcommon"])
|
||||
elif name == "ldflags":
|
||||
if spec.satisfies("%cce") or spec.satisfies("@2.38 %gcc"):
|
||||
iflags.append("-Wl,-z,notext")
|
||||
@@ -143,28 +168,45 @@ def test(self):
|
||||
|
||||
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
||||
def configure_args(self):
|
||||
known_targets = {
|
||||
"x86_64": "x86_64",
|
||||
"aarch64": "aarch64",
|
||||
"ppc64le": "powerpc",
|
||||
}
|
||||
known_platforms = {"linux": "linux-gnu", "cray": "linux-gnu", "darwin": "apple-darwin"}
|
||||
|
||||
family = str(self.spec.target.family)
|
||||
platform = self.spec.platform
|
||||
|
||||
if family in known_targets and platform in known_platforms:
|
||||
targets = "{}-{}".format(known_targets[family], known_platforms[platform])
|
||||
else:
|
||||
targets = "all"
|
||||
|
||||
args = [
|
||||
"--disable-dependency-tracking",
|
||||
"--disable-werror",
|
||||
"--enable-multilib",
|
||||
"--enable-64-bit-bfd",
|
||||
"--enable-targets=all",
|
||||
"--with-system-zlib",
|
||||
"--enable-multilib",
|
||||
"--enable-pic",
|
||||
"--enable-targets={}".format(targets),
|
||||
"--with-sysroot=/",
|
||||
"--with-system-zlib",
|
||||
]
|
||||
args += self.enable_or_disable("gas")
|
||||
args += self.enable_or_disable("gold")
|
||||
args += self.enable_or_disable("gprofng")
|
||||
args += self.enable_or_disable("install-libiberty", variant="libiberty")
|
||||
args += self.enable_or_disable("interwork")
|
||||
args += self.enable_or_disable("ld")
|
||||
args += self.enable_or_disable("libs")
|
||||
args += self.enable_or_disable("lto")
|
||||
args += self.enable_or_disable("ld")
|
||||
args += self.enable_or_disable("gas")
|
||||
args += self.enable_or_disable("interwork")
|
||||
args += self.enable_or_disable("gold")
|
||||
args += self.enable_or_disable("nls")
|
||||
args += self.enable_or_disable("plugins")
|
||||
|
||||
if "+libiberty" in self.spec:
|
||||
args.append("--enable-install-libiberty")
|
||||
if "+pgo" in self.spec:
|
||||
args.append("--enable-pgo-build=lto")
|
||||
else:
|
||||
args.append("--disable-install-libiberty")
|
||||
args.append("--disable-pgo-build")
|
||||
|
||||
# To avoid namespace collisions with Darwin/BSD system tools,
|
||||
# prefix executables with "g", e.g., gar, gnm; see Homebrew
|
||||
|
||||
@@ -23,8 +23,6 @@ class Clingo(CMakePackage):
|
||||
url = "https://github.com/potassco/clingo/archive/v5.2.2.tar.gz"
|
||||
git = "https://github.com/potassco/clingo.git"
|
||||
tags = ["windows"]
|
||||
submodules = True
|
||||
|
||||
maintainers = ["tgamblin", "alalazo"]
|
||||
|
||||
version("master", branch="master", submodules=True)
|
||||
|
||||
@@ -13,14 +13,18 @@ class Costa(CMakePackage):
|
||||
Based on the paper: https://arxiv.org/abs/2106.06601
|
||||
"""
|
||||
|
||||
maintainers = ["haampie", "kabicm"]
|
||||
maintainers = [
|
||||
"haampie",
|
||||
"kabicm",
|
||||
"RMeli",
|
||||
]
|
||||
homepage = "https://github.com/eth-cscs/COSTA"
|
||||
url = "https://github.com/eth-cscs/COSTA/releases/download/v2.0/COSTA-v2.0.tar.gz"
|
||||
git = "https://github.com/eth-cscs/COSTA.git"
|
||||
|
||||
# note: The default archives produced with github do not have the archives
|
||||
# of the submodules.
|
||||
version("master", branch="master", submodules=True)
|
||||
version("2.1", sha256="c1e86452415083f7470b292d93ec60708b7c8dbafc2bac383636bb4b28135866")
|
||||
version("2.0", sha256="de250197f31f7d23226c6956a687c3ff46fb0ff6c621a932428236c3f7925fe4")
|
||||
|
||||
variant("scalapack", default=False, description="Build with ScaLAPACK API")
|
||||
@@ -31,9 +35,14 @@ class Costa(CMakePackage):
|
||||
depends_on("scalapack", when="+scalapack")
|
||||
|
||||
def url_for_version(self, version):
|
||||
return "https://github.com/eth-cscs/COSTA/releases/download/v{0}/COSTA-v{1}.tar.gz".format(
|
||||
version, version
|
||||
)
|
||||
if version == Version("2.0"):
|
||||
return "https://github.com/eth-cscs/COSTA/releases/download/v{0}/COSTA-v{1}.tar.gz".format(
|
||||
version, version
|
||||
)
|
||||
else:
|
||||
return "https://github.com/eth-cscs/COSTA/archive/refs/tags/v{0}.tar.gz".format(
|
||||
version
|
||||
)
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
return
|
||||
|
||||
@@ -24,6 +24,8 @@ class Curl(NMakePackage, AutotoolsPackage):
|
||||
executables = ["^curl$"]
|
||||
tags = ["build-tools", "windows"]
|
||||
|
||||
version("7.87.0", sha256="5d6e128761b7110946d1276aff6f0f266f2b726f5e619f7e0a057a474155f307")
|
||||
version("7.86.0", sha256="f5ca69db03eea17fa8705bdfb1a9f58d76a46c9010518109bb38f313137e0a28")
|
||||
version("7.85.0", sha256="21a7e83628ee96164ac2b36ff6bf99d467c7b0b621c1f7e317d8f0d96011539c")
|
||||
version("7.84.0", sha256="702fb26e73190a3bd77071aa146f507b9817cc4dfce218d2ab87f00cd3bc059d")
|
||||
version("7.83.0", sha256="247c7ec7521c4258e65634e529270d214fe32969971cccb72845e7aa46831f96")
|
||||
|
||||
@@ -17,6 +17,9 @@ class Dbcsr(CMakePackage, CudaPackage, ROCmPackage):
|
||||
maintainers = ["dev-zero"]
|
||||
|
||||
version("develop", branch="develop")
|
||||
version("2.5.0", sha256="91fda9b2502e5d0a2a6cdd5a73ef096253cc7e75bd01ba5189a4726ad86aef08")
|
||||
version("2.4.1", sha256="b3d5ae62ca582b72707a2c932e8074a4f2f61d61085d97bd374213c70b8dbdcf")
|
||||
version("2.4.0", sha256="cf2b774328c9a30677501f49b79955841bd08915a7ca53c8533bfdf14a8f9bd4")
|
||||
version("2.3.0", sha256="f750de586cffa66852b646f7f85eb831eeb64fa2d25ce50ed10e1df016dd3364")
|
||||
version("2.2.0", sha256="245b0382ddc7b80f85af8288f75bd03d56ec51cdfb6968acb4931529b35173ec")
|
||||
version("2.1.0", sha256="9e58fd998f224632f356e479d18b5032570d00d87b86736b6a6ac2d03f8d4b3c")
|
||||
|
||||
@@ -13,11 +13,11 @@ class Dejagnu(AutotoolsPackage, GNUMirrorPackage):
|
||||
homepage = "https://www.gnu.org/software/dejagnu/"
|
||||
gnu_mirror_path = "dejagnu/dejagnu-1.6.tar.gz"
|
||||
|
||||
version("1.6.3", sha256="87daefacd7958b4a69f88c6856dbd1634261963c414079d0c371f589cd66a2e3")
|
||||
version("1.6", sha256="00b64a618e2b6b581b16eb9131ee80f721baa2669fa0cdee93c500d1a652d763")
|
||||
version("1.4.4", sha256="d0fbedef20fb0843318d60551023631176b27ceb1e11de7468a971770d0e048d")
|
||||
|
||||
depends_on("expect")
|
||||
depends_on("tcl@8.5:")
|
||||
depends_on("expect", type=("run", "link", "build"))
|
||||
|
||||
# DejaGnu 1.4.4 cannot be built in parallel
|
||||
# `make check` also fails but this can be ignored
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
diff --git a/requirements/core.txt b/requirements/core.txt
|
||||
index 0c0d3de..17c10f4 100644
|
||||
--- a/requirements/core.txt
|
||||
+++ b/requirements/core.txt
|
||||
@@ -6,4 +6,3 @@ pyelftools==0.27
|
||||
requests>=2.26.0
|
||||
tinydb==4.5.2
|
||||
python-sotools==0.1.0
|
||||
-docker==5.0.3
|
||||
var/spack/repos/builtin/packages/e4s-cl/package.py (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class E4sCl(PythonPackage):
|
||||
"""Container Launcher for E4S containers, facilitating MPI library
|
||||
translations"""
|
||||
|
||||
maintainers = ["spoutn1k", "FrederickDeny"]
|
||||
homepage = "https://e4s-cl.readthedocs.io"
|
||||
url = "https://oaciss.uoregon.edu/e4s/e4s-cl/releases"
|
||||
git = "https://github.com/E4S-Project/e4s-cl"
|
||||
|
||||
tags = ["e4s"]
|
||||
|
||||
patch("drop-docker.patch", when="@:1.0.1")
|
||||
|
||||
version("master", branch="master")
|
||||
version("1.0.1", commit="b2c92993e0c7cb42de07f0f7cc02da3a06816192")
|
||||
version("1.0.0", commit="410bb2e6601d9b90243a487ad7f7d2dabd8ba04c")
|
||||
|
||||
depends_on("python@3.7:", type=("build", "run"))
|
||||
depends_on("py-setuptools", type="build")
|
||||
|
||||
depends_on("py-termcolor@1.1.0:", type=("build", "run"))
|
||||
depends_on("py-pyyaml@6.0:", type=("build", "run"))
|
||||
depends_on("py-texttable@1.6.2:", type=("build", "run"))
|
||||
depends_on("py-python-ptrace@0.9.7:", type=("build", "run"))
|
||||
depends_on("py-pyelftools@0.27", type=("build", "run"))
|
||||
depends_on("py-requests@2.26.0:", type=("build", "run"))
|
||||
depends_on("py-tinydb@4.5.2", type=("build", "run"))
|
||||
depends_on("py-python-sotools@0.1.0", type=("build", "run"))
|
||||
@@ -19,6 +19,7 @@ class Fargparse(CMakePackage):
|
||||
version("develop", branch="develop")
|
||||
version("main", branch="main")
|
||||
|
||||
version("1.4.2", sha256="2cd3f14845235407c6a4171ab4602499dade045e3f9b7dc75190f4a315ac8b44")
|
||||
version("1.4.1", sha256="8f9b92a80f05b0a8ab2dd5cd309ad165041c7fcdd589b96bf75c7dd889b9b584")
|
||||
version("1.3.1", sha256="65d168696762b53f9a34fac8a82527fb602372f47be05018ebb382ec27b52c6c")
|
||||
version("1.3.0", sha256="08fde5fb1b739b69203ac336fe7b39915cfc7f52e068e564b9b6d905d79fc93d")
|
||||
@@ -27,6 +28,7 @@ class Fargparse(CMakePackage):
|
||||
|
||||
depends_on("gftl-shared")
|
||||
depends_on("gftl")
|
||||
depends_on("cmake@3.12:", type="build")
|
||||
|
||||
variant(
|
||||
"build_type",
|
||||
|
||||
@@ -27,6 +27,7 @@ class Fortrilinos(CMakePackage):
|
||||
tags = ["e4s"]
|
||||
test_requires_compiler = True
|
||||
|
||||
version("2.2.0", sha256="9e73fc71066bfaf7cde040e1467baf7a1ec797ff2874add49f9741e93f9fffb5")
|
||||
version("2.1.0", sha256="2c62bb6106ae86a804497d549080cb6877c5d860b6bf2e72ec5cbcbbe63e3b5b")
|
||||
version("2.0.1", sha256="291a62c885cd4ffd76cbebafa02789649bd4fa73f1005cf8da51fd153acb9e1a")
|
||||
version("2.0.0", sha256="4382a21864e70e9059654c0529cac95548768fe02855c5f3624e454807dff018")
|
||||
@@ -49,8 +50,9 @@ class Fortrilinos(CMakePackage):
|
||||
variant("shared", default=True, description="Build shared libraries")
|
||||
|
||||
# Trilinos version dependencies
|
||||
depends_on("trilinos@13.4.0:13.4", when="@2.2.0:2.2")
|
||||
depends_on("trilinos@13.2.0:13.2", when="@2.1.0:2.1")
|
||||
depends_on("trilinos@13.0.0:13.0", when="@2.0.0:2.0")
|
||||
depends_on("trilinos@13.0.0:13.2", when="@2.0.0:2.0")
|
||||
depends_on("trilinos@12.18.1", when="@2.0.dev3")
|
||||
depends_on("trilinos@12.18.1", when="@2.0.dev2")
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ class GftlShared(CMakePackage):
|
||||
|
||||
version("main", branch="main")
|
||||
|
||||
version("1.5.1", sha256="353d07cc22678d1a79b19dbf53d8ba54b889e424a15e315cc4f035b72eedb83a")
|
||||
version("1.5.0", sha256="c19b8197cc6956d4a51a16f98b38b63c7bc9f784f1fd38f8e3949be3ea792356")
|
||||
version("1.4.1", sha256="bb403f72e80aaac49ed5107f7c755ce5273c2e650bd5438a746228798eeced6c")
|
||||
version("1.4.0", sha256="83a2474ae943d81d797460b18106874de14c39093efd4e35abb3f1b6ec835171")
|
||||
@@ -34,7 +35,7 @@ class GftlShared(CMakePackage):
|
||||
version("1.3.0", sha256="979b00c4d531e701bf4346f662e3e4cc865124a97ca958637a53201d66d4ee43")
|
||||
|
||||
depends_on("m4", type=("build", "run"))
|
||||
depends_on("cmake", type="build")
|
||||
depends_on("cmake@3.12:", type="build")
|
||||
depends_on("gftl")
|
||||
|
||||
variant(
|
||||
|
||||
@@ -38,6 +38,7 @@ class Gftl(CMakePackage):
|
||||
version("develop", branch="develop")
|
||||
version("main", branch="main")
|
||||
|
||||
version("1.8.2", sha256="7ee9a1db62f6dd09e533516d7dc53fbc9c8c81464bb12f6eb558ad5d3bfd85ef")
|
||||
version("1.8.1", sha256="b8171ea69b108325816472ee47068618d709a3f563959142bc58ff38908a7210")
|
||||
version("1.8.0", sha256="e99def0a9a1b3031ceff22c416bee75e70558cf6b91ce4be70b0ad752dda26c6")
|
||||
version("1.7.2", sha256="35a39a0dffb91969af5577b6dd7681379e1c16ca545f0cc2dae0b5192474d852")
|
||||
@@ -48,7 +49,7 @@ class Gftl(CMakePackage):
|
||||
version("1.5.5", sha256="67ff8210f08e9f2ee6ba23c8c26336f948420db5db7fc054c3a662e9017f18a3")
|
||||
version("1.5.4", sha256="4c53e932ba8d82616b65500f403a33a14957b9266b5e931e2448f1f005990750")
|
||||
|
||||
depends_on("cmake", type="build")
|
||||
depends_on("cmake@3.12:", type="build")
|
||||
depends_on("m4", type="build")
|
||||
|
||||
variant(
|
||||
|
||||
@@ -197,7 +197,6 @@ class Hdf5(CMakePackage):
|
||||
depends_on("cmake@3.12:", type="build")
|
||||
depends_on("cmake@3.18:", type="build", when="@1.13:")
|
||||
|
||||
depends_on("msmpi", when="+mpi platform=windows")
|
||||
depends_on("mpi", when="+mpi")
|
||||
depends_on("java", type=("build", "run"), when="+java")
|
||||
depends_on("szip", when="+szip")
|
||||
|
||||
@@ -111,58 +111,66 @@ class Hip(CMakePackage):
|
||||
description="CMake build type",
|
||||
)
|
||||
|
||||
variant("rocm", default=True, description="Enable ROCm support")
|
||||
variant("cuda", default=False, description="Build with CUDA")
|
||||
conflicts("+cuda +rocm", msg="CUDA and ROCm support are mutually exclusive")
|
||||
conflicts("~cuda ~rocm", msg="CUDA or ROCm support is required")
|
||||
|
||||
depends_on("cuda", when="+cuda")
|
||||
|
||||
depends_on("cmake@3.16.8:", type="build", when="@4.5.0:")
|
||||
depends_on("cmake@3.4.3:", type="build")
|
||||
depends_on("perl@5.10:", type=("build", "run"))
|
||||
depends_on("gl@4.5:")
|
||||
depends_on("py-cppheaderparser", type="build", when="@5.3.3:")
|
||||
|
||||
for ver in [
|
||||
"3.5.0",
|
||||
"3.7.0",
|
||||
"3.8.0",
|
||||
"3.9.0",
|
||||
"3.10.0",
|
||||
"4.0.0",
|
||||
"4.1.0",
|
||||
"4.2.0",
|
||||
"4.3.0",
|
||||
"4.3.1",
|
||||
]:
|
||||
depends_on("hip-rocclr@" + ver, when="@" + ver)
|
||||
for ver in [
|
||||
"3.5.0",
|
||||
"3.7.0",
|
||||
"3.8.0",
|
||||
"3.9.0",
|
||||
"3.10.0",
|
||||
"4.0.0",
|
||||
"4.1.0",
|
||||
"4.2.0",
|
||||
"4.3.0",
|
||||
"4.3.1",
|
||||
"4.5.0",
|
||||
"4.5.2",
|
||||
"5.0.0",
|
||||
"5.0.2",
|
||||
"5.1.0",
|
||||
"5.1.3",
|
||||
"5.2.0",
|
||||
"5.2.1",
|
||||
"5.2.3",
|
||||
"5.3.0",
|
||||
"5.3.3",
|
||||
]:
|
||||
depends_on("hsakmt-roct@" + ver, when="@" + ver)
|
||||
depends_on("hsa-rocr-dev@" + ver, when="@" + ver)
|
||||
depends_on("comgr@" + ver, when="@" + ver)
|
||||
depends_on("llvm-amdgpu@{0} +rocm-device-libs".format(ver), when="@" + ver)
|
||||
depends_on("rocminfo@" + ver, when="@" + ver)
|
||||
depends_on("roctracer-dev-api@" + ver, when="@" + ver)
|
||||
with when("+rocm"):
|
||||
depends_on("gl@4.5:")
|
||||
depends_on("py-cppheaderparser", type="build", when="@5.3.3:")
|
||||
for ver in [
|
||||
"3.5.0",
|
||||
"3.7.0",
|
||||
"3.8.0",
|
||||
"3.9.0",
|
||||
"3.10.0",
|
||||
"4.0.0",
|
||||
"4.1.0",
|
||||
"4.2.0",
|
||||
"4.3.0",
|
||||
"4.3.1",
|
||||
]:
|
||||
depends_on("hip-rocclr@" + ver, when="@" + ver)
|
||||
for ver in [
|
||||
"3.5.0",
|
||||
"3.7.0",
|
||||
"3.8.0",
|
||||
"3.9.0",
|
||||
"3.10.0",
|
||||
"4.0.0",
|
||||
"4.1.0",
|
||||
"4.2.0",
|
||||
"4.3.0",
|
||||
"4.3.1",
|
||||
"4.5.0",
|
||||
"4.5.2",
|
||||
"5.0.0",
|
||||
"5.0.2",
|
||||
"5.1.0",
|
||||
"5.1.3",
|
||||
"5.2.0",
|
||||
"5.2.1",
|
||||
"5.2.3",
|
||||
"5.3.0",
|
||||
"5.3.3",
|
||||
]:
|
||||
depends_on("hsakmt-roct@" + ver, when="@" + ver)
|
||||
depends_on("hsa-rocr-dev@" + ver, when="@" + ver)
|
||||
depends_on("comgr@" + ver, when="@" + ver)
|
||||
depends_on("llvm-amdgpu@{0} +rocm-device-libs".format(ver), when="@" + ver)
|
||||
depends_on("rocminfo@" + ver, when="@" + ver)
|
||||
depends_on("roctracer-dev-api@" + ver, when="@" + ver)
|
||||
|
||||
# hipcc likes to add `-lnuma` by default :(
|
||||
# ref https://github.com/ROCm-Developer-Tools/HIP/pull/2202
|
||||
depends_on("numactl", when="@3.7.0:")
|
||||
# hipcc likes to add `-lnuma` by default :(
|
||||
# ref https://github.com/ROCm-Developer-Tools/HIP/pull/2202
|
||||
depends_on("numactl", when="@3.7.0:")
|
||||
|
||||
# roc-obj-ls requirements
|
||||
depends_on("perl-file-which")
|
||||
@@ -390,65 +398,70 @@ def determine_version(cls, lib):
|
||||
return ver
|
||||
|
||||
def set_variables(self, env):
|
||||
# Note: do not use self.spec[name] here, since not all dependencies
|
||||
# have defined prefixes when hip is marked as external.
|
||||
paths = self.get_paths()
|
||||
if self.spec.satisfies("+rocm"):
|
||||
# Note: do not use self.spec[name] here, since not all dependencies
|
||||
# have defined prefixes when hip is marked as external.
|
||||
paths = self.get_paths()
|
||||
|
||||
# Used in hipcc, but only useful when hip is external, since only then
|
||||
# there is a common prefix /opt/rocm-x.y.z.
|
||||
env.set("ROCM_PATH", paths["rocm-path"])
|
||||
# Used in hipcc, but only useful when hip is external, since only then
|
||||
# there is a common prefix /opt/rocm-x.y.z.
|
||||
env.set("ROCM_PATH", paths["rocm-path"])
|
||||
|
||||
# hipcc recognizes HIP_PLATFORM == hcc and HIP_COMPILER == clang, even
|
||||
# though below we specified HIP_PLATFORM=rocclr and HIP_COMPILER=clang
|
||||
# in the CMake args.
|
||||
if self.spec.satisfies("@:4.0.0"):
|
||||
env.set("HIP_PLATFORM", "hcc")
|
||||
else:
|
||||
env.set("HIP_PLATFORM", "amd")
|
||||
# hipcc recognizes HIP_PLATFORM == hcc and HIP_COMPILER == clang, even
|
||||
# though below we specified HIP_PLATFORM=rocclr and HIP_COMPILER=clang
|
||||
# in the CMake args.
|
||||
if self.spec.satisfies("@:4.0.0"):
|
||||
env.set("HIP_PLATFORM", "hcc")
|
||||
else:
|
||||
env.set("HIP_PLATFORM", "amd")
|
||||
|
||||
env.set("HIP_COMPILER", "clang")
|
||||
env.set("HIP_COMPILER", "clang")
|
||||
|
||||
# bin directory where clang++ resides
|
||||
env.set("HIP_CLANG_PATH", paths["llvm-amdgpu"].bin)
|
||||
# bin directory where clang++ resides
|
||||
env.set("HIP_CLANG_PATH", paths["llvm-amdgpu"].bin)
|
||||
|
||||
# Path to hsa-rocr-dev prefix used by hipcc.
|
||||
env.set("HSA_PATH", paths["hsa-rocr-dev"])
|
||||
# Path to hsa-rocr-dev prefix used by hipcc.
|
||||
env.set("HSA_PATH", paths["hsa-rocr-dev"])
|
||||
|
||||
# This is a variable that does not exist in hipcc but was introduced
|
||||
# in a patch of ours since 3.5.0 to locate rocm_agent_enumerator:
|
||||
# https://github.com/ROCm-Developer-Tools/HIP/pull/2138
|
||||
env.set("ROCMINFO_PATH", paths["rocminfo"])
|
||||
# This is a variable that does not exist in hipcc but was introduced
|
||||
# in a patch of ours since 3.5.0 to locate rocm_agent_enumerator:
|
||||
# https://github.com/ROCm-Developer-Tools/HIP/pull/2138
|
||||
env.set("ROCMINFO_PATH", paths["rocminfo"])
|
||||
|
||||
# This one is used in hipcc to run `clang --hip-device-lib-path=...`
|
||||
env.set("DEVICE_LIB_PATH", paths["bitcode"])
|
||||
# This one is used in hipcc to run `clang --hip-device-lib-path=...`
|
||||
env.set("DEVICE_LIB_PATH", paths["bitcode"])
|
||||
|
||||
# And this is used in clang whenever the --hip-device-lib-path is not
|
||||
# used (e.g. when clang is invoked directly)
|
||||
env.set("HIP_DEVICE_LIB_PATH", paths["bitcode"])
|
||||
# And this is used in clang whenever the --hip-device-lib-path is not
|
||||
# used (e.g. when clang is invoked directly)
|
||||
env.set("HIP_DEVICE_LIB_PATH", paths["bitcode"])
|
||||
|
||||
# Just the prefix of hip (used in hipcc)
|
||||
env.set("HIP_PATH", paths["hip-path"])
|
||||
# Just the prefix of hip (used in hipcc)
|
||||
env.set("HIP_PATH", paths["hip-path"])
|
||||
|
||||
# Used in comgr and seems necessary when using the JIT compiler, e.g.
|
||||
# hiprtcCreateProgram:
|
||||
# https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/blob/rocm-4.0.0/lib/comgr/src/comgr-env.cpp
|
||||
env.set("LLVM_PATH", paths["llvm-amdgpu"])
|
||||
# Used in comgr and seems necessary when using the JIT compiler, e.g.
|
||||
# hiprtcCreateProgram:
|
||||
# https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/blob/rocm-4.0.0/lib/comgr/src/comgr-env.cpp
|
||||
env.set("LLVM_PATH", paths["llvm-amdgpu"])
|
||||
|
||||
# Finally we have to set --rocm-path=<prefix> ourselves, which is not
|
||||
# the same as --hip-device-lib-path (set by hipcc). It's used to set
|
||||
# default bin, include and lib folders in clang. If it's not set it is
|
||||
# infered from the clang install dir (and they try to find
|
||||
# /opt/rocm again...). If this path is set, there is no strict checking
|
||||
# and parsing of the <prefix>/bin/.hipVersion file. Let's just set this
|
||||
# to the hip prefix directory for non-external builds so that the
|
||||
# bin/.hipVersion file can still be parsed.
|
||||
# See also https://github.com/ROCm-Developer-Tools/HIP/issues/2223
|
||||
if "@3.8.0:" in self.spec:
|
||||
env.append_path(
|
||||
"HIPCC_COMPILE_FLAGS_APPEND",
|
||||
"--rocm-path={0}".format(paths["rocm-path"]),
|
||||
separator=" ",
|
||||
)
|
||||
# Finally we have to set --rocm-path=<prefix> ourselves, which is not
|
||||
# the same as --hip-device-lib-path (set by hipcc). It's used to set
|
||||
# default bin, include and lib folders in clang. If it's not set it is
|
||||
# infered from the clang install dir (and they try to find
|
||||
# /opt/rocm again...). If this path is set, there is no strict checking
|
||||
# and parsing of the <prefix>/bin/.hipVersion file. Let's just set this
|
||||
# to the hip prefix directory for non-external builds so that the
|
||||
# bin/.hipVersion file can still be parsed.
|
||||
# See also https://github.com/ROCm-Developer-Tools/HIP/issues/2223
|
||||
if "@3.8.0:" in self.spec:
|
||||
env.append_path(
|
||||
"HIPCC_COMPILE_FLAGS_APPEND",
|
||||
"--rocm-path={0}".format(paths["rocm-path"]),
|
||||
separator=" ",
|
||||
)
|
||||
elif self.spec.satisfies("+cuda"):
|
||||
env.set("CUDA_PATH", self.spec["cuda"].prefix)
|
||||
env.set("HIP_PATH", self.spec.prefix)
|
||||
env.set("HIP_PLATFORM", "nvidia")
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
self.set_variables(env)
|
||||
@@ -478,7 +491,7 @@ def patch(self):
|
||||
"hip-config.cmake.in",
|
||||
string=True,
|
||||
)
|
||||
if self.spec.satisfies("@5.2:"):
|
||||
if self.spec.satisfies("@5.2: +rocm"):
|
||||
filter_file(
|
||||
'"${ROCM_PATH}/llvm"',
|
||||
self.spec["llvm-amdgpu"].prefix,
|
||||
@@ -519,7 +532,7 @@ def patch(self):
|
||||
substitute = "#!{perl}".format(perl=perl)
|
||||
files = ["roc-obj-extract", "roc-obj-ls"]
|
||||
filter_file(match, substitute, *files, **kwargs)
|
||||
if "@3.7.0:" in self.spec:
|
||||
if "@3.7.0: +rocm" in self.spec:
|
||||
numactl = self.spec["numactl"].prefix.lib
|
||||
kwargs = {"ignore_absent": False, "backup": False, "string": False}
|
||||
|
||||
@@ -537,20 +550,24 @@ def flag_handler(self, name, flags):
|
||||
return (flags, None, None)
|
||||
|
||||
def cmake_args(self):
|
||||
args = [
|
||||
self.define(
|
||||
"PROF_API_HEADER_PATH",
|
||||
join_path(self.spec["roctracer-dev-api"].prefix, "roctracer", "include", "ext"),
|
||||
),
|
||||
self.define("HIP_COMPILER", "clang"),
|
||||
self.define("HSA_PATH", self.spec["hsa-rocr-dev"].prefix),
|
||||
]
|
||||
if self.spec.satisfies("@:4.0.0"):
|
||||
args.append(self.define("HIP_RUNTIME", "ROCclr"))
|
||||
args.append(self.define("HIP_PLATFORM", "rocclr"))
|
||||
else:
|
||||
args.append(self.define("HIP_RUNTIME", "rocclr"))
|
||||
args.append(self.define("HIP_PLATFORM", "amd"))
|
||||
args = []
|
||||
if self.spec.satisfies("+rocm"):
|
||||
args.append(self.define("HSA_PATH", self.spec["hsa-rocr-dev"].prefix))
|
||||
args.append(self.define("HIP_COMPILER", "clang"))
|
||||
args.append(
|
||||
self.define(
|
||||
"PROF_API_HEADER_PATH",
|
||||
self.spec["roctracer-dev-api"].prefix.roctracer.include.ext,
|
||||
)
|
||||
)
|
||||
if self.spec.satisfies("@:4.0.0"):
|
||||
args.append(self.define("HIP_RUNTIME", "ROCclr"))
|
||||
args.append(self.define("HIP_PLATFORM", "rocclr"))
|
||||
else:
|
||||
args.append(self.define("HIP_RUNTIME", "rocclr"))
|
||||
args.append(self.define("HIP_PLATFORM", "amd"))
|
||||
if self.spec.satisfies("+cuda"):
|
||||
args.append(self.define("HIP_PLATFORM", "nvidia"))
|
||||
|
||||
# LIBROCclr_STATIC_DIR is unused from 3.6.0 and above
|
||||
if "@3.5.0:4.3.2" in self.spec:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -232,6 +232,7 @@ def url_for_version(self, version):
|
||||
"meam",
|
||||
"misc",
|
||||
"mliap",
|
||||
"ml-hdnnp",
|
||||
"ml-iap",
|
||||
"ml-snap",
|
||||
"molecule",
|
||||
@@ -262,6 +263,7 @@ def url_for_version(self, version):
|
||||
"user-eff",
|
||||
"user-fep",
|
||||
"user-h5md",
|
||||
"user-hdnnp",
|
||||
"user-intel",
|
||||
"user-lb",
|
||||
"user-manifold",
|
||||
@@ -412,6 +414,9 @@ def url_for_version(self, version):
|
||||
depends_on("py-numpy", when="+mliap+python")
|
||||
depends_on("py-numpy", when="+ml-iap+python")
|
||||
depends_on("py-setuptools", when="@20220217:+python", type="build")
|
||||
depends_on("n2p2@2.1.4:", when="+user-hdnnp")
|
||||
depends_on("n2p2@2.1.4:", when="+ml-hdnnp")
|
||||
depends_on("n2p2+shared", when="+lib ^n2p2")
|
||||
|
||||
conflicts("+cuda", when="+opencl")
|
||||
conflicts("+body", when="+poems@:20180628")
|
||||
@@ -603,6 +608,19 @@ def url_for_version(self, version):
|
||||
msg="+user-h5md was removed after @20210527, use +h5md instead",
|
||||
)
|
||||
conflicts("+h5md", when="@:20210527", msg="+h5md only added @20210702, use +user-h5md instead")
|
||||
conflicts(
|
||||
"+user-hdnnp", when="@:20210514", msg="+user-hdnnp was introduced in version @20210527"
|
||||
)
|
||||
conflicts(
|
||||
"+user-hdnnp",
|
||||
when="@20210702:",
|
||||
msg="+user-hdnnp was removed after @20210527, use +ml-hdnnp instead",
|
||||
)
|
||||
conflicts(
|
||||
"+ml-hdnnp",
|
||||
when="@:20210527",
|
||||
msg="+ml-hdnnp only added @20210702, use +user-hdnnp instead",
|
||||
)
|
||||
conflicts(
|
||||
"+user-intel",
|
||||
when="@20210702:",
|
||||
@@ -938,6 +956,9 @@ def cmake_args(self):
|
||||
if "+user-smd" in spec or "+machdyn" in spec:
|
||||
args.append("-DDOWNLOAD_EIGEN3=no")
|
||||
args.append("-DEIGEN3_INCLUDE_DIR={0}".format(self.spec["eigen"].prefix.include))
|
||||
if "+user-hdnnp" in spec or "+ml-hdnnp" in spec:
|
||||
args.append("-DDOWNLOAD_N2P2=no")
|
||||
args.append("-DN2P2_DIR={0}".format(self.spec["n2p2"].prefix))
|
||||
|
||||
return args
|
||||
|
||||
|
||||
var/spack/repos/builtin/packages/lbfgspp/package.py (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class Lbfgspp(CMakePackage):
|
||||
"""A Header-only C++ Library for L-BFGS and L-BFGS-B Algorithms"""
|
||||
|
||||
homepage = "https://lbfgspp.statr.me/"
|
||||
url = "https://github.com/yixuan/LBFGSpp/archive/refs/tags/v0.2.0.tar.gz"
|
||||
|
||||
version("0.2.0", sha256="7101744a538c3aff52e10c82267305847b0b5e9d39f9974b4b29812cd1398ff9")
|
||||
|
||||
depends_on("eigen @3:")
|
||||
@@ -15,6 +15,8 @@ class Libblastrampoline(MakefilePackage):
|
||||
|
||||
maintainers = ["haampie", "giordano"]
|
||||
|
||||
version("5.4.0", sha256="e1a2258b0ad31cc41e6e9b8ba36f5c239fd1a267f2657ef3d3f669cc5b811f6a")
|
||||
version("5.3.0", sha256="95bca73f1567e2acd1110d0dfe5bb58fc86718555cd6eab89f0a769534df3b62")
|
||||
version("5.2.0", sha256="5af9ff9cec16087f57109082a362419fc49152034fa90772ebcabd882007fd95")
|
||||
version("5.1.1", sha256="cb5515512f6653538ce74653e46ccfba58c87b7dcb79b9655f7c3655e65498f0")
|
||||
version("5.1.0", sha256="55ac0c8f9cb91b2ed2db014be8394c9dadf3b5f26bd8af6dca9d6f20ca72b8fd")
|
||||
|
||||
@@ -31,6 +31,12 @@ class Libgcrypt(AutotoolsPackage):
|
||||
|
||||
depends_on("libgpg-error@1.25:")
|
||||
|
||||
def flag_handler(self, name, flags):
|
||||
# We should not inject optimization flags through the wrapper, because
|
||||
# the jitter entropy code should never be compiled with optimization
|
||||
# flags, and the build system ensures that
|
||||
return (None, flags, None)
|
||||
|
||||
def check(self):
|
||||
# Without this hack, `make check` fails on macOS when SIP is enabled
|
||||
# https://bugs.gnupg.org/gnupg/issue2056
|
||||
|
||||
@@ -11,9 +11,10 @@ class Libxc(AutotoolsPackage, CudaPackage):
|
||||
density-functional theory."""
|
||||
|
||||
homepage = "https://tddft.org/programs/libxc/"
|
||||
url = "https://www.tddft.org/programs/libxc/down.php?file=2.2.2/libxc-2.2.2.tar.gz"
|
||||
url = "https://www.tddft.org/programs/libxc/down/2.2.2/libxc-2.2.2.tar.gz"
|
||||
|
||||
# Get checksum from latest release package at https://tddft.org/programs/libxc/download/
|
||||
version("6.1.0", sha256="a3aa16915942543031a5d9c4a92c439ce54249bdcda8c91c4e69e65329dc9a54")
|
||||
version("6.0.0", sha256="c2ca205a762200dfba2e6c9e8ca2061aaddc6b7cf42048859fe717a7aa07de7c")
|
||||
version("5.2.3", sha256="7b7a96d8eeb472c7b8cca7ac38eae27e0a8113ef44dae5359b0eb12592b4bcf2")
|
||||
version("5.1.7", sha256="1a818fdfe5c5f74270bc8ef0c59064e8feebcd66b8f642c08aecc1e7d125be34")
|
||||
@@ -96,13 +97,9 @@ def setup_build_environment(self, env):
env.append_flags("CFLAGS", "-arch=sm_{0}".format(cuda_arch))

def configure_args(self):
spec = self.spec

args = [
"--enable-shared" if "+shared" in spec else "--disable-shared",
"--enable-cuda" if "+cuda" in spec else "--disable-cuda",
]

args = []
args += self.enable_or_disable("shared")
args += self.enable_or_disable("cuda")
return args

@run_after("configure")

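For context (not part of the diff): enable_or_disable() comes from Spack's Autotools build support and expands a boolean variant into the matching --enable-<name> or --disable-<name> flag, so the rewritten configure_args is a shorter equivalent of the removed ternaries. A rough sketch of what it yields for a spec built with +shared ~cuda:

    def configure_args(self):
        args = []
        args += self.enable_or_disable("shared")  # -> ["--enable-shared"]
        args += self.enable_or_disable("cuda")    # -> ["--disable-cuda"]
        return args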
@@ -19,11 +19,12 @@ class Loki(MakefilePackage):
|
||||
variant("shared", default=True, description="Build shared libraries")
|
||||
|
||||
def flag_handler(self, name, flags):
|
||||
iflags = []
|
||||
if name == "cxxflags":
|
||||
if self.spec.satisfies("%oneapi@2023.0.0:"):
|
||||
iflags.append("-Wno-error=dynamic-exception-spec")
|
||||
return (iflags, None, None)
|
||||
flags.append("-Wno-error=dynamic-exception-spec")
|
||||
if self.spec.satisfies("@0.1.7 %gcc@11:"):
|
||||
flags.append("-std=c++14")
|
||||
return (flags, None, None)
|
||||
|
||||
def build(self, spec, prefix):
|
||||
if "+shared" in spec:
|
||||
|
||||
@@ -118,6 +118,7 @@ class Mesa(MesonPackage):
|
||||
depends_on("libllvm@6:")
|
||||
depends_on("libllvm@:11", when="@:20")
|
||||
depends_on("libllvm@:12", when="@:21")
|
||||
|
||||
depends_on("libx11", when="+glx")
|
||||
depends_on("libxcb", when="+glx")
|
||||
depends_on("libxext", when="+glx")
|
||||
@@ -202,8 +203,10 @@ def meson_args(self):
|
||||
"-Dbuild-tests=false",
|
||||
"-Dglvnd=false",
|
||||
]
|
||||
if spec.satisfies("@:22.2"):
|
||||
# gallium-xvmc was removed in @main and @2.23:
|
||||
if self.spec.satisfies("@:22.2"):
|
||||
args.append("-Dgallium-xvmc=disabled")
|
||||
|
||||
args_platforms = []
|
||||
args_gallium_drivers = ["swrast"]
|
||||
args_dri_drivers = []
|
||||
|
||||
@@ -17,6 +17,7 @@ class MochiMargo(AutotoolsPackage):
|
||||
maintainers = ["carns", "mdorier", "fbudin69500", "chuckatkins"]
|
||||
|
||||
version("main", branch="main")
|
||||
version("0.12", sha256="d9949423d87d74ae20b0e344fdc76cc7e0a62249f219b05297b8f44891f75a8e")
|
||||
version("0.11.1", sha256="ce4a61e2796df2a2c6efdfbd2d2c5a6be31e524f279b740a423ed932607503f3")
|
||||
version("0.11", sha256="3f9f30591127ecf3aac8a524c69cbc90fe3c8e68e263bda4c69b1e2c8d77ebdd")
|
||||
version("0.10", sha256="163be090575ee267a84320b92791d83b98e9549b03bd705a166f0b5e4df53129")
|
||||
|
||||
@@ -48,6 +48,7 @@ class Ncurses(AutotoolsPackage, GNUMirrorPackage):
|
||||
conflicts("abi=6", when="@:5.9", msg="6 is not compatible with this release")
|
||||
|
||||
depends_on("pkgconfig", type="build")
|
||||
depends_on("pkgconf@1.8.0", type="build")
|
||||
|
||||
patch("patch_gcc_5.txt", when="@6.0%gcc@5.0:")
|
||||
patch("sed_pgi.patch", when="@:6.0")
|
||||
|
||||
@@ -21,6 +21,20 @@
|
||||
# - package key must be in the form '{os}-{arch}' where 'os' is in the
|
||||
# format returned by platform.system() and 'arch' by platform.machine()
|
||||
_versions = {
|
||||
"23.1": {
|
||||
"Linux-aarch64": (
|
||||
"5b430e03752954ea62ac1c745b1735cfdaa43b2e981a9412c1465ecb0412fff6",
|
||||
"https://developer.download.nvidia.com/hpc-sdk/23.1/nvhpc_2023_231_Linux_aarch64_cuda_multi.tar.gz",
|
||||
),
|
||||
"Linux-ppc64le": (
|
||||
"81759e7c747bf4f552b75e7657301f76ecc0828b94fe860f81108c6e83e6ad2b",
|
||||
"https://developer.download.nvidia.com/hpc-sdk/23.1/nvhpc_2023_231_Linux_ppc64le_cuda_multi.tar.gz",
|
||||
),
|
||||
"Linux-x86_64": (
|
||||
"55a064415f6d4ce6a01823ee27ebd266f4fb579679871e7c1a7c054bdc18e9f5",
|
||||
"https://developer.download.nvidia.com/hpc-sdk/23.1/nvhpc_2023_231_Linux_x86_64_cuda_multi.tar.gz",
|
||||
),
|
||||
},
|
||||
"22.11": {
|
||||
"Linux-aarch64": (
|
||||
"e60e798657c33b06754d33dfd5ab3bea2882d4a9b9476102303edf2bbe3b7a95",
|
||||
|
||||
@@ -15,9 +15,15 @@ class Opencascade(CMakePackage):
|
||||
|
||||
homepage = "https://www.opencascade.com"
|
||||
url = "https://git.dev.opencascade.org/gitweb/?p=occt.git;a=snapshot;h=refs/tags/V7_4_0;sf=tgz"
|
||||
git = "https://git.dev.opencascade.org/repos/occt.git"
|
||||
|
||||
maintainers = ["wdconinc"]
|
||||
|
||||
version(
|
||||
"7.7.0",
|
||||
extension="tar.gz",
|
||||
sha256="075ca1dddd9646fcf331a809904925055747a951a6afd07a463369b9b441b445",
|
||||
)
|
||||
version(
|
||||
"7.6.3",
|
||||
extension="tar.gz",
|
||||
@@ -54,6 +60,13 @@ class Opencascade(CMakePackage):
|
||||
sha256="655da7717dac3460a22a6a7ee68860c1da56da2fec9c380d8ac0ac0349d67676",
|
||||
)
|
||||
|
||||
# fix for numeric_limits in gcc-12; applies cleanly to all older versions
|
||||
patch(
|
||||
"https://git.dev.opencascade.org/gitweb/?p=occt.git;a=patch;h=2a8c5ad46cfef8114b13c3a33dcd88a81e522c1e",
|
||||
sha256="bd0d7463259f469f8fc06a2b11eec7b0c89882aeea2f8c8647cf750c44b3e656",
|
||||
when="@:7.7.0",
|
||||
)
|
||||
|
||||
variant("tbb", default=False, description="Build with Intel Threading Building Blocks")
|
||||
variant("vtk", default=False, description="Enable VTK support")
|
||||
variant("freeimage", default=False, description="Build with FreeImage")
|
||||
@@ -83,7 +96,7 @@ def cmake_args(self):
|
||||
|
||||
if "+tbb" in self.spec:
|
||||
args.append("-DUSE_TBB=ON")
|
||||
args.append("-D3RDPARTY_VTK_DIR=%s" % self.spec["intel-tbb"].prefix)
|
||||
args.append("-D3RDPARTY_TBB_DIR=%s" % self.spec["intel-tbb"].prefix)
|
||||
else:
|
||||
args.append("-DUSE_TBB=OFF")
|
||||
|
||||
@@ -97,6 +110,9 @@ def cmake_args(self):
|
||||
if "+freeimage" in self.spec:
|
||||
args.append("-DUSE_FREEIMAGE=ON")
|
||||
args.append("-D3RDPARTY_FREEIMAGE_DIR=%s" % self.spec["freeimage"].prefix)
|
||||
args.append(
|
||||
"-D3RDPARTY_FREEIMAGE_INCLUDE_DIR=%s" % self.spec["freeimage"].prefix.include
|
||||
)
|
||||
else:
|
||||
args.append("-DUSE_FREEIMAGE=OFF")
|
||||
|
||||
|
||||
@@ -22,6 +22,11 @@ class Orca(Package):
|
||||
maintainers = ["snehring"]
|
||||
manual_download = True
|
||||
|
||||
version(
|
||||
"5.0.3-f.1",
|
||||
sha256="dea377459d61ef7d7e822e366420197ee2a4864991dfcdc4ea1a683f9be26c7f",
|
||||
url="file://{0}/orca-5.0.3-f.1_linux_x86-64_shared_openmpi41.tar.xz".format(os.getcwd()),
|
||||
)
|
||||
version(
|
||||
"5.0.3",
|
||||
sha256="b8b9076d1711150a6d6cb3eb30b18e2782fa847c5a86d8404b9339faef105043",
|
||||
@@ -46,7 +51,13 @@ class Orca(Package):
|
||||
depends_on("zstd", when="@:4.2.1", type="build")
|
||||
|
||||
# Map Orca version with the required OpenMPI version
|
||||
openmpi_versions = {"4.0.1.2": "2.0.2", "4.2.0": "3.1.4", "4.2.1": "3.1.4", "5.0.3": "4.1.2"}
|
||||
openmpi_versions = {
|
||||
"4.0.1.2": "2.0.2",
|
||||
"4.2.0": "3.1.4",
|
||||
"4.2.1": "3.1.4",
|
||||
"5.0.3": "4.1.2",
|
||||
"5.0.3-f.1": "4.1.2",
|
||||
}
|
||||
for orca_version, openmpi_version in openmpi_versions.items():
|
||||
depends_on(
|
||||
"openmpi@{0}".format(openmpi_version), type="run", when="@{0}".format(orca_version)
|
||||
@@ -70,6 +81,9 @@ def install(self, spec, prefix):
|
||||
|
||||
# there are READMEs in there but they don't hurt anyone
|
||||
install_tree(vername, prefix.bin)
|
||||
if self.spec.satisfies("@5.0.3-f.1"):
|
||||
install_tree("bin", prefix.bin)
|
||||
install_tree("lib", prefix.lib)
|
||||
else:
|
||||
install_tree(".", prefix.bin)
|
||||
|
||||
@@ -81,4 +95,6 @@ def install(self, spec, prefix):
|
||||
install(mpirun_srun, prefix.bin.mpirun)
|
||||
|
||||
def setup_run_environment(self, env):
|
||||
env.prepend_path("LD_LIBRARY_PATH", self.prefix.bin)
|
||||
# In 5.0.3-f.1 an RPATH is set to $ORGIN/../lib
|
||||
if not self.spec.satisfies("@5.0.3-f.1"):
|
||||
env.prepend_path("LD_LIBRARY_PATH", self.prefix.bin)
|
||||
|
||||
@@ -268,6 +268,11 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
|
||||
# intel oneapi doesn't compile some code in catalyst
|
||||
patch("catalyst-etc_oneapi_fix.patch", when="@5.10.0:5.10.1%oneapi")
|
||||
|
||||
# Patch for paraview 5.10: +hdf5 ^hdf5@1.13.2:
|
||||
# https://gitlab.kitware.com/vtk/vtk/-/merge_requests/9690
|
||||
patch("vtk-xdmf2-hdf51.13.1.patch", when="@5.10.0:5.10 +hdf5")
|
||||
patch("vtk-xdmf2-hdf51.13.2.patch", when="@5.10: +hdf5")
|
||||
|
||||
@property
|
||||
def generator(self):
|
||||
# https://gitlab.kitware.com/paraview/paraview/-/issues/21223
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
From 3a536a3c6b949cb1c9dd147c3a395c3ff31856e8 Mon Sep 17 00:00:00 2001
|
||||
From: Ben Boeckel <ben.boeckel@kitware.com>
|
||||
Date: Thu, 14 Apr 2022 12:53:36 -0400
|
||||
Subject: [PATCH 1/6] xdmf2: install `XdmfExport.h`
|
||||
|
||||
---
|
||||
VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/CMakeLists.txt | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/CMakeLists.txt b/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/CMakeLists.txt
|
||||
index 274dbc4423c..a47bf4ee4d9 100644
|
||||
--- a/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/CMakeLists.txt
|
||||
+++ b/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/CMakeLists.txt
|
||||
@@ -276,6 +276,7 @@ vtk_module_add_module(VTK::xdmf2
|
||||
${XdmfModelSource}
|
||||
SOURCES ${XdmfExtraSource}
|
||||
HEADERS Xdmf.h
|
||||
+ XdmfExport.h
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/XdmfConfig.h"
|
||||
HEADERS_SUBDIR "vtkxdmf2/libsrc")
|
||||
set_target_properties(xdmf2
|
||||
--
|
||||
GitLab
|
||||
|
||||
|
||||
From 1456c9e6ab2b244d225dcfc0a7f985933c09d5f3 Mon Sep 17 00:00:00 2001
|
||||
From: Ben Boeckel <ben.boeckel@kitware.com>
|
||||
Date: Thu, 14 Apr 2022 15:20:13 -0400
|
||||
Subject: [PATCH 6/6] xdmf2: support HDF5 1.13.1
|
||||
|
||||
---
|
||||
VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx | 7 +++++++
|
||||
1 file changed, 7 insertions(+)
|
||||
|
||||
diff --git a/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx b/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx
|
||||
index 6a4966bb231..1a412b56d02 100644
|
||||
--- a/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx
|
||||
+++ b/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx
|
||||
@@ -156,6 +156,9 @@ static herr_t H5FD_dsm_write(H5FD_t *_file, H5FD_mem_t type, hid_t fapl_id, hadd
|
||||
}
|
||||
|
||||
static const H5FD_class_t H5FD_dsm_g = {
|
||||
+#if (H5_VERS_MAJOR>1)||((H5_VERS_MAJOR==1)&&(H5_VERS_MINOR>=13))
|
||||
+ H5_VFD_RESERVED + 0xd3f2, /*value */
|
||||
+#endif
|
||||
"dsm", /*name */
|
||||
MAXADDR, /*maxaddr */
|
||||
H5F_CLOSE_WEAK, /*fc_degree */
|
||||
@@ -193,6 +196,10 @@ static const H5FD_class_t H5FD_dsm_g = {
|
||||
#endif
|
||||
NULL, /*lock */
|
||||
NULL, /*unlock */
|
||||
+#if (H5_VERS_MAJOR>1)||((H5_VERS_MAJOR==1)&&(H5_VERS_MINOR>=13))
|
||||
+ NULL, /*del */
|
||||
+ NULL, /*ctl */
|
||||
+#endif
|
||||
H5FD_FLMAP_SINGLE /*fl_map */
|
||||
};
|
||||
|
||||
--
|
||||
GitLab
|
||||
@@ -0,0 +1,36 @@
From 2cc0d020359f714587d14b2f25a2c5f235f829c9 Mon Sep 17 00:00:00 2001
From: Ryan Krattiger <ryan.krattiger@kitware.com>
Date: Tue, 15 Nov 2022 15:00:36 -0600
Subject: [PATCH] Xdmf2: Update HDF5 driver for 1.13.2

---
VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx | 7 +++++++
1 file changed, 7 insertions(+)

diff --git a/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx b/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx
index 1a412b56d02..ff4427d0265 100644
--- a/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx
+++ b/VTK/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfH5Driver.cxx
@@ -157,6 +157,7 @@ static herr_t H5FD_dsm_write(H5FD_t *_file, H5FD_mem_t type, hid_t fapl_id, hadd

static const H5FD_class_t H5FD_dsm_g = {
#if (H5_VERS_MAJOR>1)||((H5_VERS_MAJOR==1)&&(H5_VERS_MINOR>=13))
+ 0, /*version */
H5_VFD_RESERVED + 0xd3f2, /*value */
#endif
"dsm", /*name */
@@ -190,6 +191,12 @@ static const H5FD_class_t H5FD_dsm_g = {
NULL, /*get_handle */
H5FD_dsm_read, /*read */
H5FD_dsm_write, /*write */
+#if (H5_VERS_MAJOR>1)||((H5_VERS_MAJOR==1)&&(H5_VERS_MINOR>=13))
+ NULL, /*read_vector */
+ NULL, /*write_vector */
+ NULL, /*read_selection */
+ NULL, /*write_selection */
+#endif
NULL, /*flush */
#if (H5_VERS_MAJOR>1)||((H5_VERS_MAJOR==1)&&(H5_VERS_MINOR>=8))
NULL, /* truncate */
--
GitLab
@@ -0,0 +1,17 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PerlMathSymbolic(PerlPackage):
    """Math::Symbolic - Symbolic calculations."""

    homepage = "https://metacpan.org/pod/Math::Symbolic"
    url = "https://cpan.metacpan.org/authors/id/S/SM/SMUELLER/Math-Symbolic-0.612.tar.gz"

    version("0.612", sha256="a9af979956c4c28683c535b5e5da3cde198c0cac2a11b3c9a129da218b3b9c08")

    depends_on("perl-module-build", type="build")
@@ -19,6 +19,7 @@ class Pflogger(CMakePackage):

    version("develop", branch="develop")
    version("main", branch="main")

    version("1.9.2", sha256="783879eb1326a911f6e22c016e8530644ed0d315660405f2b43df42ba8670acc")
    version("1.9.1", sha256="918965f5a748a3a62e54751578f5935a820407b988b8455f7f25c266b5b7fe3c")
    version("1.9.0", sha256="aacd9b7e188bee3a54a4e681adde32e3bd95bb556cbbbd2c725c81aca5008003")
    version("1.8.0", sha256="28ce9ac8af374253b6dfd8f53f8fd271c787d432645ec9bc6a5a01601dc56e19")

@@ -48,6 +49,8 @@ class Pflogger(CMakePackage):

    depends_on("mpi", when="+mpi")

    depends_on("cmake@3.12:", type="build")

    def cmake_args(self):
        spec = self.spec
        args = []
@@ -20,6 +20,7 @@ class Pfunit(CMakePackage):

    maintainers = ["mathomp4", "tclune"]

    version("4.6.2", sha256="fd302a1f7a131b38e18bc31ede69a216e580c640152e5e313f5a1e084669a950")
    version("4.6.1", sha256="19de22ff0542ca900aaf2957407f24d7dadaccd993ea210beaf22032d3095add")
    version("4.6.0", sha256="7c768ea3a2d16d8ef6229b25bd7756721c24a18db779c7422afde0e3e2248d72")
    version("4.5.0", sha256="ae0ed4541f2f4ec7b1d06eed532a49cb4c666394ab92b233911f92ce50f76743")

@@ -76,6 +77,15 @@ class Pfunit(CMakePackage):

    depends_on("esmf", when="+esmf")
    depends_on("m4", when="@4.1.5:", type="build")
    depends_on("fargparse", when="@4:")
    depends_on("cmake@3.12:", type="build")

    # CMake 3.25.0 has an issue with pFUnit
    # https://gitlab.kitware.com/cmake/cmake/-/issues/24203
    conflicts(
        "^cmake@3.25.0",
        when="@4.0.0:",
        msg="CMake 3.25.0 has a bug with pFUnit. Please use an older or newer version.",
    )

    conflicts(
        "%gcc@:8.3.9",
@@ -102,13 +102,14 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage):

    depends_on("rocsolver", when="@0.5: +rocm")
    depends_on("tracy-client", when="+tracy")
    conflicts("tracy-client@0.9:", when="@:0.9")
    depends_on("whip+rocm", when="@0.9: +rocm")
    depends_on("whip+cuda", when="@0.9: +cuda")
    depends_on("whip@0.1: +rocm", when="@0.9: +rocm")
    depends_on("whip@0.1: +cuda", when="@0.9: +cuda")

    with when("+rocm"):
        for val in ROCmPackage.amdgpu_targets:
            depends_on(
                "whip amdgpu_target={0}".format(val), when="@0.9: amdgpu_target={0}".format(val)
                "whip@0.1: amdgpu_target={0}".format(val),
                when="@0.9: amdgpu_target={0}".format(val),
            )
            depends_on(
                "rocsolver amdgpu_target={0}".format(val),

@@ -120,7 +121,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage):

    with when("+cuda"):
        for val in CudaPackage.cuda_arch_values:
            depends_on("whip cuda_arch={0}".format(val), when="@0.9: cuda_arch={0}".format(val))
            depends_on(
                "whip@0.1: cuda_arch={0}".format(val), when="@0.9: cuda_arch={0}".format(val)
            )

    for cxxstd in cxxstds:
        depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd))
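
The pika hunks above interleave the removed and added depends_on() lines. As a minimal sketch (hypothetical package name; whip, ROCmPackage and CudaPackage as used above, version constraints dropped), the resulting pattern for forwarding the GPU architecture chosen for a package to one of its dependencies looks roughly like this:

from spack.package import *


class Mylib(CMakePackage, CudaPackage, ROCmPackage):
    """Hypothetical package sketching the GPU-target forwarding used by pika."""

    depends_on("whip@0.1: +rocm", when="+rocm")
    depends_on("whip@0.1: +cuda", when="+cuda")

    # Build the dependency for the same GPU architecture as this package.
    with when("+rocm"):
        for val in ROCmPackage.amdgpu_targets:
            depends_on(
                "whip@0.1: amdgpu_target={0}".format(val),
                when="amdgpu_target={0}".format(val),
            )

    with when("+cuda"):
        for val in CudaPackage.cuda_arch_values:
            depends_on(
                "whip@0.1: cuda_arch={0}".format(val), when="cuda_arch={0}".format(val)
            )
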
@@ -16,24 +16,33 @@ class PortsOfCall(CMakePackage):

    maintainers = ["rbberger"]

    version("main", branch="main")
    version("1.2.0", sha256="b802ffa07c5f34ea9839f23841082133d8af191efe5a526cb7e53ec338ac146b")
    version("1.1.0", sha256="c47f7e24c82176b69229a2bcb23a6adcf274dc90ec77a452a36ccae0b12e6e39")
    version("1.4.1", sha256="82d2c75fcca8bd613273fd4126749df68ccc22fbe4134ba673b4275f9972b78d")
    version("1.4.0", sha256="e08ae556b7c30d14d77147d248d118cf5343a2e8c0847943385c602394bda0fa")
    version("1.3.0", sha256="54b4a62539c23b1a345dd87c1eac65f4f69db4e50336cd81a15a627ce80ce7d9")
    version(
        "1.2.0",
        sha256="b802ffa07c5f34ea9839f23841082133d8af191efe5a526cb7e53ec338ac146b",
        deprecated=True,
    )
    version(
        "1.1.0",
        sha256="c47f7e24c82176b69229a2bcb23a6adcf274dc90ec77a452a36ccae0b12e6e39",
        deprecated=True,
    )

    variant("doc", default=False, description="Sphinx Documentation Support")
    variant(
        "portability_strategy",
        description="Portability strategy backend",
        values=("Kokkos", "Cuda", "None"),
        multi=False,
        default="None",
        when="@:1.2.0",
    )

    depends_on("cmake@3.12:")

    depends_on("py-sphinx", when="+doc")
    depends_on("py-sphinx-rtd-theme@0.4.3", when="+doc")
    depends_on("py-sphinx-multiversion", when="+doc")
    depends_on("cmake@3.12:", type="build")

    def cmake_args(self):
        args = [self.define_from_variant("PORTABILITY_STRATEGY", "portability_strategy")]
        args = []
        if self.spec.satisfies("@:1.2.0"):
            args.append(self.define_from_variant("PORTABILITY_STRATEGY", "portability_strategy"))
        return args
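
Because the ports-of-call hunk above mixes removed and added lines without diff markers, here is a minimal sketch (hypothetical package name) of the version-gated variant pattern it introduces: the variant only exists for old releases, and cmake_args() only emits the corresponding -D flag when the spec actually has it.

from spack.package import *


class Mypkg(CMakePackage):
    """Hypothetical package sketching a variant that only exists up to 1.2.0."""

    variant(
        "portability_strategy",
        description="Portability strategy backend",
        values=("Kokkos", "Cuda", "None"),
        multi=False,
        default="None",
        when="@:1.2.0",  # the option was removed upstream after 1.2.0
    )

    def cmake_args(self):
        args = []
        if self.spec.satisfies("@:1.2.0"):
            args.append(self.define_from_variant("PORTABILITY_STRATEGY", "portability_strategy"))
        return args
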
@@ -7,14 +7,16 @@


class PyCligj(PythonPackage):
    """Click-based argument and option decorators for Python GIS command
    line programs"""
    """Common arguments and options for GeoJSON processing commands, using Click."""

    homepage = "https://github.com/mapbox/cligj"
    url = "https://github.com/mapbox/cligj/archive/0.5.0.zip"
    pypi = "cligj/cligj-0.7.2.tar.gz"

    version("0.5.0", sha256="ad158722a3f512f7eb33526479acf5cb53d9e59ca15cd494556440839783f106")
    version("0.4.0", sha256="5a5eb903ea66a8ccd41765dd276d9d08a6285f21dd99d41425ef80030d740351")
    version("0.7.2", sha256="a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27")
    version("0.5.0", sha256="6c7d52d529a78712491974f975c33473f430c0f7beb18c0d7a402a743dcb460a")
    version("0.4.0", sha256="12ad07994f5c1173b06087ffbaacec52f9ebe4687926e5aacfc22b6b0c8b3f54")

    depends_on("python@2.7:2,3.3:3", when="@0.7:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    depends_on("py-click", type=("build", "run"))
    depends_on("py-click@4:", type=("build", "run"))
    depends_on("py-click@4:7", when="@0.5.0", type=("build", "run"))
var/spack/repos/builtin/packages/py-colorlover/package.py (new file)
@@ -0,0 +1,17 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyColorlover(PythonPackage):
    """Color scales in Python for humans."""

    homepage = "https://github.com/plotly/colorlover"
    pypi = "colorlover/colorlover-0.3.0.tar.gz"

    version("0.3.0", sha256="b8fb7246ab46e1f5e6715649453c1762e245a515de5ff2d2b4aab7a6e67fa4e2")

    depends_on("py-setuptools", type="build")
@@ -0,0 +1,30 @@
From a4478b5a9a4d048671d078356bbb5d3ef333a9f9 Mon Sep 17 00:00:00 2001
From: Thomas Bouvier <contact@thomas-bouvier.io>
Date: Mon, 23 Jan 2023 11:24:39 +0100
Subject: [PATCH] Move package `prospector` to dev dependencies

---
requirements.txt | 1 -
requirements_dev.txt | 1 +
2 files changed, 1 insertion(+), 1 deletion(-)
create mode 100644 requirements_dev.txt

diff --git a/requirements.txt b/requirements.txt
index b9e40cd9..46c81d93 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,6 @@ scikit-learn>=0.24.1
pandas>=1.1.5
pytest>=5.0.1
pytest-mock>=3.6.1
-prospector[with_mypy]>=1.2.0
h5py>=3.1.0
requests>=2.24.0
ImageHash>=4.2.1
diff --git a/requirements_dev.txt b/requirements_dev.txt
new file mode 100644
index 00000000..a1986540
--- /dev/null
+++ b/requirements_dev.txt
@@ -0,0 +1 @@
+prospector[with_mypy]>=1.2.0
var/spack/repos/builtin/packages/py-continuum/package.py (new file)
@@ -0,0 +1,41 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyContinuum(PythonPackage):
    """A clean and simple data loading library for Continual Learning"""

    homepage = "https://continuum.readthedocs.io/en/latest/"
    pypi = "continuum/continuum-1.2.7.tar.gz"

    maintainers = ["thomas-bouvier"]

    version("1.2.7", sha256="baadcdbe2f5b3c05254307c73434a012f2a3d663e1be9469a03d4b82559e98e1")

    # This patch moves the `prospector` dependency of package continuum to the
    # dev dependencies, as it is not called from any Python code.
    # https://github.com/Continvvm/continuum/pull/280
    patch("move_prospector_dev_requires.patch", when="@:1.2.7")

    depends_on("python@3.6:")

    depends_on("py-setuptools", type="build")
    depends_on("py-torch@1.2.0:", type=("build", "run"))
    depends_on("py-torchvision@0.4.0:", type=("build", "run"))
    depends_on("py-numpy@1.17.2:", type=("build", "run"))
    depends_on("pil@6.2.1:", type=("build", "run"))
    depends_on("py-matplotlib@3.1.0:", type=("build", "run"))
    depends_on("py-scipy@1.3.3:", type=("build", "run"))
    depends_on("py-scikit-image@0.15.0:", type=("build", "run"))
    depends_on("py-scikit-learn@0.24.1:", type=("build", "run"))
    depends_on("py-pandas@1.1.5:", type=("build", "run"))
    depends_on("py-pytest@5.0.1:", type=("build", "run"))
    depends_on("py-pytest-mock@3.6.1:", type=("build", "run"))
    depends_on("py-h5py@3.1.0:", type=("build", "run"))
    depends_on("py-requests@2.24.0:", type=("build", "run"))
    depends_on("py-datasets@1.6.0:", type=("build", "run"))
    depends_on("py-imagehash@4.2.1:", type=("build", "run"))
@@ -12,6 +12,7 @@ class PyDeap(PythonPackage):

    homepage = "https://deap.readthedocs.org/"
    pypi = "deap/deap-1.3.1.tar.gz"

    version("1.3.3", sha256="8772f1b0fff042d5e516b0aebac2c706243045aa7d0de8e0b8658f380181cf31")
    version("1.3.1", sha256="11f54493ceb54aae10dde676577ef59fc52d52f82729d5a12c90b0813c857a2f")

    depends_on("py-setuptools@:57", type="build")
@@ -0,0 +1,18 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyDocstringParser(PythonPackage):
    """Parse Python docstrings in reST, Google and Numpydoc format."""

    homepage = "https://github.com/rr-/docstring_parser"
    pypi = "docstring-parser/docstring_parser-0.15.tar.gz"

    version("0.15", sha256="48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682")

    depends_on("python@3.6:3", type=("build", "run"))
    depends_on("py-poetry-core@1:", type="build")
@@ -0,0 +1,23 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyElasticsearchDsl(PythonPackage):
    """Elasticsearch DSL is a high-level library whose aim
    is to help with writing and running queries against Elasticsearch.
    It is built on top of the official low-level client (elasticsearch-py).
    """

    homepage = "https://github.com/elastic/elasticsearch-dsl-py"
    pypi = "elasticsearch-dsl/elasticsearch-dsl-7.4.0.tar.gz"

    version("7.4.0", sha256="c4a7b93882918a413b63bed54018a1685d7410ffd8facbc860ee7fd57f214a6d")

    depends_on("py-setuptools", type="build")
    depends_on("py-six", type=("build", "run"))
    depends_on("py-python-dateutil", type=("build", "run"))
    depends_on("py-elasticsearch@7.0.0:7", type=("build", "run"))
@@ -16,7 +16,7 @@ class PyFlattenDict(PythonPackage):

    version("0.3.0", sha256="0ccc43f15c7c84c5ef387ad19254f6769a32d170313a1bcbf4ce582089313d7e")

    depends_on("python@2.7:2.8,3.5:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    depends_on("python@2.7,3.5:3", type=("build", "run"))
    depends_on("py-poetry@1:", type="build")
    depends_on("py-six@1.12:1", type=("build", "run"))
    depends_on("py-pathlib2@2.3:2", type=("build", "run"))
@@ -12,6 +12,7 @@ class PyFsspec(PythonPackage):

    homepage = "https://github.com/intake/filesystem_spec"
    pypi = "fsspec/fsspec-0.4.4.tar.gz"

    version("2023.1.0", sha256="fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411")
    version("2022.11.0", sha256="259d5fd5c8e756ff2ea72f42e7613c32667dc2049a4ac3d84364a7ca034acb8b")
    version("2021.7.0", sha256="792ebd3b54de0b30f1ce73f0ba0a8bcc864724f2d9f248cb8d0ece47db0cbde8")
    version("2021.4.0", sha256="8b1a69884855d1a8c038574292e8b861894c3373282d9a469697a2b41d5289a6")

@@ -20,13 +21,8 @@ class PyFsspec(PythonPackage):

    version("0.7.3", sha256="1b540552c93b47e83c568e87507d6e02993e6d1b30bc7285f2336c81c5014103")
    version("0.4.4", sha256="97697a46e8bf8be34461c2520d6fc4bfca0ed749b22bb2b7c21939fd450a7d63")

    variant("http", default=False, description="HTTPFileSystem support (Requires version 0.8.1+)")
    variant("http", default=False, description="HTTPFileSystem support", when="@0.8.1:")

    conflicts("+http", when="@:0.8.0", msg="Only available in 0.8.1+")

    depends_on("python@3.5:", type=("build", "run"))
    depends_on("python@3.6:", type=("build", "run"), when="@0.6.3:")
    depends_on("python@3.7:", type=("build", "run"), when="@2022.11.0:")
    depends_on("py-setuptools", type="build")
    depends_on("py-requests", type=("build", "run"), when="+http")
    depends_on("py-aiohttp", type=("build", "run"), when="+http")
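
The py-fsspec change above replaces a conflicts() guard with a variant that is simply undefined on old versions. A minimal sketch of the two roughly equivalent spellings (hypothetical package name; the 0.8.1 boundary is taken from the hunk above):

from spack.package import *


class PyExamplepkg(PythonPackage):
    """Hypothetical package sketching a version-gated variant."""

    # New spelling: the variant does not exist at all before 0.8.1.
    variant("http", default=False, description="HTTPFileSystem support", when="@0.8.1:")

    # Old spelling: declare the variant unconditionally and forbid
    # turning it on for versions that cannot support it.
    # variant("http", default=False, description="HTTPFileSystem support")
    # conflicts("+http", when="@:0.8.0", msg="Only available in 0.8.1+")

    depends_on("py-aiohttp", type=("build", "run"), when="+http")
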
var/spack/repos/builtin/packages/py-gcsfs/package.py (new file)
@@ -0,0 +1,24 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyGcsfs(PythonPackage):
    """Pythonic file-system for Google Cloud Storage."""

    homepage = "https://github.com/fsspec/gcsfs"
    pypi = "gcsfs/gcsfs-2023.1.0.tar.gz"

    version("2023.1.0", sha256="0a7b7ca8c1affa126a14ba35d7b7dff81c49e2aaceedda9732c7f159a4837a26")

    depends_on("py-setuptools", type="build")
    depends_on("py-aiohttp", type=("build", "run"))
    depends_on("py-decorator@4.1.3:", type=("build", "run"))
    depends_on("py-fsspec@2023.1.0", type=("build", "run"))
    depends_on("py-google-auth@1.2:", type=("build", "run"))
    depends_on("py-google-auth-oauthlib", type=("build", "run"))
    depends_on("py-google-cloud-storage", type=("build", "run"))
    depends_on("py-requests", type=("build", "run"))
@@ -37,6 +37,7 @@ class PyH5py(PythonPackage):

    variant("mpi", default=True, description="Build with MPI support")

    # Python versions
    depends_on("python@:3.9", type=("build", "run"), when="@:2.8")
    depends_on("python@3.6:", type=("build", "run"), when="@3:3.1")
    depends_on("python@3.7:", type=("build", "run"), when="@3.2:")
var/spack/repos/builtin/packages/py-hjson/package.py (new file)
@@ -0,0 +1,18 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class PyHjson(PythonPackage):
    """Hjson is a user interface for JSON.
    The Python implementation of Hjson is based on simplejson."""

    homepage = "https://github.com/hjson/hjson-py"
    pypi = "hjson/hjson-3.0.2.tar.gz"

    version("3.1.2", sha256="55af475a27cf83a7969c808399d7bccdec8fb836a07ddbd574587593b9cdcf75")
    version("3.0.2", sha256="2838fd7200e5839ea4516ece953f3a19892c41089f0d933ba3f68e596aacfcd5")

    depends_on("py-setuptools", type="build")
var/spack/repos/builtin/packages/py-hydra-core/package.py (new file)
@@ -0,0 +1,21 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyHydraCore(PythonPackage):
    """A framework for elegantly configuring complex applications."""

    homepage = "https://github.com/facebookresearch/hydra"
    pypi = "hydra-core/hydra-core-1.3.1.tar.gz"

    version("1.3.1", sha256="8dd42d551befc43dfca0c612cbd58c4f3e273dbd97a87214c1a030ba557d238b")

    depends_on("py-setuptools", type="build")
    depends_on("py-omegaconf@2.2:2.3", type=("build", "run"))
    depends_on("py-antlr4-python3-runtime@4.9", type=("build", "run"))
    depends_on("py-importlib-resources", when="^python@:3.8", type=("build", "run"))
    depends_on("py-packaging", type=("build", "run"))
Some files were not shown because too many files have changed in this diff.