Compare commits

1 commit

woptim/spa ... remove-rem

| Author | SHA1 | Date |
|---|---|---|
|  | 111501b583 |  |
2 changes: .github/workflows/coverage.yml (vendored)
```diff
@@ -29,7 +29,7 @@ jobs:
       - run: coverage xml
       - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303
+        uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
         with:
           verbose: true
           fail_ci_if_error: false
```
```diff
@@ -2,6 +2,6 @@ black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
-mypy==1.11.2
+mypy==1.8.0
 types-six==1.17.0.20241205
 vermin==1.6.0
```
6 changes: .github/workflows/valid-style.yml (vendored)
```diff
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.13'
+          python-version: '3.11'
           cache: 'pip'
       - name: Install Python Packages
         run: |
@@ -39,7 +39,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.13'
+          python-version: '3.11'
           cache: 'pip'
       - name: Install Python packages
         run: |
@@ -58,7 +58,7 @@ jobs:
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.13'
+      python_version: '3.11'
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
```
```diff
@@ -25,23 +25,14 @@ These settings can be overridden in ``etc/spack/config.yaml`` or
 The location where Spack will install packages and their dependencies.
 Default is ``$spack/opt/spack``.
 
----------------
-``projections``
----------------
+---------------------------------------------------
+``install_hash_length`` and ``install_path_scheme``
+---------------------------------------------------
 
-.. warning::
-
-   Modifying projections of the install tree is strongly discouraged.
-
-By default Spack installs all packages into a unique directory relative to the install
-tree root with the following layout:
-
-.. code-block::
-
-   {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
-
-In very rare cases, it may be necessary to reduce the length of this path. For example,
-very old versions of the Intel compiler are known to segfault when input paths are too long:
+The default Spack installation path can be very long and can create problems
+for scripts with hardcoded shebangs. Additionally, when using the Intel
+compiler, and if there is also a long list of dependencies, the compiler may
+segfault. If you see the following:
 
 .. code-block:: console
```
```diff
@@ -49,25 +40,36 @@ very old versions of the Intel compiler are known to segfault when input paths a
    ** Segmentation violation signal raised. **
    Access violation or stack overflow. Please contact Intel Support for assistance.
 
-Another case is Python and R packages with many runtime dependencies, which can result
-in very large ``PYTHONPATH`` and ``R_LIBS`` environment variables. This can cause the
-``execve`` system call to fail with ``E2BIG``, preventing processes from starting.
+it may be because variables containing dependency specs may be too long. There
+are two parameters to help with long path names. Firstly, the
+``install_hash_length`` parameter can set the length of the hash in the
+installation path from 1 to 32. The default path uses the full 32 characters.
 
-For this reason, Spack allows users to modify the installation layout through custom
-projections. For example
+Secondly, it is also possible to modify the entire installation
+scheme. By default Spack uses
+``{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}``
+where the tokens that are available for use in this directive are the
+same as those understood by the :meth:`~spack.spec.Spec.format`
+method. Using this parameter it is possible to use a different package
+layout or reduce the depth of the installation paths. For example
 
 .. code-block:: yaml
 
    config:
-     install_tree:
-       root: $spack/opt/spack
-       projections:
-         all: "{name}/{version}/{hash:16}"
+     install_path_scheme: '{name}/{version}/{hash:7}'
 
-would install packages into sub-directories using only the package name, version and a
-hash length of 16 characters.
+would install packages into sub-directories using only the package
+name, version and a hash length of 7 characters.
 
-Notice that reducing the hash length increases the likelihood of hash collisions.
+When using either parameter to set the hash length it only affects the
+representation of the hash in the installation directory. You
+should be aware that the smaller the hash length the more likely
+naming conflicts will occur. These parameters are independent of those
+used to configure module names.
+
+.. warning:: Modifying the installation hash length or path scheme after
+             packages have been installed will prevent Spack from being
+             able to find the old installation directories.
 
 --------------------
 ``build_stage``
```
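As a side note on the projection tokens discussed in the documentation hunk above, here is a minimal sketch of how such a template expands via `Spec.format`; the `zlib` spec and the printed hash are hypothetical, and the snippet assumes a `zlib` package exists in the active repository:

```python
# Minimal sketch (hypothetical spec and output): expanding a projection
# template with Spec.format, the same token syntax used by install_tree
# projections and the deprecated install_path_scheme.
import spack.spec

spec = spack.spec.Spec("zlib@1.2.13").concretized()  # assumes zlib is available
# "{hash:7}" truncates the DAG hash to 7 characters, as in the YAML above.
print(spec.format("{name}/{version}/{hash:7}"))  # e.g. zlib/1.2.13/abcdefg
```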
```diff
@@ -4,7 +4,7 @@ sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
 python-levenshtein==0.26.1
 docutils==0.21.2
-pygments==2.19.1
+pygments==2.18.0
 urllib3==2.3.0
 pytest==8.3.4
 isort==5.13.2
```
```diff
@@ -591,18 +591,32 @@ def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
         f.seek(0)
 
 
-def specs_to_relocate(spec: spack.spec.Spec) -> List[spack.spec.Spec]:
-    """Return the set of specs that may be referenced in the install prefix of the provided spec.
-    We currently include non-external transitive link and direct run dependencies."""
-    specs = [
+def deps_to_relocate(spec):
+    """Return the transitive link and direct run dependencies of the spec.
+
+    This is a special traversal for dependencies we need to consider when relocating a package.
+
+    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
+    we need to rewrite those locations when dependencies are in a different place at install time
+    than they were at build time.
+
+    This traversal covers transitive link dependencies and direct run dependencies because:
+
+    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
+       dependency libraries.
+    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
+       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
+
+    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
+    """
+    deps = [
         s
         for s in itertools.chain(
-            spec.traverse(root=True, deptype="link", order="breadth", key=traverse.by_dag_hash),
-            spec.dependencies(deptype="run"),
+            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
         if not s.external
     ]
-    return list(llnl.util.lang.dedupe(specs, key=lambda s: s.dag_hash()))
+    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
 
 
 def get_buildinfo_dict(spec):
@@ -616,7 +630,7 @@ def get_buildinfo_dict(spec):
         # "relocate_binaries": [],
         # "relocate_links": [],
         "hardlinks_deduped": True,
-        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in specs_to_relocate(spec)},
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
     }
```
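For context on the deduplication step in the hunk above, a standalone sketch of the dedupe-by-key pattern, assuming `llnl.util.lang.dedupe(sequence, key=...)` keeps the first occurrence per key in order, as its use above implies; the strings stand in for specs and their DAG hashes:

```python
# Standalone sketch: keep the first occurrence per key, preserving order.
# Strings stand in for specs; the part after "/" stands in for dag_hash().
from llnl.util.lang import dedupe

items = ["zlib/aaa", "openssl/bbb", "zlib/aaa"]
print(list(dedupe(items, key=lambda s: s.split("/")[1])))
# ['zlib/aaa', 'openssl/bbb']
```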
```diff
@@ -1098,7 +1112,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
 
 
 def prefixes_to_relocate(spec):
-    prefixes = [s.prefix for s in specs_to_relocate(spec)]
+    prefixes = [s.prefix for s in deps_to_relocate(spec)]
     prefixes.append(spack.hooks.sbang.sbang_install_path())
     prefixes.append(str(spack.store.STORE.layout.root))
     return prefixes
```
```diff
@@ -2175,12 +2189,7 @@ def relocate_package(spec):
     old_spack_prefix = str(buildinfo.get("spackprefix"))
     old_rel_prefix = buildinfo.get("relative_prefix")
     old_prefix = os.path.join(old_layout_root, old_rel_prefix)
-
-    # Warn about old style tarballs created with the now removed --rel flag.
-    if buildinfo.get("relative_rpaths", False):
-        tty.warn(
-            f"Tarball for {spec} uses relative rpaths, " "which can cause library loading issues."
-        )
+    rel = buildinfo.get("relative_rpaths", False)
 
     # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
     # were not unique.
```
```diff
@@ -2220,7 +2229,7 @@ def relocate_package(spec):
     # An analog in this algorithm is any spec that shares a name or provides the same virtuals
     # in the context of the relevant root spec. This ensures that the analog for a spec s
     # is the spec that s replaced when we spliced.
-    relocation_specs = specs_to_relocate(spec)
+    relocation_specs = deps_to_relocate(spec)
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
     for s in relocation_specs:
         analog = s
```
```diff
@@ -2258,11 +2267,19 @@ def relocate_package(spec):
 
     tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
 
-    # Old archives may have hardlinks repeated.
+    # Old archives maybe have hardlinks repeated.
     dedupe_hardlinks_if_necessary(workdir, buildinfo)
 
+    def is_backup_file(file):
+        return file.endswith("~")
+
     # Text files containing the prefix text
-    text_names = [os.path.join(workdir, f) for f in buildinfo["relocate_textfiles"]]
+    text_names = list()
+    for filename in buildinfo["relocate_textfiles"]:
+        text_name = os.path.join(workdir, filename)
+        # Don't add backup files generated by filter_file during install step.
+        if not is_backup_file(text_name):
+            text_names.append(text_name)
 
     # If we are not installing back to the same install tree do the relocation
     if old_prefix != new_prefix:
```
```diff
@@ -2273,11 +2290,29 @@ def relocate_package(spec):
         # do the relocation of path in binaries
         platform = spack.platforms.by_name(spec.platform)
         if "macho" in platform.binary_formats:
-            relocate.relocate_macho_binaries(files_to_relocate, prefix_to_prefix_bin)
-        elif "elf" in platform.binary_formats:
-            relocate.relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
+            relocate.relocate_macho_binaries(
+                files_to_relocate,
+                old_layout_root,
+                new_layout_root,
+                prefix_to_prefix_bin,
+                rel,
+                old_prefix,
+                new_prefix,
+            )
+        elif "elf" in platform.binary_formats and not rel:
+            # The new ELF dynamic section relocation logic only handles absolute to
+            # absolute relocation.
+            relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
+        elif "elf" in platform.binary_formats and rel:
+            relocate.relocate_elf_binaries(
+                files_to_relocate,
+                old_layout_root,
+                new_layout_root,
+                prefix_to_prefix_bin,
+                rel,
+                old_prefix,
+                new_prefix,
+            )
 
         # Relocate links to the new install prefix
         links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
```
```diff
@@ -298,14 +298,7 @@ def initconfig_hardware_entries(self):
     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
-        complete_rpath_list = ";".join(
-            [
-                self.pkg.spec.prefix.lib,
-                self.pkg.spec.prefix.lib64,
-                *os.environ.get("SPACK_COMPILER_EXTRA_RPATHS", "").split(":"),
-                *os.environ.get("SPACK_COMPILER_IMPLICIT_RPATHS", "").split(":"),
-            ]
-        )
 
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
@@ -314,8 +307,6 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
             cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
-            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
-            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
             self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]
```
```diff
@@ -71,16 +71,13 @@ def build_directory(self):
     @property
     def build_args(self):
         """Arguments for ``cargo build``."""
-        return ["-j", str(self.pkg.module.make_jobs)]
+        return []
 
     @property
     def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []
 
     def setup_build_environment(self, env):
         env.set("CARGO_HOME", self.stage.path)
 
     def build(self, pkg, spec, prefix):
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
```
```diff
@@ -10,9 +10,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-from spack.directives import build_system, depends_on, extends
+from spack.directives import build_system, extends
 from spack.install_test import SkipTest, test_part
-from spack.multimethod import when
 from spack.util.executable import Executable
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -29,9 +28,7 @@ class PerlPackage(spack.package_base.PackageBase):
 
     build_system("perl")
 
-    with when("build_system=perl"):
-        extends("perl")
-        depends_on("gmake", type="build")
+    extends("perl", when="build_system=perl")
 
     @property
     @memoized
```
```diff
@@ -171,9 +171,7 @@ def quote_kvp(string: str) -> str:
 
 
 def parse_specs(
-    args: Union[str, List[str]],
-    concretize: bool = False,
-    tests: spack.concretize.TestsType = False,
+    args: Union[str, List[str]], concretize: bool = False, tests: bool = False
 ) -> List[spack.spec.Spec]:
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -185,13 +183,11 @@ def parse_specs(
     if not concretize:
         return specs
 
-    to_concretize: List[spack.concretize.SpecPairInput] = [(s, None) for s in specs]
+    to_concretize = [(s, None) for s in specs]
     return _concretize_spec_pairs(to_concretize, tests=tests)
 
 
-def _concretize_spec_pairs(
-    to_concretize: List[spack.concretize.SpecPairInput], tests: spack.concretize.TestsType = False
-) -> List[spack.spec.Spec]:
+def _concretize_spec_pairs(to_concretize, tests=False):
    """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
 
     Any spec with a concrete spec associated with it will concretize to that spec. Any spec
@@ -202,7 +198,7 @@ def _concretize_spec_pairs(
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or abstract.concretized(tests=tests)]
+        return [concrete or abstract.concretized()]
 
     # Special case if every spec is either concrete or has an abstract hash
     if all(
```
```diff
@@ -144,7 +144,7 @@ def is_installed(spec):
         record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
         return record and record.installed
 
-    all_specs = traverse.traverse_nodes(
+    specs = traverse.traverse_nodes(
         specs,
         root=False,
         order="breadth",
@@ -155,7 +155,7 @@ def is_installed(spec):
     )
 
     with spack.store.STORE.db.read_transaction():
-        return [spec for spec in all_specs if is_installed(spec)]
+        return [spec for spec in specs if is_installed(spec)]
 
 
 def dependent_environments(
```
```diff
@@ -749,18 +749,12 @@ def __init__(self, compiler, feature, flag_name, ver_string=None):
 class CompilerCacheEntry:
     """Deserialized cache entry for a compiler"""
 
-    __slots__ = ("c_compiler_output", "real_version")
+    __slots__ = ["c_compiler_output", "real_version"]
 
     def __init__(self, c_compiler_output: Optional[str], real_version: str):
         self.c_compiler_output = c_compiler_output
         self.real_version = real_version
 
-    @property
-    def empty(self) -> bool:
-        """Sometimes the compiler is temporarily broken, preventing us from getting output. The
-        call site determines if that is a problem."""
-        return self.c_compiler_output is None
-
     @classmethod
     def from_dict(cls, data: Dict[str, Optional[str]]):
         if not isinstance(data, dict):
@@ -798,10 +792,9 @@ def __init__(self, cache: "FileCache") -> None:
         self.cache.init_entry(self.name)
         self._data: Dict[str, Dict[str, Optional[str]]] = {}
 
-    def _get_entry(self, key: str, *, allow_empty: bool) -> Optional[CompilerCacheEntry]:
+    def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
         try:
-            entry = CompilerCacheEntry.from_dict(self._data[key])
-            return entry if allow_empty or not entry.empty else None
+            return CompilerCacheEntry.from_dict(self._data[key])
         except ValueError:
             del self._data[key]
         except KeyError:
@@ -819,7 +812,7 @@ def get(self, compiler: Compiler) -> CompilerCacheEntry:
             self._data = {}
 
         key = self._key(compiler)
-        value = self._get_entry(key, allow_empty=False)
+        value = self._get_entry(key)
         if value is not None:
             return value
 
@@ -833,7 +826,7 @@ def get(self, compiler: Compiler) -> CompilerCacheEntry:
             self._data = {}
 
         # Use cache entry that may have been created by another process in the meantime.
-        entry = self._get_entry(key, allow_empty=True)
+        entry = self._get_entry(key)
 
         # Finally compute the cache entry
         if entry is None:
```
```diff
@@ -5,7 +5,7 @@
 import sys
 import time
 from contextlib import contextmanager
-from typing import Iterable, List, Optional, Sequence, Tuple, Union
+from typing import Iterable, Optional, Sequence, Tuple, Union
 
 import llnl.util.tty as tty
 
@@ -35,7 +35,6 @@ def enable_compiler_existence_check():
     CHECK_COMPILER_EXISTENCE = saved
 
 
-SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
 SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]
@@ -60,8 +59,8 @@ def concretize_specs_together(
 
 
 def concretize_together(
-    spec_list: Sequence[SpecPairInput], tests: TestsType = False
-) -> List[SpecPair]:
+    spec_list: Sequence[SpecPair], tests: TestsType = False
+) -> Sequence[SpecPair]:
     """Given a number of specs as input, tries to concretize them together.
 
     Args:
@@ -77,8 +76,8 @@ def concretize_together(
 
 
 def concretize_together_when_possible(
-    spec_list: Sequence[SpecPairInput], tests: TestsType = False
-) -> List[SpecPair]:
+    spec_list: Sequence[SpecPair], tests: TestsType = False
+) -> Sequence[SpecPair]:
     """Given a number of specs as input, tries to concretize them together to the extent possible.
 
     See documentation for ``unify: when_possible`` concretization for the precise definition of
@@ -114,8 +113,8 @@ def concretize_together_when_possible(
 
 
 def concretize_separately(
-    spec_list: Sequence[SpecPairInput], tests: TestsType = False
-) -> List[SpecPair]:
+    spec_list: Sequence[SpecPair], tests: TestsType = False
+) -> Sequence[SpecPair]:
     """Concretizes the input specs separately from each other.
 
     Args:
```
```diff
@@ -951,6 +951,12 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
     return CONFIG.set(path, value, scope)
 
 
+def add_default_platform_scope(platform: str) -> None:
+    plat_name = os.path.join("defaults", platform)
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))
+
+
 def scopes() -> Dict[str, ConfigScope]:
     """Convenience function to get list of configuration scopes."""
     return CONFIG.scopes
```
```diff
@@ -1330,7 +1330,7 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
     def installed_relatives(
         self,
         spec: "spack.spec.Spec",
-        direction: tr.DirectionType = "children",
+        direction: str = "children",
         transitive: bool = True,
         deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
     ) -> Set["spack.spec.Spec"]:
```
```diff
@@ -8,7 +8,7 @@
 import shutil
 import sys
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple
+from typing import List, Optional, Tuple
 
 import llnl.util.filesystem as fs
 from llnl.util.symlink import readlink
@@ -17,6 +17,7 @@
 import spack.hash_types as ht
 import spack.projections
 import spack.spec
+import spack.store
 import spack.util.spack_json as sjson
 from spack.error import SpackError
 
@@ -68,9 +69,10 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
 
 
 class DirectoryLayout:
-    """A directory layout is used to associate unique paths with specs. Different installations are
-    going to want different layouts for their install, and they can use this to customize the
-    nesting structure of spack installs. The default layout is:
+    """A directory layout is used to associate unique paths with specs.
+    Different installations are going to want different layouts for their
+    install, and they can use this to customize the nesting structure of
+    spack installs. The default layout is:
 
     * <install root>/
 
@@ -80,30 +82,35 @@ class DirectoryLayout:
 
         * <name>-<version>-<hash>
 
-    The installation directory projections can be modified with the projections argument."""
+    The hash here is a SHA-1 hash for the full DAG plus the build
+    spec.
 
-    def __init__(
-        self,
-        root,
-        *,
-        projections: Optional[Dict[str, str]] = None,
-        hash_length: Optional[int] = None,
-    ) -> None:
+    The installation directory projections can be modified with the
+    projections argument.
+    """
+
+    def __init__(self, root, **kwargs):
         self.root = root
-        projections = projections or default_projections
-        self.projections = {key: projection.lower() for key, projection in projections.items()}
+        self.check_upstream = True
+        projections = kwargs.get("projections") or default_projections
+        self.projections = dict(
+            (key, projection.lower()) for key, projection in projections.items()
+        )
 
         # apply hash length as appropriate
-        self.hash_length = hash_length
+        self.hash_length = kwargs.get("hash_length", None)
         if self.hash_length is not None:
             for when_spec, projection in self.projections.items():
                 if "{hash}" not in projection:
-                    raise InvalidDirectoryLayoutParametersError(
-                        "Conflicting options for installation layout hash length"
-                        if "{hash" in projection
-                        else "Cannot specify hash length when the hash is not part of all "
-                        "install_tree projections"
-                    )
+                    if "{hash" in projection:
+                        raise InvalidDirectoryLayoutParametersError(
+                            "Conflicting options for installation layout hash" " length"
+                        )
+                    else:
+                        raise InvalidDirectoryLayoutParametersError(
+                            "Cannot specify hash length when the hash is not"
+                            " part of all install_tree projections"
+                        )
                 self.projections[when_spec] = projection.replace(
                     "{hash}", "{hash:%d}" % self.hash_length
                 )
@@ -272,6 +279,13 @@ def path_for_spec(self, spec):
 
         if spec.external:
             return spec.external_path
+        if self.check_upstream:
+            upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
+            if upstream:
+                raise SpackError(
+                    "Internal error: attempted to call path_for_spec on"
+                    " upstream-installed package."
+                )
 
         path = self.relative_path_for_spec(spec)
         assert not path.startswith(self.root)
```
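A standalone sketch of the `{hash}` narrowing that `__init__` performs above when `hash_length` is set; the projection string is the default layout quoted in the class docstring:

```python
# Standalone sketch of the hash-length rewrite in DirectoryLayout.__init__:
# every "{hash}" token in a projection is narrowed to "{hash:N}".
projection = "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
hash_length = 7
print(projection.replace("{hash}", "{hash:%d}" % hash_length))
# {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash:7}
```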
```diff
@@ -2634,29 +2634,6 @@ def _ensure_env_dir():
 
     shutil.copy(envfile, target_manifest)
 
-    # Copy relative path includes that live inside the environment dir
-    try:
-        manifest = EnvironmentManifestFile(environment_dir)
-    except Exception as e:
-        msg = f"cannot initialize environment, '{environment_dir}' from manifest"
-        raise SpackEnvironmentError(msg) from e
-    else:
-        includes = manifest[TOP_LEVEL_KEY].get("include", [])
-        for include in includes:
-            if os.path.isabs(include):
-                continue
-
-            abspath = pathlib.Path(os.path.normpath(environment_dir / include))
-            if not abspath.is_relative_to(environment_dir):
-                # Warn that we are not copying relative path
-                msg = "Spack will not copy relative include path from outside environment"
-                msg += f" directory: {include}"
-                tty.warn(msg)
-                continue
-
-            orig_abspath = os.path.normpath(envfile.parent / include)
-            shutil.copy(orig_abspath, abspath)
-
 
 class EnvironmentManifestFile(collections.abc.Mapping):
     """Manages the in-memory representation of a manifest file, and its synchronization
```
```diff
@@ -15,10 +15,6 @@
 SHOW_BACKTRACE = False
 
 
-class SpackAPIWarning(UserWarning):
-    """Warning that formats with file and line number."""
-
-
 class SpackError(Exception):
     """This is the superclass for all Spack errors.
     Subclasses can be found in the modules they have to do with.
```
```diff
@@ -539,7 +539,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
     # Note that we copy them in as they are in the *install* directory
     # NOT as they are in the repository, because we want a snapshot of
     # how *this* particular build was done.
-    for node in spec.traverse(deptype="all"):
+    for node in spec.traverse(deptype=all):
         if node is not spec:
             # Locate the dependency package in the install tree and find
             # its provenance information.
```
```diff
@@ -503,16 +503,16 @@ def make_argument_parser(**kwargs):
     return parser
 
 
-def showwarning(message, category, filename, lineno, file=None, line=None):
+def send_warning_to_tty(message, *args):
     """Redirects messages to tty.warn."""
-    if category is spack.error.SpackAPIWarning:
-        tty.warn(f"{filename}:{lineno}: {message}")
-    else:
-        tty.warn(message)
+    tty.warn(message)
 
 
 def setup_main_options(args):
     """Configure spack globals based on the basic options."""
+    # Assign a custom function to show warnings
+    warnings.showwarning = send_warning_to_tty
+
     # Set up environment based on args.
     tty.set_verbose(args.verbose)
     tty.set_debug(args.debug)
@@ -903,10 +903,9 @@ def _main(argv=None):
     # main() is tricky to get right, so be careful where you put things.
     #
     # Things in this first part of `main()` should *not* require any
-    # configuration. This doesn't include much -- setting up the parser,
+    # configuration. This doesn't include much -- setting up th parser,
     # restoring some key environment variables, very simple CLI options, etc.
     # ------------------------------------------------------------------------
-    warnings.showwarning = showwarning
 
     # Create a parser with a simple positional argument first. We'll
     # lazily load the subcommand(s) we need later. This allows us to
```
```diff
@@ -767,9 +767,6 @@ def __init__(self, spec):
         self.win_rpath = fsys.WindowsSimulatedRPath(self)
         super().__init__()
 
-    def __getitem__(self, key: str) -> "PackageBase":
-        return self.spec[key].package
-
     @classmethod
     def dependency_names(cls):
         return _subkeys(cls.dependencies)
```
```diff
@@ -54,11 +54,144 @@ def _patchelf() -> Optional[executable.Executable]:
     return spack.bootstrap.ensure_patchelf_in_path_or_raise()
 
 
+def _elf_rpaths_for(path):
+    """Return the RPATHs for an executable or a library.
+
+    Args:
+        path (str): full path to the executable or library
+
+    Return:
+        RPATHs as a list of strings. Returns an empty array
+        on ELF parsing errors, or when the ELF file simply
+        has no rpaths.
+    """
+    return elf.get_rpaths(path) or []
+
+
+def _make_relative(reference_file, path_root, paths):
+    """Return a list where any path in ``paths`` that starts with
+    ``path_root`` is made relative to the directory in which the
+    reference file is stored.
+
+    After a path is made relative it is prefixed with the ``$ORIGIN``
+    string.
+
+    Args:
+        reference_file (str): file from which the reference directory
+            is computed
+        path_root (str): root of the relative paths
+        paths: (list) paths to be examined
+
+    Returns:
+        List of relative paths
+    """
+    start_directory = os.path.dirname(reference_file)
+    pattern = re.compile(path_root)
+    relative_paths = []
+
+    for path in paths:
+        if pattern.match(path):
+            rel = os.path.relpath(path, start=start_directory)
+            path = os.path.join("$ORIGIN", rel)
+
+        relative_paths.append(path)
+
+    return relative_paths
+
+
+def _normalize_relative_paths(start_path, relative_paths):
+    """Normalize the relative paths with respect to the original path name
+    of the file (``start_path``).
+
+    The paths that are passed to this function existed or were relevant
+    on another filesystem, so os.path.abspath cannot be used.
+
+    A relative path may contain the signifier $ORIGIN. Assuming that
+    ``start_path`` is absolute, this implies that the relative path
+    (relative to start_path) should be replaced with an absolute path.
+
+    Args:
+        start_path (str): path from which the starting directory
+            is extracted
+        relative_paths (str): list of relative paths as obtained by a
+            call to :ref:`_make_relative`
+
+    Returns:
+        List of normalized paths
+    """
+    normalized_paths = []
+    pattern = re.compile(re.escape("$ORIGIN"))
+    start_directory = os.path.dirname(start_path)
+
+    for path in relative_paths:
+        if path.startswith("$ORIGIN"):
+            sub = pattern.sub(start_directory, path)
+            path = os.path.normpath(sub)
+        normalized_paths.append(path)
+
+    return normalized_paths
+
+
 def _decode_macho_data(bytestring):
     return bytestring.rstrip(b"\x00").decode("ascii")
 
 
-def macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
+def macho_make_paths_relative(path_name, old_layout_root, rpaths, deps, idpath):
+    """
+    Return a dictionary mapping the original rpaths to the relativized rpaths.
+    This dictionary is used to replace paths in mach-o binaries.
+    Replace old_dir with relative path from dirname of path name
+    in rpaths and deps; idpath is replaced with @rpath/libname.
+    """
+    paths_to_paths = dict()
+    if idpath:
+        paths_to_paths[idpath] = os.path.join("@rpath", "%s" % os.path.basename(idpath))
+    for rpath in rpaths:
+        if re.match(old_layout_root, rpath):
+            rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
+            paths_to_paths[rpath] = os.path.join("@loader_path", "%s" % rel)
+        else:
+            paths_to_paths[rpath] = rpath
+    for dep in deps:
+        if re.match(old_layout_root, dep):
+            rel = os.path.relpath(dep, start=os.path.dirname(path_name))
+            paths_to_paths[dep] = os.path.join("@loader_path", "%s" % rel)
+        else:
+            paths_to_paths[dep] = dep
+    return paths_to_paths
+
+
+def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath):
+    """
+    Return a dictionary mapping the relativized rpaths to the original rpaths.
+    This dictionary is used to replace paths in mach-o binaries.
+    Replace '@loader_path' with the dirname of the origname path name
+    in rpaths and deps; idpath is replaced with the original path name
+    """
+    rel_to_orig = dict()
+    if idpath:
+        rel_to_orig[idpath] = orig_path_name
+
+    for rpath in rpaths:
+        if re.match("@loader_path", rpath):
+            norm = os.path.normpath(
+                re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), rpath)
+            )
+            rel_to_orig[rpath] = norm
+        else:
+            rel_to_orig[rpath] = rpath
+    for dep in deps:
+        if re.match("@loader_path", dep):
+            norm = os.path.normpath(
+                re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), dep)
+            )
+            rel_to_orig[dep] = norm
+        else:
+            rel_to_orig[dep] = dep
+    return rel_to_orig
+
+
+def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefix):
     """
     Inputs
     original rpaths from mach-o binaries
```
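A standalone sketch of the `$ORIGIN` round trip implemented by `_make_relative` and `_normalize_relative_paths` in the hunk above; the paths are hypothetical:

```python
# Standalone sketch (hypothetical paths) of the $ORIGIN round trip implemented
# by _make_relative and _normalize_relative_paths above.
import os

binary = "/layout/pkg/bin/tool"  # file whose directory anchors $ORIGIN
rpath = "/layout/dep/lib"        # rpath inside the layout root "/layout"

# Relativize: express the rpath relative to the binary's directory.
rel = os.path.join("$ORIGIN", os.path.relpath(rpath, start=os.path.dirname(binary)))
print(rel)  # $ORIGIN/../../dep/lib

# Normalize: substitute $ORIGIN back and collapse the "..".
print(os.path.normpath(rel.replace("$ORIGIN", os.path.dirname(binary))))  # /layout/dep/lib
```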
```diff
@@ -74,12 +207,13 @@ def macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
     # Sort from longest path to shortest, to ensure we try /foo/bar/baz before /foo/bar
     prefix_iteration_order = sorted(prefix_to_prefix, key=len, reverse=True)
     for orig_rpath in orig_rpaths:
-        for old_prefix in prefix_iteration_order:
-            new_prefix = prefix_to_prefix[old_prefix]
-            if orig_rpath.startswith(old_prefix):
-                new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
-                paths_to_paths[orig_rpath] = new_rpath
-                break
+        if orig_rpath.startswith(old_layout_root):
+            for old_prefix in prefix_iteration_order:
+                new_prefix = prefix_to_prefix[old_prefix]
+                if orig_rpath.startswith(old_prefix):
+                    new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
+                    paths_to_paths[orig_rpath] = new_rpath
+                    break
+        else:
+            paths_to_paths[orig_rpath] = orig_rpath
```
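The comment in the hunk above explains why the prefix map is scanned longest-first; a standalone sketch with hypothetical paths:

```python
# Standalone sketch (hypothetical paths): why the prefix map is scanned from
# longest to shortest, so the most specific mapping wins.
prefix_to_prefix = {
    "/old/foo": "/new/foo",
    "/old/foo/bar": "/moved/bar",
}
rpath = "/old/foo/bar/lib"

for old_prefix in sorted(prefix_to_prefix, key=len, reverse=True):
    if rpath.startswith(old_prefix):
        print(rpath.replace(old_prefix, prefix_to_prefix[old_prefix], 1))  # /moved/bar/lib
        break
```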
```diff
@@ -214,7 +348,9 @@ def _set_elf_rpaths_and_interpreter(
     return None
 
 
-def relocate_macho_binaries(path_names, prefix_to_prefix):
+def relocate_macho_binaries(
+    path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
+):
     """
     Use macholib python package to get the rpaths, depedent libraries
     and library identity for libraries from the MachO object. Modify them
@@ -227,15 +363,77 @@ def relocate_macho_binaries(path_names, prefix_to_prefix):
         # Corner case where macho object file ended up in the path name list
         if path_name.endswith(".o"):
             continue
-        # get the paths in the old prefix
-        rpaths, deps, idpath = macholib_get_paths(path_name)
-        # get the mapping of paths in the old prerix to the new prefix
-        paths_to_paths = macho_find_paths(rpaths, deps, idpath, prefix_to_prefix)
-        # replace the old paths with new paths
-        modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
+        if rel:
+            # get the relativized paths
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the file path name in the original prefix
+            orig_path_name = re.sub(re.escape(new_prefix), old_prefix, path_name)
+            # get the mapping of the relativized paths to the original
+            # normalized paths
+            rel_to_orig = macho_make_paths_normal(orig_path_name, rpaths, deps, idpath)
+            # replace the relativized paths with normalized paths
+            modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
+            # get the normalized paths in the mach-o binary
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths in old prefix to path in new prefix
+            paths_to_paths = macho_find_paths(
+                rpaths, deps, idpath, old_layout_root, prefix_to_prefix
+            )
+            # replace the old paths with new paths
+            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
+            # get the new normalized path in the mach-o binary
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths to relative paths in the new prefix
+            paths_to_paths = macho_make_paths_relative(
+                path_name, new_layout_root, rpaths, deps, idpath
+            )
+            # replace the new paths with relativized paths in the new prefix
+            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
+        else:
+            # get the paths in the old prefix
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths in the old prerix to the new prefix
+            paths_to_paths = macho_find_paths(
+                rpaths, deps, idpath, old_layout_root, prefix_to_prefix
+            )
+            # replace the old paths with new paths
+            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
 
 
-def relocate_elf_binaries(binaries, prefix_to_prefix):
+def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):
+    """Return an updated list of RPATHs where each entry in the original list
+    starting with the old root is relocated to another place according to the
+    mapping passed as argument.
+
+    Args:
+        orig_rpaths (list): list of the original RPATHs
+        orig_root (str): original root to be substituted
+        new_prefixes (dict): dictionary that maps the original prefixes to
+            where they should be relocated
+
+    Returns:
+        List of paths
+    """
+    new_rpaths = []
+    for orig_rpath in orig_rpaths:
+        # If the original RPATH doesn't start with the target root
+        # append it verbatim and proceed
+        if not orig_rpath.startswith(orig_root):
+            new_rpaths.append(orig_rpath)
+            continue
+
+        # Otherwise inspect the mapping and transform + append any prefix
+        # that starts with a registered key
+        # avoiding duplicates
+        for old_prefix, new_prefix in new_prefixes.items():
+            if orig_rpath.startswith(old_prefix):
+                new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
+                if new_rpath not in new_rpaths:
+                    new_rpaths.append(new_rpath)
+    return new_rpaths
+
+
+def new_relocate_elf_binaries(binaries, prefix_to_prefix):
     """Take a list of binaries, and an ordered dictionary of
     prefix to prefix mapping, and update the rpaths accordingly."""
 
@@ -254,6 +452,98 @@ def relocate_elf_binaries(binaries, prefix_to_prefix):
     _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
 
 
+def relocate_elf_binaries(
+    binaries, orig_root, new_root, new_prefixes, rel, orig_prefix, new_prefix
+):
+    """Relocate the binaries passed as arguments by changing their RPATHs.
+
+    Use patchelf to get the original RPATHs and then replace them with
+    rpaths in the new directory layout.
+
+    New RPATHs are determined from a dictionary mapping the prefixes in the
+    old directory layout to the prefixes in the new directory layout if the
+    rpath was in the old layout root, i.e. system paths are not replaced.
+
+    Args:
+        binaries (list): list of binaries that might need relocation, located
+            in the new prefix
+        orig_root (str): original root to be substituted
+        new_root (str): new root to be used, only relevant for relative RPATHs
+        new_prefixes (dict): dictionary that maps the original prefixes to
+            where they should be relocated
+        rel (bool): True if the RPATHs are relative, False if they are absolute
+        orig_prefix (str): prefix where the executable was originally located
+        new_prefix (str): prefix where we want to relocate the executable
+    """
+    for new_binary in binaries:
+        orig_rpaths = _elf_rpaths_for(new_binary)
+        # TODO: Can we deduce `rel` from the original RPATHs?
+        if rel:
+            # Get the file path in the original prefix
+            orig_binary = re.sub(re.escape(new_prefix), orig_prefix, new_binary)
+
+            # Get the normalized RPATHs in the old prefix using the file path
+            # in the orig prefix
+            orig_norm_rpaths = _normalize_relative_paths(orig_binary, orig_rpaths)
+            # Get the normalize RPATHs in the new prefix
+            new_norm_rpaths = _transform_rpaths(orig_norm_rpaths, orig_root, new_prefixes)
+            # Get the relative RPATHs in the new prefix
+            new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
+            # check to see if relative rpaths are changed before rewriting
+            if sorted(new_rpaths) != sorted(orig_rpaths):
+                _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
+        else:
+            new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
+            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
+
+
+def make_link_relative(new_links, orig_links):
+    """Compute the relative target from the original link and
+    make the new link relative.
+
+    Args:
+        new_links (list): new links to be made relative
+        orig_links (list): original links
+    """
+    for new_link, orig_link in zip(new_links, orig_links):
+        target = readlink(orig_link)
+        relative_target = os.path.relpath(target, os.path.dirname(orig_link))
+        os.unlink(new_link)
+        symlink(relative_target, new_link)
+
+
+def make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root):
+    """
+    Replace old RPATHs with paths relative to old_dir in binary files
+    """
+    if not sys.platform == "darwin":
+        return
+
+    for cur_path, orig_path in zip(cur_path_names, orig_path_names):
+        (rpaths, deps, idpath) = macholib_get_paths(cur_path)
+        paths_to_paths = macho_make_paths_relative(
+            orig_path, old_layout_root, rpaths, deps, idpath
+        )
+        modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths)
+
+
+def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
+    """Replace the original RPATHs in the new binaries making them
+    relative to the original layout root.
+
+    Args:
+        new_binaries (list): new binaries whose RPATHs is to be made relative
+        orig_binaries (list): original binaries
+        orig_layout_root (str): path to be used as a base for making
+            RPATHs relative
+    """
+    for new_binary, orig_binary in zip(new_binaries, orig_binaries):
+        orig_rpaths = _elf_rpaths_for(new_binary)
+        if orig_rpaths:
+            new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
+            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
+
+
 def warn_if_link_cant_be_relocated(link, target):
     if not os.path.isabs(target):
         return
```
```diff
@@ -48,7 +48,7 @@ def rewire_node(spec, explicit):
     # spec
     prefix_to_prefix = {spec.build_spec.prefix: spec.prefix}
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
-    for s in bindist.specs_to_relocate(spec):
+    for s in bindist.deps_to_relocate(spec):
         analog = s
         if id(s) not in build_spec_ids:
             analogs = [
@@ -77,9 +77,25 @@ def rewire_node(spec, explicit):
         ]
         if bins_to_relocate:
             if "macho" in platform.binary_formats:
-                relocate.relocate_macho_binaries(bins_to_relocate, prefix_to_prefix)
+                relocate.relocate_macho_binaries(
+                    bins_to_relocate,
+                    str(spack.store.STORE.layout.root),
+                    str(spack.store.STORE.layout.root),
+                    prefix_to_prefix,
+                    False,
+                    spec.build_spec.prefix,
+                    spec.prefix,
+                )
             if "elf" in platform.binary_formats:
-                relocate.relocate_elf_binaries(bins_to_relocate, prefix_to_prefix)
+                relocate.relocate_elf_binaries(
+                    bins_to_relocate,
+                    str(spack.store.STORE.layout.root),
+                    str(spack.store.STORE.layout.root),
+                    prefix_to_prefix,
+                    False,
+                    spec.build_spec.prefix,
+                    spec.prefix,
+                )
             relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
         shutil.rmtree(tempdir)
         install_manifest = os.path.join(
```
```diff
@@ -106,17 +106,10 @@
         {
             "names": ["install_missing_compilers"],
             "message": "The config:install_missing_compilers option has been deprecated in "
-            "Spack v0.23, and is currently ignored. It will be removed from config in "
+            "Spack v0.23, and is currently ignored. It will be removed from config after "
             "Spack v1.0.",
             "error": False,
         },
-        {
-            "names": ["install_path_scheme"],
-            "message": "The config:install_path_scheme option was deprecated in Spack v0.16 "
-            "in favor of config:install_tree:projections:all. It will be removed in Spack "
-            "v1.0.",
-            "error": False,
-        },
     ],
 }
}
```
```diff
@@ -265,11 +265,6 @@ def specify(spec):
     return spack.spec.Spec(spec)
 
 
-def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
-    """Transformation that removes all "node" and "virtual_node" from the input list of facts."""
-    return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
-
-
 def _create_counter(specs: List[spack.spec.Spec], tests: bool):
     strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
     if strategy == "full":
@@ -1521,6 +1516,7 @@ def _get_condition_id(
             return result[0]
 
         cond_id = next(self._id_counter)
+
         requirements = self.spec_clauses(named_cond, body=body, context=context)
         if context.transform:
             requirements = context.transform(named_cond, requirements)
@@ -1559,7 +1555,6 @@ def condition(
 
         if not context:
             context = ConditionContext()
-            context.transform_imposed = remove_node
 
         if imposed_spec:
             imposed_name = imposed_spec.name or imposed_name
@@ -1594,14 +1589,6 @@ def condition(
 
         return condition_id
 
-    def impose(self, condition_id, imposed_spec, node=True, body=False):
-        imposed_constraints = self.spec_clauses(imposed_spec, body=body)
-        for pred in imposed_constraints:
-            # imposed "node"-like conditions are no-ops
-            if not node and pred.args[0] in ("node", "virtual_node"):
-                continue
-            self.gen.fact(fn.imposed_constraint(condition_id, *pred.args))
-
     def package_provider_rules(self, pkg):
         for vpkg_name in pkg.provided_virtual_names():
             if vpkg_name not in self.possible_virtuals:
@@ -1659,7 +1646,7 @@ def track_dependencies(input_spec, requirements):
             return requirements + [fn.attr("track_dependencies", input_spec.name)]
 
         def dependency_holds(input_spec, requirements):
-            return remove_node(input_spec, requirements) + [
+            return requirements + [
                 fn.attr(
                     "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
                 )
@@ -1717,13 +1704,13 @@ def package_splice_rules(self, pkg):
             splice_node = fn.node(AspVar("NID"), cond.name)
             when_spec_attrs = [
                 fn.attr(c.args[0], splice_node, *(c.args[2:]))
-                for c in self.spec_clauses(cond, body=True, required_from=None)
-                if c.args[0] != "node"
+                for c in self.spec_clauses(cond, body=True, required_from=None, node=False)
             ]
             splice_spec_hash_attrs = [
                 fn.hash_attr(hash_var, *(c.args))
-                for c in self.spec_clauses(spec_to_splice, body=True, required_from=None)
-                if c.args[0] != "node"
+                for c in self.spec_clauses(
+                    spec_to_splice, body=True, required_from=None, node=False
+                )
             ]
             if match_variants is None:
                 variant_constraints = []
@@ -1845,10 +1832,6 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
                 context.source = ConstraintOrigin.append_type_suffix(
                     pkg_name, ConstraintOrigin.REQUIRE
                 )
-                if not virtual:
-                    context.transform_imposed = remove_node
-                # else: for virtuals we want to emit "node" and
-                # "virtual_node" in imposed specs
 
                 member_id = self.condition(
                     required_spec=when_spec,
@@ -2022,6 +2005,7 @@ def spec_clauses(
         self,
         spec: spack.spec.Spec,
         *,
+        node: bool = True,
         body: bool = False,
         transitive: bool = True,
         expand_hashes: bool = False,
@@ -2039,6 +2023,7 @@ def spec_clauses(
         try:
             clauses = self._spec_clauses(
                 spec,
+                node=node,
                 body=body,
                 transitive=transitive,
                 expand_hashes=expand_hashes,
@@ -2056,6 +2041,7 @@ def _spec_clauses(
         self,
         spec: spack.spec.Spec,
         *,
+        node: bool = True,
         body: bool = False,
         transitive: bool = True,
         expand_hashes: bool = False,
@@ -2066,6 +2052,7 @@ def _spec_clauses(
 
         Arguments:
             spec: the spec to analyze
+            node: if True, emit node(PackageName, ...) and virtual_node(PackageaName, ...) facts
             body: if True, generate clauses to be used in rule bodies (final values) instead
                 of rule heads (setters).
            transitive: if False, don't generate clauses from dependencies (default True)
@@ -2085,8 +2072,10 @@ def _spec_clauses(
 
         f: Union[Type[_Head], Type[_Body]] = _Body if body else _Head
 
-        if spec.name:
+        # only generate this if caller asked for node facts -- not needed for most conditions
+        if node and spec.name:
             clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
 
         if spec.namespace:
             clauses.append(f.namespace(spec.name, spec.namespace))
@@ -2244,6 +2233,7 @@ def _spec_clauses(
                 clauses.extend(
                     self._spec_clauses(
                         dep,
+                        node=node,
                         body=body,
                         expand_hashes=expand_hashes,
                         concrete_build_deps=concrete_build_deps,
@@ -2628,7 +2618,7 @@ def concrete_specs(self):
             # this indicates that there is a spec like this installed
             self.gen.fact(fn.installed_hash(spec.name, h))
             # indirection layer between hash constraints and imposition to allow for splicing
-            for pred in self.spec_clauses(spec, body=True, required_from=None):
+            for pred in self.spec_clauses(spec, body=True, required_from=None, node=False):
                 self.gen.fact(fn.hash_attr(h, *pred.args))
             self.gen.newline()
             # Declare as possible parts of specs that are not in package.py
@@ -3238,7 +3228,7 @@ def depends_on(
         node_variable = "node(ID, Package)"
         when_spec.name = placeholder
 
-        body_clauses = self._setup.spec_clauses(when_spec, body=True)
+        body_clauses = self._setup.spec_clauses(when_spec, body=True, node=False)
         body_str = (
             f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
             f"  not external({node_variable}),\n"
@@ -3326,7 +3316,7 @@ def propagate(self, constraint_str: str, *, when: str):
         node_variable = "node(ID, Package)"
         when_spec.name = placeholder
 
-        body_clauses = self._setup.spec_clauses(when_spec, body=True)
+        body_clauses = self._setup.spec_clauses(when_spec, body=True, node=False)
         body_str = (
            f"  {f',{os.linesep}  '.join(str(x) for x in body_clauses)},\n"
            f"  not external({node_variable}),\n"
@@ -3337,7 +3327,7 @@ def propagate(self, constraint_str: str, *, when: str):
         assert constraint_spec.name is None, "only anonymous constraint specs are accepted"
 
         constraint_spec.name = placeholder
-        constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False)
+        constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False, node=False)
         for clause in constraint_clauses:
             if clause.args[0] == "node_compiler_version_satisfies":
                 self._setup.compiler_version_constraints.add(constraint_spec.compiler)
```
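The hunks above trade between two mechanisms for suppressing node facts: filtering them after generation (`remove_node`) versus not emitting them at all (`node=False`). A standalone sketch of the equivalence, with plain tuples standing in for `AspFunction` facts:

```python
# Standalone sketch: filtering "node" facts after generation vs. not emitting
# them at all. Plain tuples stand in for AspFunction facts.
facts = [("node", "zlib"), ("version", "zlib", "1.3"), ("virtual_node", "mpi")]

# remove_node style: generate everything, then drop node/virtual_node facts.
filtered = [f for f in facts if f[0] not in ("node", "virtual_node")]

# node=False style: skip emitting them in the first place.
emitted = []
node = False
if node:
    emitted.append(("node", "zlib"))
emitted.append(("version", "zlib", "1.3"))

assert filtered == emitted  # both yield [("version", "zlib", "1.3")]
```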
@@ -58,21 +58,7 @@
|
||||
import re
|
||||
import socket
|
||||
import warnings
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
List,
|
||||
Match,
|
||||
Optional,
|
||||
Set,
|
||||
Tuple,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
from typing_extensions import Literal
|
||||
from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -97,7 +83,7 @@
|
||||
import spack.solver
|
||||
import spack.spec_parser
|
||||
import spack.store
|
||||
import spack.traverse
|
||||
import spack.traverse as traverse
|
||||
import spack.util.executable
|
||||
import spack.util.hash
|
||||
import spack.util.module_cmd as md
|
||||
@@ -1353,16 +1339,16 @@ def tree(
|
||||
depth: bool = False,
|
||||
hashes: bool = False,
|
||||
hashlen: Optional[int] = None,
|
||||
cover: spack.traverse.CoverType = "nodes",
|
||||
cover: str = "nodes",
|
||||
indent: int = 0,
|
||||
format: str = DEFAULT_FORMAT,
|
||||
deptypes: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
deptypes: Union[Tuple[str, ...], str] = "all",
|
||||
show_types: bool = False,
|
||||
depth_first: bool = False,
|
||||
recurse_dependencies: bool = True,
|
||||
status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
|
||||
prefix: Optional[Callable[["Spec"], str]] = None,
|
||||
key: Callable[["Spec"], Any] = id,
|
||||
key=id,
|
||||
) -> str:
|
||||
"""Prints out specs and their dependencies, tree-formatted with indentation.
|
||||
|
||||
@@ -1394,16 +1380,11 @@ def tree(
|
||||
# reduce deptypes over all in-edges when covering nodes
|
||||
if show_types and cover == "nodes":
|
||||
deptype_lookup: Dict[str, dt.DepFlag] = collections.defaultdict(dt.DepFlag)
|
||||
for edge in spack.traverse.traverse_edges(
|
||||
specs, cover="edges", deptype=deptypes, root=False
|
||||
):
|
||||
for edge in traverse.traverse_edges(specs, cover="edges", deptype=deptypes, root=False):
|
||||
deptype_lookup[edge.spec.dag_hash()] |= edge.depflag
|
||||
|
||||
# SupportsRichComparisonT issue with List[Spec]
|
||||
sorted_specs: List["Spec"] = sorted(specs) # type: ignore[type-var]
|
||||
|
||||
for d, dep_spec in spack.traverse.traverse_tree(
|
||||
sorted_specs, cover=cover, deptype=deptypes, depth_first=depth_first, key=key
|
||||
for d, dep_spec in traverse.traverse_tree(
|
||||
sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
|
||||
):
|
||||
node = dep_spec.spec
|
||||
|
||||
@@ -1946,111 +1927,13 @@ def installed_upstream(self):
upstream, _ = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
return upstream

@overload
def traverse(
self,
*,
root: bool = ...,
order: spack.traverse.OrderType = ...,
cover: spack.traverse.CoverType = ...,
direction: spack.traverse.DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[False] = False,
key: Callable[["Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable["Spec"]: ...

@overload
def traverse(
self,
*,
root: bool = ...,
order: spack.traverse.OrderType = ...,
cover: spack.traverse.CoverType = ...,
direction: spack.traverse.DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[True],
key: Callable[["Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[Tuple[int, "Spec"]]: ...

def traverse(
self,
*,
root: bool = True,
order: spack.traverse.OrderType = "pre",
cover: spack.traverse.CoverType = "nodes",
direction: spack.traverse.DirectionType = "children",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
depth: bool = False,
key: Callable[["Spec"], Any] = id,
visited: Optional[Set[Any]] = None,
) -> Iterable[Union["Spec", Tuple[int, "Spec"]]]:
def traverse(self, **kwargs):
"""Shorthand for :meth:`~spack.traverse.traverse_nodes`"""
return spack.traverse.traverse_nodes(
[self],
root=root,
order=order,
cover=cover,
direction=direction,
deptype=deptype,
depth=depth,
key=key,
visited=visited,
)
return traverse.traverse_nodes([self], **kwargs)

@overload
def traverse_edges(
self,
*,
root: bool = ...,
order: spack.traverse.OrderType = ...,
cover: spack.traverse.CoverType = ...,
direction: spack.traverse.DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[False] = False,
key: Callable[["Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[DependencySpec]: ...

@overload
def traverse_edges(
self,
*,
root: bool = ...,
order: spack.traverse.OrderType = ...,
cover: spack.traverse.CoverType = ...,
direction: spack.traverse.DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[True],
key: Callable[["Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[Tuple[int, DependencySpec]]: ...

def traverse_edges(
self,
*,
root: bool = True,
order: spack.traverse.OrderType = "pre",
cover: spack.traverse.CoverType = "nodes",
direction: spack.traverse.DirectionType = "children",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
depth: bool = False,
key: Callable[["Spec"], Any] = id,
visited: Optional[Set[Any]] = None,
) -> Iterable[Union[DependencySpec, Tuple[int, DependencySpec]]]:
def traverse_edges(self, **kwargs):
"""Shorthand for :meth:`~spack.traverse.traverse_edges`"""
return spack.traverse.traverse_edges(
[self],
root=root,
order=order,
cover=cover,
direction=direction,
deptype=deptype,
depth=depth,
key=key,
visited=visited,
)
return traverse.traverse_edges([self], **kwargs)

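The overloads removed above encode that ``depth=True`` switches the yielded shape; a minimal sketch of both shorthands (assuming ``spec`` is a concrete ``spack.spec.Spec``):

.. code-block:: python

    # depth=False (default): yields Spec objects
    for node in spec.traverse(order="post", deptype="link"):
        print(node.name)

    # depth=True: yields (depth, DependencySpec) tuples via traverse_edges
    for depth, edge in spec.traverse_edges(depth=True, cover="edges"):
        print("  " * depth + edge.spec.name)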
@property
def short_spec(self):
@@ -3061,7 +2944,7 @@ def _finalize_concretization(self):
for spec in self.traverse():
spec._cached_hash(ht.dag_hash)

def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "Spec":
def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
"""This is a non-destructive version of concretize().

First clones, then returns a concrete version of this package
@@ -4222,10 +4105,10 @@ def tree(
depth: bool = False,
hashes: bool = False,
hashlen: Optional[int] = None,
cover: spack.traverse.CoverType = "nodes",
cover: str = "nodes",
indent: int = 0,
format: str = DEFAULT_FORMAT,
deptypes: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
deptypes: Union[Tuple[str, ...], str] = "all",
show_types: bool = False,
depth_first: bool = False,
recurse_dependencies: bool = True,

@@ -43,6 +43,7 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.binary_distribution import CannotListKeys, GenerateIndexError
from spack.directory_layout import DirectoryLayout
from spack.paths import test_path
from spack.spec import Spec

@@ -135,28 +136,35 @@ def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
@pytest.fixture(scope="function")
def install_dir_default_layout(tmpdir):
"""Hooks a fake install directory with a default layout"""
scheme = os.path.join(
"${architecture}", "${compiler.name}-${compiler.version}", "${name}-${version}-${hash}"
)
real_store, real_layout = spack.store.STORE, spack.store.STORE.layout
opt_dir = tmpdir.join("opt")
original_store, spack.store.STORE = spack.store.STORE, spack.store.Store(str(opt_dir))
spack.store.STORE = spack.store.Store(str(opt_dir))
spack.store.STORE.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
try:
yield spack.store
finally:
spack.store.STORE = original_store
spack.store.STORE = real_store
spack.store.STORE.layout = real_layout


@pytest.fixture(scope="function")
def install_dir_non_default_layout(tmpdir):
"""Hooks a fake install directory with a non-default layout"""
opt_dir = tmpdir.join("opt")
original_store, spack.store.STORE = spack.store.STORE, spack.store.Store(
str(opt_dir),
projections={
"all": "{name}/{version}/{architecture}-{compiler.name}-{compiler.version}-{hash}"
},
scheme = os.path.join(
"${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
)
real_store, real_layout = spack.store.STORE, spack.store.STORE.layout
opt_dir = tmpdir.join("opt")
spack.store.STORE = spack.store.Store(str(opt_dir))
spack.store.STORE.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
try:
yield spack.store
finally:
spack.store.STORE = original_store
spack.store.STORE = real_store
spack.store.STORE.layout = real_layout


@pytest.fixture(scope="function")
@@ -635,6 +635,11 @@ def ensure_debug(monkeypatch):
tty.set_debug(current_debug_level)


@pytest.fixture(autouse=sys.platform == "win32", scope="session")
def platform_config():
spack.config.add_default_platform_scope(spack.platforms.real_host().name)


@pytest.fixture
def default_config():
"""Isolates the default configuration from the user configs.

@@ -285,16 +285,3 @@ def compilers(compiler, arch_spec):
error = capfd.readouterr()[1]
assert "Skipping tests for package" in error
assert "test requires missing compiler" in error


def test_package_subscript(default_mock_concretization):
"""Tests that we can use the subscript notation on packages, and that it returns a package"""
root = default_mock_concretization("mpileaks")
root_pkg = root.package

# Subscript of a virtual
assert isinstance(root_pkg["mpi"], spack.package_base.PackageBase)

# Subscript on concrete
for d in root.traverse():
assert isinstance(root_pkg[d.name], spack.package_base.PackageBase)
@@ -31,7 +31,13 @@
from spack.fetch_strategy import URLFetchStrategy
from spack.installer import PackageInstaller
from spack.paths import mock_gpg_keys_path
from spack.relocate import macho_find_paths, relocate_links, relocate_text
from spack.relocate import (
macho_find_paths,
macho_make_paths_normal,
macho_make_paths_relative,
relocate_links,
relocate_text,
)
from spack.spec import Spec

pytestmark = pytest.mark.not_on_windows("does not run on windows")
@@ -295,6 +301,7 @@ def test_replace_paths(tmpdir):
os.path.join(oldlibdir_local, libfile_loco),
],
os.path.join(oldlibdir_cc, libfile_c),
old_spack_dir,
prefix2prefix,
)
assert out_dict == {
@@ -318,6 +325,7 @@ def test_replace_paths(tmpdir):
os.path.join(oldlibdir_local, libfile_loco),
],
None,
old_spack_dir,
prefix2prefix,
)
assert out_dict == {
@@ -341,6 +349,7 @@ def test_replace_paths(tmpdir):
f"@rpath/{libfile_loco}",
],
None,
old_spack_dir,
prefix2prefix,
)

@@ -360,6 +369,7 @@ def test_replace_paths(tmpdir):
[oldlibdir_a, oldlibdir_b, oldlibdir_d, oldlibdir_local],
[f"@rpath/{libfile_a}", f"@rpath/{libfile_b}", f"@rpath/{libfile_loco}"],
None,
old_spack_dir,
prefix2prefix,
)
assert out_dict == {
@@ -373,6 +383,91 @@ def test_replace_paths(tmpdir):
}

def test_macho_make_paths():
out = macho_make_paths_relative(
"/Users/Shared/spack/pkgC/lib/libC.dylib",
"/Users/Shared/spack",
("/Users/Shared/spack/pkgA/lib", "/Users/Shared/spack/pkgB/lib", "/usr/local/lib"),
(
"/Users/Shared/spack/pkgA/libA.dylib",
"/Users/Shared/spack/pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib",
),
"/Users/Shared/spack/pkgC/lib/libC.dylib",
)
assert out == {
"/Users/Shared/spack/pkgA/lib": "@loader_path/../../pkgA/lib",
"/Users/Shared/spack/pkgB/lib": "@loader_path/../../pkgB/lib",
"/usr/local/lib": "/usr/local/lib",
"/Users/Shared/spack/pkgA/libA.dylib": "@loader_path/../../pkgA/libA.dylib",
"/Users/Shared/spack/pkgB/libB.dylib": "@loader_path/../../pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
"/Users/Shared/spack/pkgC/lib/libC.dylib": "@rpath/libC.dylib",
}

out = macho_make_paths_normal(
"/Users/Shared/spack/pkgC/lib/libC.dylib",
("@loader_path/../../pkgA/lib", "@loader_path/../../pkgB/lib", "/usr/local/lib"),
(
"@loader_path/../../pkgA/libA.dylib",
"@loader_path/../../pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib",
),
"@rpath/libC.dylib",
)

assert out == {
"@rpath/libC.dylib": "/Users/Shared/spack/pkgC/lib/libC.dylib",
"@loader_path/../../pkgA/lib": "/Users/Shared/spack/pkgA/lib",
"@loader_path/../../pkgB/lib": "/Users/Shared/spack/pkgB/lib",
"/usr/local/lib": "/usr/local/lib",
"@loader_path/../../pkgA/libA.dylib": "/Users/Shared/spack/pkgA/libA.dylib",
"@loader_path/../../pkgB/libB.dylib": "/Users/Shared/spack/pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
}

out = macho_make_paths_relative(
"/Users/Shared/spack/pkgC/bin/exeC",
"/Users/Shared/spack",
("/Users/Shared/spack/pkgA/lib", "/Users/Shared/spack/pkgB/lib", "/usr/local/lib"),
(
"/Users/Shared/spack/pkgA/libA.dylib",
"/Users/Shared/spack/pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib",
),
None,
)

assert out == {
"/Users/Shared/spack/pkgA/lib": "@loader_path/../../pkgA/lib",
"/Users/Shared/spack/pkgB/lib": "@loader_path/../../pkgB/lib",
"/usr/local/lib": "/usr/local/lib",
"/Users/Shared/spack/pkgA/libA.dylib": "@loader_path/../../pkgA/libA.dylib",
"/Users/Shared/spack/pkgB/libB.dylib": "@loader_path/../../pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
}

out = macho_make_paths_normal(
"/Users/Shared/spack/pkgC/bin/exeC",
("@loader_path/../../pkgA/lib", "@loader_path/../../pkgB/lib", "/usr/local/lib"),
(
"@loader_path/../../pkgA/libA.dylib",
"@loader_path/../../pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib",
),
None,
)

assert out == {
"@loader_path/../../pkgA/lib": "/Users/Shared/spack/pkgA/lib",
"@loader_path/../../pkgB/lib": "/Users/Shared/spack/pkgB/lib",
"/usr/local/lib": "/usr/local/lib",
"@loader_path/../../pkgA/libA.dylib": "/Users/Shared/spack/pkgA/libA.dylib",
"@loader_path/../../pkgB/libB.dylib": "/Users/Shared/spack/pkgB/libB.dylib",
"/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
}

@pytest.fixture()
def mock_download(monkeypatch):
"""Mock a failing download strategy."""
@@ -466,6 +561,10 @@ def test_macho_relocation_with_changing_projection(relocation_dict):
"""
original_rpath = "/foo/bar/baz/abcdef"
result = macho_find_paths(
[original_rpath], deps=[], idpath=None, prefix_to_prefix=relocation_dict
[original_rpath],
deps=[],
idpath=None,
old_layout_root="/foo",
prefix_to_prefix=relocation_dict,
)
assert result[original_rpath] == "/a/b/c/abcdef"

@@ -1,6 +1,8 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import re
import shutil

@@ -112,6 +114,49 @@ def _copy_somewhere(orig_binary):
return _copy_somewhere

@pytest.mark.parametrize(
"start_path,path_root,paths,expected",
[
(
"/usr/bin/test",
"/usr",
["/usr/lib", "/usr/lib64", "/opt/local/lib"],
[
os.path.join("$ORIGIN", "..", "lib"),
os.path.join("$ORIGIN", "..", "lib64"),
"/opt/local/lib",
],
)
],
)
def test_make_relative_paths(start_path, path_root, paths, expected):
relatives = spack.relocate._make_relative(start_path, path_root, paths)
assert relatives == expected


@pytest.mark.parametrize(
"start_path,relative_paths,expected",
[
# $ORIGIN will be replaced with os.path.dirname('usr/bin/test')
# and then normalized
(
"/usr/bin/test",
["$ORIGIN/../lib", "$ORIGIN/../lib64", "/opt/local/lib"],
[
os.sep + os.path.join("usr", "lib"),
os.sep + os.path.join("usr", "lib64"),
"/opt/local/lib",
],
),
# Relative path without $ORIGIN
("/usr/bin/test", ["../local/lib"], ["../local/lib"]),
],
)
def test_normalize_relative_paths(start_path, relative_paths, expected):
normalized = spack.relocate._normalize_relative_paths(start_path, relative_paths)
assert normalized == expected
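The ``$ORIGIN`` normalization the second test expects can be sketched in a few lines of plain Python (``normalize_relative_rpaths`` is a hypothetical stand-in for the private helper, not Spack's implementation):

.. code-block:: python

    import os

    def normalize_relative_rpaths(start_path, relative_paths):
        # "$ORIGIN" is replaced by the binary's directory, then normalized;
        # relative paths without "$ORIGIN" are returned untouched.
        origin = os.path.dirname(start_path)
        return [
            os.path.normpath(p.replace("$ORIGIN", origin)) if "$ORIGIN" in p else p
            for p in relative_paths
        ]

    assert normalize_relative_rpaths(
        "/usr/bin/test", ["$ORIGIN/../lib", "$ORIGIN/../lib64", "/opt/local/lib"]
    ) == ["/usr/lib", "/usr/lib64", "/opt/local/lib"]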
@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
@@ -137,13 +182,61 @@ def test_relocate_elf_binaries_absolute_paths(binary_with_rpaths, copy_binary, p
new_binary = copy_binary(orig_binary)

spack.relocate.relocate_elf_binaries(
binaries=[str(new_binary)], prefix_to_prefix={str(orig_binary.dirpath()): "/foo"}
binaries=[str(new_binary)],
orig_root=str(orig_binary.dirpath()),
new_root=None,  # Not needed when relocating absolute paths
new_prefixes={str(orig_binary.dirpath()): "/foo"},
rel=False,
# Not needed when relocating absolute paths
orig_prefix=None,
new_prefix=None,
)

# Some compilers add rpaths so ensure changes included in final result
assert "/foo/lib:/usr/lib64" in rpaths_for(new_binary)


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_relocate_elf_binaries_relative_paths(binary_with_rpaths, copy_binary):
# Create an executable, set some RPATHs, copy it to another location
orig_binary = binary_with_rpaths(rpaths=["lib", "lib64", "/opt/local/lib"])
new_binary = copy_binary(orig_binary)

spack.relocate.relocate_elf_binaries(
binaries=[str(new_binary)],
orig_root=str(orig_binary.dirpath()),
new_root=str(new_binary.dirpath()),
new_prefixes={str(orig_binary.dirpath()): "/foo"},
rel=True,
orig_prefix=str(orig_binary.dirpath()),
new_prefix=str(new_binary.dirpath()),
)

# Some compilers add rpaths so ensure changes included in final result
assert "/foo/lib:/foo/lib64:/opt/local/lib" in rpaths_for(new_binary)


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpdir):
orig_binary = binary_with_rpaths(
rpaths=[
str(prefix_tmpdir.mkdir("lib")),
str(prefix_tmpdir.mkdir("lib64")),
"/opt/local/lib",
]
)
new_binary = copy_binary(orig_binary)

spack.relocate.make_elf_binaries_relative(
[str(new_binary)], [str(orig_binary)], str(orig_binary.dirpath())
)

# Some compilers add rpaths so ensure changes included in final result
assert "$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib" in rpaths_for(new_binary)


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_relocate_text_bin_with_message(binary_with_rpaths, copy_binary, prefix_tmpdir):

@@ -3,21 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from collections import defaultdict
from typing import (
Any,
Callable,
Iterable,
List,
NamedTuple,
Optional,
Sequence,
Set,
Tuple,
Union,
overload,
)

from typing_extensions import Literal
from typing import Any, Callable, List, NamedTuple, Set, Union

import spack.deptypes as dt
import spack.spec
@@ -438,95 +424,49 @@ def traverse_topo_edges_generator(edges, visitor, key=id, root=True, all_edges=F

# High-level API: traverse_edges, traverse_nodes, traverse_tree.

OrderType = Literal["pre", "post", "breadth", "topo"]
CoverType = Literal["nodes", "edges", "paths"]
DirectionType = Literal["children", "parents"]


@overload
def traverse_edges(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = ...,
order: OrderType = ...,
cover: CoverType = ...,
direction: DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[False] = False,
key: Callable[["spack.spec.Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable["spack.spec.DependencySpec"]: ...


@overload
def traverse_edges(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = ...,
order: OrderType = ...,
cover: CoverType = ...,
direction: DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[True],
key: Callable[["spack.spec.Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[Tuple[int, "spack.spec.DependencySpec"]]: ...


@overload
def traverse_edges(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = ...,
order: OrderType = ...,
cover: CoverType = ...,
direction: DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: bool,
key: Callable[["spack.spec.Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[Union["spack.spec.DependencySpec", Tuple[int, "spack.spec.DependencySpec"]]]: ...


def traverse_edges(
specs: Sequence["spack.spec.Spec"],
root: bool = True,
order: OrderType = "pre",
cover: CoverType = "nodes",
direction: DirectionType = "children",
specs,
root=True,
order="pre",
cover="nodes",
direction="children",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
depth: bool = False,
key: Callable[["spack.spec.Spec"], Any] = id,
visited: Optional[Set[Any]] = None,
) -> Iterable[Union["spack.spec.DependencySpec", Tuple[int, "spack.spec.DependencySpec"]]]:
depth=False,
key=id,
visited=None,
):
"""
Iterable of edges from the DAG, starting from a list of root specs.
Generator that yields edges from the DAG, starting from a list of root specs.

Arguments:

specs: List of root specs (considered to be depth 0)
root: Yield the root nodes themselves
order: What order of traversal to use in the DAG. For depth-first search this can be
``pre`` or ``post``. For BFS this should be ``breadth``. For topological order use
``topo``
cover: Determines how extensively to cover the dag. Possible values:
specs (list): List of root specs (considered to be depth 0)
root (bool): Yield the root nodes themselves
order (str): What order of traversal to use in the DAG. For depth-first
search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
For topological order use ``topo``
cover (str): Determines how extensively to cover the dag. Possible values:
``nodes`` -- Visit each unique node in the dag only once.
``edges`` -- If a node has been visited once but is reached along a new path, it's
accepted, but not recursively followed. This traverses each 'edge' in the DAG once.
``paths`` -- Explore every unique path reachable from the root. This descends into
visited subtrees and will accept nodes multiple times if they're reachable by multiple
paths.
direction: ``children`` or ``parents``. If ``children``, does a traversal of this spec's
children. If ``parents``, traverses upwards in the DAG towards the root.
``edges`` -- If a node has been visited once but is reached along a
new path, it's accepted, but not recursively followed. This traverses
each 'edge' in the DAG once.
``paths`` -- Explore every unique path reachable from the root.
This descends into visited subtrees and will accept nodes multiple
times if they're reachable by multiple paths.
direction (str): ``children`` or ``parents``. If ``children``, does a traversal
of this spec's children. If ``parents``, traverses upwards in the DAG
towards the root.
deptype: allowed dependency types
depth: When ``False``, yield just edges. When ``True`` yield the tuple (depth, edge), where
depth corresponds to the depth at which edge.spec was discovered.
depth (bool): When ``False``, yield just edges. When ``True`` yield
the tuple (depth, edge), where depth corresponds to the depth
at which edge.spec was discovered.
key: function that takes a spec and outputs a key for uniqueness test.
visited: a set of nodes not to follow
visited (set or None): a set of nodes not to follow

Returns:
An iterable of ``DependencySpec`` if depth is ``False`` or a tuple of
``(depth, DependencySpec)`` if depth is ``True``.
A generator that yields ``DependencySpec`` if depth is ``False``
or a tuple of ``(depth, DependencySpec)`` if depth is ``True``.
"""
# validate input
if order == "topo":
@@ -544,7 +484,7 @@ def traverse_edges(
root_edges = with_artificial_edges(specs)

# Depth-first
if order == "pre" or order == "post":
if order in ("pre", "post"):
return traverse_depth_first_edges_generator(
root_edges, visitor, order == "post", root, depth
)
@@ -556,135 +496,79 @@ def traverse_edges(
)

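The ``nodes``/``edges``/``paths`` cover semantics documented above can be illustrated on a toy diamond DAG (a self-contained sketch using plain dicts, not Spack code):

.. code-block:: python

    DAG = {"root": ["a", "b"], "a": ["c"], "b": ["c"], "c": ["d"], "d": []}

    def walk(node, cover, seen=None):
        seen = set() if seen is None else seen
        for child in DAG[node]:
            if cover == "nodes" and child in seen:
                continue  # each unique node is visited only once
            yield (node, child)
            if cover == "edges" and child in seen:
                continue  # edge accepted, but the subtree is not re-followed
            seen.add(child)
            yield from walk(child, cover, seen)

    assert len(list(walk("root", "nodes"))) == 4  # b->c edge skipped entirely
    assert len(list(walk("root", "edges"))) == 5  # b->c accepted, not recursed
    assert len(list(walk("root", "paths"))) == 6  # c->d reached via both paths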
@overload
def traverse_nodes(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = ...,
order: OrderType = ...,
cover: CoverType = ...,
direction: DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[False] = False,
key: Callable[["spack.spec.Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable["spack.spec.Spec"]: ...


@overload
def traverse_nodes(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = ...,
order: OrderType = ...,
cover: CoverType = ...,
direction: DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: Literal[True],
key: Callable[["spack.spec.Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[Tuple[int, "spack.spec.Spec"]]: ...


@overload
def traverse_nodes(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = ...,
order: OrderType = ...,
cover: CoverType = ...,
direction: DirectionType = ...,
deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
depth: bool,
key: Callable[["spack.spec.Spec"], Any] = ...,
visited: Optional[Set[Any]] = ...,
) -> Iterable[Union["spack.spec.Spec", Tuple[int, "spack.spec.Spec"]]]: ...


def traverse_nodes(
specs: Sequence["spack.spec.Spec"],
*,
root: bool = True,
order: OrderType = "pre",
cover: CoverType = "nodes",
direction: DirectionType = "children",
specs,
root=True,
order="pre",
cover="nodes",
direction="children",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
depth: bool = False,
key: Callable[["spack.spec.Spec"], Any] = id,
visited: Optional[Set[Any]] = None,
) -> Iterable[Union["spack.spec.Spec", Tuple[int, "spack.spec.Spec"]]]:
depth=False,
key=id,
visited=None,
):
"""
Iterable of specs from the DAG, starting from a list of root specs.
Generator that yields specs from the DAG, starting from a list of root specs.

Arguments:
specs: List of root specs (considered to be depth 0)
root: Yield the root nodes themselves
order: What order of traversal to use in the DAG. For depth-first search this can be
``pre`` or ``post``. For BFS this should be ``breadth``.
cover: Determines how extensively to cover the dag. Possible values:
specs (list): List of root specs (considered to be depth 0)
root (bool): Yield the root nodes themselves
order (str): What order of traversal to use in the DAG. For depth-first
search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
cover (str): Determines how extensively to cover the dag. Possible values:
``nodes`` -- Visit each unique node in the dag only once.
``edges`` -- If a node has been visited once but is reached along a new path, it's
accepted, but not recursively followed. This traverses each 'edge' in the DAG once.
``paths`` -- Explore every unique path reachable from the root. This descends into
visited subtrees and will accept nodes multiple times if they're reachable by multiple
paths.
direction: ``children`` or ``parents``. If ``children``, does a traversal of this spec's
children. If ``parents``, traverses upwards in the DAG towards the root.
``edges`` -- If a node has been visited once but is reached along a
new path, it's accepted, but not recursively followed. This traverses
each 'edge' in the DAG once.
``paths`` -- Explore every unique path reachable from the root.
This descends into visited subtrees and will accept nodes multiple
times if they're reachable by multiple paths.
direction (str): ``children`` or ``parents``. If ``children``, does a traversal
of this spec's children. If ``parents``, traverses upwards in the DAG
towards the root.
deptype: allowed dependency types
depth: When ``False``, yield just edges. When ``True`` yield the tuple ``(depth, edge)``,
where depth corresponds to the depth at which ``edge.spec`` was discovered.
depth (bool): When ``False``, yield just edges. When ``True`` yield
the tuple ``(depth, edge)``, where depth corresponds to the depth
at which ``edge.spec`` was discovered.
key: function that takes a spec and outputs a key for uniqueness test.
visited: a set of nodes not to follow
visited (set or None): a set of nodes not to follow

Yields:
By default :class:`~spack.spec.Spec`, or a tuple ``(depth, Spec)`` if depth is
set to ``True``.
"""
for item in traverse_edges(
specs,
root=root,
order=order,
cover=cover,
direction=direction,
deptype=deptype,
depth=depth,
key=key,
visited=visited,
):
yield (item[0], item[1].spec) if depth else item.spec  # type: ignore
for item in traverse_edges(specs, root, order, cover, direction, deptype, depth, key, visited):
yield (item[0], item[1].spec) if depth else item.spec

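A short usage sketch of ``traverse_nodes()`` as documented above (assumes ``specs`` is a list of concrete Specs in a Spack session):

.. code-block:: python

    import spack.traverse as traverse

    # depth=True yields (depth, Spec) tuples, handy for indented output
    for depth, node in traverse.traverse_nodes(specs, depth=True, order="breadth"):
        print("  " * depth + node.name)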
def traverse_tree(
specs: Sequence["spack.spec.Spec"],
cover: CoverType = "nodes",
deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
key: Callable[["spack.spec.Spec"], Any] = id,
depth_first: bool = True,
) -> Iterable[Tuple[int, "spack.spec.DependencySpec"]]:
specs, cover="nodes", deptype: Union[dt.DepFlag, dt.DepTypes] = "all", key=id, depth_first=True
):
"""
Generator that yields ``(depth, DependencySpec)`` tuples in the depth-first
pre-order, so that a tree can be printed from it.

Arguments:

specs: List of root specs (considered to be depth 0)
cover: Determines how extensively to cover the dag. Possible values:
specs (list): List of root specs (considered to be depth 0)
cover (str): Determines how extensively to cover the dag. Possible values:
``nodes`` -- Visit each unique node in the dag only once.
``edges`` -- If a node has been visited once but is reached along a
new path, it's accepted, but not recursively followed. This traverses each 'edge' in
the DAG once.
``paths`` -- Explore every unique path reachable from the root. This descends into
visited subtrees and will accept nodes multiple times if they're reachable by multiple
paths.
new path, it's accepted, but not recursively followed. This traverses
each 'edge' in the DAG once.
``paths`` -- Explore every unique path reachable from the root.
This descends into visited subtrees and will accept nodes multiple
times if they're reachable by multiple paths.
deptype: allowed dependency types
key: function that takes a spec and outputs a key for uniqueness test.
depth_first: Explore the tree in depth-first or breadth-first order. When setting
``depth_first=True`` and ``cover=nodes``, each spec only occurs once at the shallowest
level, which is useful when rendering the tree in a terminal.
depth_first (bool): Explore the tree in depth-first or breadth-first order.
When setting ``depth_first=True`` and ``cover=nodes``, each spec only
occurs once at the shallowest level, which is useful when rendering
the tree in a terminal.

Returns:
A generator that yields ``(depth, DependencySpec)`` tuples in such an order that a tree can
be printed.
A generator that yields ``(depth, DependencySpec)`` tuples in such an order
that a tree can be printed.
"""
# BFS only makes sense when going over edges and nodes, for paths the tree is
# identical to DFS, which is much more efficient then.

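And the typical consumer of ``traverse_tree()``, per its docstring (a sketch under the same assumptions as above):

.. code-block:: python

    import spack.traverse as traverse

    # depth_first=True with cover="nodes" places each spec once, at its
    # shallowest depth, which renders well in a terminal.
    for depth, edge in traverse.traverse_tree(specs, cover="nodes", depth_first=True):
        print("    " * depth + edge.spec.name)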
@@ -7,12 +7,11 @@
import subprocess
import sys
from pathlib import Path, PurePath
from typing import Callable, Dict, Optional, Sequence, TextIO, Type, Union, overload

import llnl.util.tty as tty

import spack.error
from spack.util.environment import EnvironmentModifications
import spack.util.environment

__all__ = ["Executable", "which", "which_string", "ProcessError"]
@@ -20,29 +19,33 @@
class Executable:
"""Class representing a program that can be run on the command line."""

def __init__(self, name: str) -> None:
def __init__(self, name):
file_path = str(Path(name))
if sys.platform != "win32" and name.startswith("."):
# pathlib strips the ./ from relative paths so it must be added back
file_path = os.path.join(".", file_path)

self.exe = [file_path]
self.default_env: Dict[str, str] = {}
self.default_envmod = EnvironmentModifications()
self.returncode = 0

self.default_env = {}

self.default_envmod = spack.util.environment.EnvironmentModifications()
self.returncode = None
self.ignore_quotes = False

def add_default_arg(self, *args: str) -> None:
if not self.exe:
raise ProcessError("Cannot construct executable for '%s'" % name)

def add_default_arg(self, *args):
"""Add default argument(s) to the command."""
self.exe.extend(args)

def with_default_args(self, *args: str) -> "Executable":
def with_default_args(self, *args):
"""Same as add_default_arg, but returns a copy of the executable."""
new = self.copy()
new.add_default_arg(*args)
return new

def copy(self) -> "Executable":
def copy(self):
"""Return a copy of this Executable."""
new = Executable(self.exe[0])
new.exe[:] = self.exe
@@ -50,7 +53,7 @@ def copy(self) -> "Executable":
new.default_envmod.extend(self.default_envmod)
return new

def add_default_env(self, key: str, value: str) -> None:
def add_default_env(self, key, value):
"""Set an environment variable when the command is run.

Parameters:
@@ -59,109 +62,68 @@ def add_default_env(self, key: str, value: str) -> None:
"""
self.default_env[key] = value

def add_default_envmod(self, envmod: EnvironmentModifications) -> None:
def add_default_envmod(self, envmod):
"""Set an EnvironmentModifications to use when the command is run."""
self.default_envmod.extend(envmod)

@property
def command(self) -> str:
"""Returns the entire command-line string"""
def command(self):
"""The command-line string.

Returns:
str: The executable and default arguments
"""
return " ".join(self.exe)

@property
def name(self) -> str:
"""Returns the executable name"""
def name(self):
"""The executable name.

Returns:
str: The basename of the executable
"""
return PurePath(self.path).name

@property
def path(self) -> str:
"""Returns the executable path"""
def path(self):
"""The path to the executable.

Returns:
str: The path to the executable
"""
return str(PurePath(self.exe[0]))

@overload
def __call__(
self,
*args: str,
fail_on_error: bool = ...,
ignore_errors: Union[int, Sequence[int]] = ...,
ignore_quotes: Optional[bool] = ...,
timeout: Optional[int] = ...,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
input: Optional[TextIO] = ...,
output: Union[Optional[TextIO], str] = ...,
error: Union[Optional[TextIO], str] = ...,
_dump_env: Optional[Dict[str, str]] = ...,
) -> None: ...

@overload
def __call__(
self,
*args: str,
fail_on_error: bool = ...,
ignore_errors: Union[int, Sequence[int]] = ...,
ignore_quotes: Optional[bool] = ...,
timeout: Optional[int] = ...,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
input: Optional[TextIO] = ...,
output: Union[Type[str], Callable],
error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
_dump_env: Optional[Dict[str, str]] = ...,
) -> str: ...

@overload
def __call__(
self,
*args: str,
fail_on_error: bool = ...,
ignore_errors: Union[int, Sequence[int]] = ...,
ignore_quotes: Optional[bool] = ...,
timeout: Optional[int] = ...,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
input: Optional[TextIO] = ...,
output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
error: Union[Type[str], Callable],
_dump_env: Optional[Dict[str, str]] = ...,
) -> str: ...

def __call__(
self,
*args: str,
fail_on_error: bool = True,
ignore_errors: Union[int, Sequence[int]] = (),
ignore_quotes: Optional[bool] = None,
timeout: Optional[int] = None,
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = None,
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = None,
input: Optional[TextIO] = None,
output: Union[Optional[TextIO], str, Type[str], Callable] = None,
error: Union[Optional[TextIO], str, Type[str], Callable] = None,
_dump_env: Optional[Dict[str, str]] = None,
) -> Optional[str]:
"""Runs this executable in a subprocess.
def __call__(self, *args, **kwargs):
"""Run this executable in a subprocess.

Parameters:
*args: command-line arguments to the executable to run
fail_on_error: if True, raises an exception if the subprocess returns an error
The return code is available as ``self.returncode``
ignore_errors: a sequence of error codes to ignore. If these codes are returned, this
process will not raise an exception, even if ``fail_on_error`` is set to ``True``
ignore_quotes: if False, warn users that quotes are not needed, as Spack does not
use a shell. If None, use ``self.ignore_quotes``.
timeout: the number of seconds to wait before killing the child process
env: the environment with which to run the executable
extra_env: extra items to add to the environment (neither requires nor precludes env)
input: where to read stdin from
output: where to send stdout
error: where to send stderr
_dump_env: dict to be set to the environment actually used (envisaged for
testing purposes only)
*args (str): Command-line arguments to the executable to run

Keyword Arguments:
_dump_env (dict): Dict to be set to the environment actually
used (envisaged for testing purposes only)
env (dict or EnvironmentModifications): The environment with which
to run the executable
extra_env (dict or EnvironmentModifications): Extra items to add to
the environment (neither requires nor precludes env)
fail_on_error (bool): Raise an exception if the subprocess returns
an error. Default is True. The return code is available as
``exe.returncode``
ignore_errors (int or list): A list of error codes to ignore.
If these codes are returned, this process will not raise
an exception even if ``fail_on_error`` is set to ``True``
ignore_quotes (bool): If False, warn users that quotes are not needed
as Spack does not use a shell. Defaults to False.
timeout (int or float): The number of seconds to wait before killing
the child process
input: Where to read stdin from
output: Where to send stdout
error: Where to send stderr

Accepted values for input, output, and error:

* python streams, e.g. open Python file objects, or ``os.devnull``
* filenames, which will be automatically opened for writing
* ``str``, as in the Python string type. If you set these to ``str``,
output and error will be written to pipes and returned as a string.
If both ``output`` and ``error`` are set to ``str``, then one string
@@ -171,11 +133,8 @@ def __call__(
Behaves the same as ``str``, except that value is also written to
``stdout`` or ``stderr``.

For output and error it's also accepted:

* filenames, which will be automatically opened for writing

By default, the subprocess inherits the parent's file descriptors.

"""

def process_cmd_output(out, err):
@@ -200,34 +159,44 @@ def process_cmd_output(out, err):
sys.stderr.write(errstr)
return result

# Environment
env_arg = kwargs.get("env", None)

# Setup default environment
current_environment = os.environ.copy() if env is None else {}
self.default_envmod.apply_modifications(current_environment)
current_environment.update(self.default_env)
env = os.environ.copy() if env_arg is None else {}
self.default_envmod.apply_modifications(env)
env.update(self.default_env)

# Apply env argument
if isinstance(env, EnvironmentModifications):
env.apply_modifications(current_environment)
elif env:
current_environment.update(env)
if isinstance(env_arg, spack.util.environment.EnvironmentModifications):
env_arg.apply_modifications(env)
elif env_arg:
env.update(env_arg)

# Apply extra env
if isinstance(extra_env, EnvironmentModifications):
extra_env.apply_modifications(current_environment)
elif extra_env is not None:
current_environment.update(extra_env)
extra_env = kwargs.get("extra_env", {})
if isinstance(extra_env, spack.util.environment.EnvironmentModifications):
extra_env.apply_modifications(env)
else:
env.update(extra_env)

if _dump_env is not None:
_dump_env.clear()
_dump_env.update(current_environment)
if "_dump_env" in kwargs:
kwargs["_dump_env"].clear()
kwargs["_dump_env"].update(env)

if ignore_quotes is None:
ignore_quotes = self.ignore_quotes
fail_on_error = kwargs.pop("fail_on_error", True)
ignore_errors = kwargs.pop("ignore_errors", ())
ignore_quotes = kwargs.pop("ignore_quotes", self.ignore_quotes)
timeout = kwargs.pop("timeout", None)

# If they just want to ignore one error code, make it a tuple.
if isinstance(ignore_errors, int):
ignore_errors = (ignore_errors,)

input = kwargs.pop("input", None)
output = kwargs.pop("output", None)
error = kwargs.pop("error", None)

if input is str:
raise ValueError("Cannot use `str` as input stream.")

@@ -261,15 +230,9 @@ def streamify(arg, mode):
cmd_line_string = " ".join(escaped_cmd)
tty.debug(cmd_line_string)

result = None
try:
proc = subprocess.Popen(
cmd,
stdin=istream,
stderr=estream,
stdout=ostream,
env=current_environment,
close_fds=False,
cmd, stdin=istream, stderr=estream, stdout=ostream, env=env, close_fds=False
)
out, err = proc.communicate(timeout=timeout)

@@ -285,6 +248,9 @@ def streamify(arg, mode):
long_msg += "\n" + result

raise ProcessError("Command exited with status %d:" % proc.returncode, long_msg)

return result

except OSError as e:
message = "Command: " + cmd_line_string
if " " in self.exe[0]:
@@ -320,8 +286,6 @@ def streamify(arg, mode):
if close_istream:
istream.close()

return result

def __eq__(self, other):
return hasattr(other, "exe") and self.exe == other.exe

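The ``Executable`` call semantics in the diff above boil down to this common pattern (a minimal sketch, assuming a Spack checkout on ``PYTHONPATH``; ``/bin/echo`` is only an example program):

.. code-block:: python

    from spack.util.executable import Executable

    echo = Executable("/bin/echo")
    out = echo("hello", output=str)  # output=str captures stdout as a string
    assert out == "hello\n"
    assert echo.returncode == 0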
@@ -14,10 +14,10 @@ default:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }

# CI Platform-Arch
.cray_rhel_x86_64_v3:
.cray_rhel_zen4:
variables:
SPACK_TARGET_PLATFORM: "cray-rhel"
SPACK_TARGET_ARCH: "x86_64_v3"
SPACK_TARGET_ARCH: "zen4"

.cray_sles_zen4:
variables:
@@ -884,7 +884,7 @@ aws-pcluster-build-neoverse_v1:
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

.generate-cray-rhel:
tags: [ "cray-rhel-x86_64_v3", "public" ]
tags: [ "cray-rhel-zen4", "public" ]
extends: [ ".generate-cray" ]

.generate-cray-sles:
@@ -896,7 +896,7 @@ aws-pcluster-build-neoverse_v1:
# E4S - Cray RHEL
#######################################
.e4s-cray-rhel:
extends: [ ".cray_rhel_x86_64_v3" ]
extends: [ ".cray_rhel_zen4" ]
variables:
SPACK_CI_STACK_NAME: e4s-cray-rhel

@@ -904,6 +904,7 @@ e4s-cray-rhel-generate:
extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ]

e4s-cray-rhel-build:
allow_failure: true  # libsci_cray.so broken, misses DT_NEEDED for libdl.so
extends: [ ".build", ".e4s-cray-rhel" ]
trigger:
include:
@@ -922,10 +923,10 @@ e4s-cray-rhel-build:
variables:
SPACK_CI_STACK_NAME: e4s-cray-sles

.e4s-cray-sles-generate:
e4s-cray-sles-generate:
extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]

.e4s-cray-sles-build:
e4s-cray-sles-build:
allow_failure: true  # libsci_cray.so broken, misses DT_NEEDED for libdl.so
extends: [ ".build", ".e4s-cray-sles" ]
trigger:

@@ -1,27 +1,31 @@
compilers:
- compiler:
spec: cce@=18.0.0
spec: cce@15.0.1
paths:
cc: /opt/cray/pe/cce/18.0.0/bin/craycc
cxx: /opt/cray/pe/cce/18.0.0/bin/crayCC
f77: /opt/cray/pe/cce/18.0.0/bin/crayftn
fc: /opt/cray/pe/cce/18.0.0/bin/crayftn
cc: cc
cxx: CC
f77: ftn
fc: ftn
flags: {}
operating_system: rhel8
target: x86_64
modules: []
environment: {}
extra_rpaths: []
target: any
modules:
- PrgEnv-cray/8.3.3
- cce/15.0.1
environment:
set:
MACHTYPE: x86_64
- compiler:
spec: gcc@=8.5.0
spec: gcc@11.2.0
paths:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
cc: gcc
cxx: g++
f77: gfortran
fc: gfortran
flags: {}
operating_system: rhel8
target: x86_64
modules: []
environment: {}
extra_rpaths: []
target: any
modules:
- PrgEnv-gnu
- gcc/11.2.0
environment: {}
@@ -1,15 +1,16 @@
packages:
# EXTERNALS
cray-mpich:
buildable: false
externals:
- spec: cray-mpich@8.1.30 %cce
prefix: /opt/cray/pe/mpich/8.1.30/ofi/cray/18.0
- spec: cray-mpich@8.1.25 %cce@15.0.1
prefix: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0
modules:
- cray-mpich/8.1.30
- cray-mpich/8.1.25
cray-libsci:
buildable: false
externals:
- spec: cray-libsci@24.07.0 %cce
prefix: /opt/cray/pe/libsci/24.07.0/CRAY/18.0/x86_64/
- spec: cray-libsci@23.02.1.1 %cce@15.0.1
prefix: /opt/cray/pe/libsci/23.02.1.1/CRAY/9.0/x86_64/
modules:
- cray-libsci/24.07.0
- cray-libsci/23.02.1.1
@@ -1,4 +0,0 @@
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
tags: ["cray-rhel-x86_64_v3"]
|
||||
@@ -0,0 +1,4 @@
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
tags: ["cray-rhel-zen4"]
|
||||
@@ -13,7 +13,6 @@ spack:
|
||||
- openjpeg # CMakePackage
|
||||
- r-rcpp # RPackage
|
||||
- ruby-rake # RubyPackage
|
||||
- perl-data-dumper # PerlPackage
|
||||
- arch:
|
||||
- '%gcc'
|
||||
|
||||
|
||||
@@ -14,7 +14,8 @@ spack:

packages:
all:
require: "%cce@18.0.0 target=x86_64_v3"
prefer:
- "%cce"
compiler: [cce]
providers:
blas: [cray-libsci]
@@ -22,15 +23,17 @@ spack:
mpi: [cray-mpich]
tbb: [intel-tbb]
scalapack: [netlib-scalapack]
target: [zen4]
variants: +mpi
ncurses:
require: +termlib ldflags=-Wl,--undefined-version

tbb:
require: "intel-tbb"
binutils:
variants: +ld +gold +headers +libiberty ~nls
boost:
variants: +python +filesystem +iostreams +system
cuda:
version: [11.7.0]
elfutils:
variants: ~nls
require: "%gcc"
@@ -40,14 +43,18 @@ spack:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mgard:
require:
- "@2023-01-10:"
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
paraview:
# Don't build GUI support or GLX rendering for HPC/container deployments
require: "~qt ^[virtuals=gl] osmesa"
require: "@5.11 ~qt ^[virtuals=gl] osmesa"
trilinos:
require:
- one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack
@@ -58,6 +65,12 @@ spack:
- one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko]
- one_of: [+superlu-dist, ~superlu-dist]
- one_of: [+shylu, ~shylu]
xz:
variants: +pic
mesa:
version: [21.3.8]
unzip:
require: "%gcc"

specs:
# CPU
@@ -65,43 +78,62 @@ spack:
- aml
- arborx
- argobots
- bolt
- butterflypack
- boost +python +filesystem +iostreams +system
- cabana
- caliper
- chai
- charliecloud
- conduit
# - cp2k +mpi # libxsmm: ftn-78 ftn: ERROR in command linel; The -f option has an invalid argument, "tree-vectorize".
- datatransferkit
- flecsi
- flit
- flux-core
- fortrilinos
- ginkgo
- globalarrays
- gmp
- gotcha
- h5bench
- hdf5-vol-async
- hdf5-vol-cache cflags=-Wno-error=incompatible-function-pointer-types
- hdf5-vol-cache
- hdf5-vol-log
- heffte +fftw
- hpx max_cpu_count=512 networking=mpi
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- lammps
- legion
- libnrm
#- libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # mgard:
- libquo
- libunwind
- mercury
- metall
- mfem
# - mgard +serial +openmp +timing +unstructured ~cuda # mgard
- mpark-variant
- mpifileutils ~xattr cflags=-Wno-error=implicit-function-declaration
- mpifileutils ~xattr
- nccmp
- nco
- netlib-scalapack cflags=-Wno-error=implicit-function-declaration
- netlib-scalapack
- omega-h
- openmpi
- openpmd-api ^adios2~mgard
- papi
- papyrus
- pdt
- petsc
- plumed
- precice
- pumi
- py-h5py +mpi
- py-h5py ~mpi
- py-libensemble +mpi +nlopt
- py-petsc4py
- qthreads scheduler=distrib
- raja
- slate ~cuda
@@ -114,7 +146,8 @@ spack:
- swig@4.0.2-fortran
- sz3
- tasmanian
- trilinos +belos +ifpack2 +stokhos
- tau +mpi +python
- trilinos@13.0.1 +belos +ifpack2 +stokhos
- turbine
- umap
- umpire
@@ -124,47 +157,27 @@ spack:
# - alquimia # pflotran: petsc-3.19.4-c6pmpdtpzarytxo434zf76jqdkhdyn37/lib/petsc/conf/rules:169: material_aux.o] Error 1: fortran errors
# - amrex # disabled temporarily pending resolution of unreproducible CI failure
# - axom # axom: CMake Error at axom/sidre/cmake_install.cmake:154 (file): file INSTALL cannot find "/tmp/gitlab-runner-2/spack-stage/spack-stage-axom-0.8.1-jvol6riu34vuyqvrd5ft2gyhrxdqvf63/spack-build-jvol6ri/lib/fortran/axom_spio.mod": No such file or directory.
# - bolt # ld.lld: error: CMakeFiles/bolt-omp.dir/kmp_gsupport.cpp.o: symbol GOMP_atomic_end@@GOMP_1.0 has undefined version GOMP_1.0
# - bricks # bricks: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
# - butterflypack ^netlib-scalapack cflags=-Wno-error=implicit-function-declaration # ftn-2116 ftn: INTERNAL "driver" was terminated due to receipt of signal 01: Hangup.
# - caliper # papi: papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
# - charliecloud # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - cp2k +mpi # libxsmm: ftn-78 ftn: ERROR in command linel; The -f option has an invalid argument, "tree-vectorize".
# - dealii # llvm@14.0.6: ?; intel-tbb@2020.3: clang-15: error: unknown argument: '-flifetime-dse=1'; assimp@5.2.5: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
# - dyninst # requires %gcc
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 # llvm@14.0.6: ?;
# - exaworks # rust: ld.lld: error: relocation R_X86_64_32 cannot be used against local symbol; recompile with -fPIC'; defined in /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o, referenced by /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o:(__no_mmap_for_malloc)
# - flux-core # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - fortrilinos # trilinos-14.0.0: packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp:67:8: error: no type named 'uint32_t' in namespace 'std'
# - gasnet # configure error: User requested --enable-ofi but I don't know how to build ofi programs for your system
# - gptune # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
# - hpctoolkit # dyninst requires %gcc
# - hpx max_cpu_count=512 networking=mpi # libxcrypt-4.4.35
# - lammps # lammps-20240829.1: Reversed (or previously applied) patch detected! Assume -R? [n]
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # mgard:
# - mgard +serial +openmp +timing +unstructured ~cuda # mgard
# - nrm # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
# - nvhpc # requires %gcc
# - omega-h # trilinos-13.4.1: packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp:112:48: error: unknown type name 'uint32_t'
# - openmpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - papi # papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
# - parsec ~cuda # parsec: parsec/fortran/CMakeFiles/parsec_fortran.dir/parsecf.F90.o: ftn-2103 ftn: WARNING in command line. The -W extra option is not supported or invalid and will be ignored.
# - phist # fortran_bindings/CMakeFiles/phist_fort.dir/phist_testing.F90.o: ftn-78 ftn: ERROR in command line. The -f option has an invalid argument, "no-math-errno".
# - plasma # %cce conflict
# - plumed # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - py-h5py +mpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - py-h5py ~mpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - py-jupyterhub # rust: ld.lld: error: relocation R_X86_64_32 cannot be used against local symbol; recompile with -fPIC'; defined in /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o, referenced by /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o:(__no_mmap_for_malloc)
# - py-libensemble +mpi +nlopt # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - py-petsc4py # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
# - quantum-espresso # quantum-espresso: CMake Error at cmake/FindSCALAPACK.cmake:503 (message): A required library with SCALAPACK API not found. Please specify library
# - scr # scr: make[2]: *** [examples/CMakeFiles/test_ckpt_F.dir/build.make:112: examples/test_ckpt_F] Error 1: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0/lib/libmpi_cray.so: undefined reference to `PMI_Barrier'
# - strumpack ~slate # strumpack: [test/CMakeFiles/test_HSS_seq.dir/build.make:117: test/test_HSS_seq] Error 1: ld.lld: error: undefined reference due to --no-allow-shlib-undefined: mpi_abort_
# - tau +mpi +python # libelf: configure: error: installation or configuration problem: C compiler cannot create executables.; papi: papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
# - upcxx # upcxx: configure error: User requested --enable-ofi but I don't know how to build ofi programs for your system
# - variorum # variorum: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/lib64/libpals.so.0: undefined reference to `json_array_append_new@@libjansson.so.4'
# - warpx +python # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
# - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [<builtin>: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2
# - warpx +python # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]

cdash:
build-group: E4S Cray

@@ -31,7 +31,6 @@ spack:
specs:
# CPU
- acts +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +tgeo cxxstd=20
- celeritas +geant4 +hepmc3 +openmp +root +shared +vecgeom cxxstd=20
- dd4hep +ddalign +ddcad +ddcond +dddetectors +dddigi +ddeve +ddg4 +ddrec +edm4hep +hepmc3 +lcio +utilityapps +xercesc
- delphes +pythia8
- edm4hep

@@ -93,7 +93,7 @@ class AbseilCpp(CMakePackage):
depends_on("cmake@3.5:", when="@20190312:", type="build")
depends_on("cmake@3.1:", type="build")

depends_on("googletest~absl", type="test", when="@20220623:")
depends_on("googletest", type="build", when="@20220623:")

def cmake_args(self):
run_tests = self.run_tests and self.spec.satisfies("@20220623:")

@@ -16,7 +16,6 @@ class ActsAlgebraPlugins(CMakePackage):

license("MPL-2.0", checked_by="stephenswat")

version("0.26.2", sha256="0170f22e1a75493b86464f27991117bc2c5a9d52554c75786e321d4c591990e7")
version("0.26.1", sha256="8eb1e9e28ec2839d149b6a6bddd0f983b0cdf71c286c0aeb67ede31727c5b7d3")
version("0.26.0", sha256="301702e3d0a3d12e46ae6d949f3027ddebd0b1167cbb3004d9a4a5697d3adc7f")
version("0.25.0", sha256="bb0cba6e37558689d780a6de8f749abb3b96f8cd9e0c8851474eb4532e1e98b8")

@@ -40,7 +40,6 @@ class Acts(CMakePackage, CudaPackage):
# Supported Acts versions
version("main", branch="main")
version("master", branch="main", deprecated=True) # For compatibility
version("38.2.0", commit="9cb8f4494656553fd9b85955938b79b2fac4c9b0", submodules=True)
version("38.1.0", commit="8a20c88808f10bf4fcdfd7c6e077f23614c3ab90", submodules=True)
version("38.0.0", commit="0a6b5155e29e3b755bf351b8a76067fff9b4214b", submodules=True)
version("37.4.0", commit="4ae9a44f54c854599d1d753222ec36e0b5b4e9c7", submodules=True)

@@ -1,4 +0,0 @@
#!/bin/sh
cd ${0%/*} || exit 1 # Run from this directory

applications/Allwmake $targetType $*
@@ -1,9 +0,0 @@
#!/bin/sh
cd ${0%/*} || exit 1 # Run from this directory

# Parse arguments for library compilation
. $WM_PROJECT_DIR/wmake/scripts/AllwmakeParseArguments

wmake $targetType solvers/additiveFoam/functionObjects/ExaCA
wmake $targetType solvers/additiveFoam/movingHeatSource
wmake $targetType solvers/additiveFoam
@@ -2,14 +2,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import inspect
import os

import llnl.util.tty as tty

import spack.pkg.builtin.openfoam as openfoam
from spack.package import *
from spack.version import Version
from spack.pkg.builtin.openfoam import add_extra_files


class Additivefoam(Package):
@@ -33,36 +29,14 @@ class Additivefoam(Package):
depends_on("openfoam-org@10")

common = ["spack-derived-Allwmake"]
assets = [join_path("applications", "Allwmake"), "Allwmake"]
assets = ["applications/Allwmake", "Allwmake"]

build_script = "./spack-derived-Allwmake"

phases = ["configure", "build", "install"]

def add_extra_files(self, common, local_prefix, local):
"""Copy additional common and local files into the stage.source_path
from the openfoam/common and the package/assets directories,
respectively. Modified from `spack.pkg.builtin.openfoam.add_extra_files()`.
"""
outdir = self.stage.source_path
indir = join_path(os.path.dirname(inspect.getfile(openfoam)), "common")
for f in common:
tty.info("Added file {0}".format(f))
openfoam.install(join_path(indir, f), join_path(outdir, f))

indir = join_path(self.package_dir, "assets", local_prefix)
for f in local:
tty.info("Added file {0}".format(f))
openfoam.install(join_path(indir, f), join_path(outdir, f))

def patch(self):
spec = self.spec
asset_dir = ""
if Version("main") in spec.versions:
asset_dir = "assets_main"
elif Version("1.0.0") in spec.versions:
asset_dir = "assets_1.0.0"
self.add_extra_files(self.common, asset_dir, self.assets)
add_extra_files(self, self.common, self.assets)

def configure(self, spec, prefix):
pass

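The replaced add_extra_files helper above copies shared build scripts from the openfoam package's common directory and version-specific assets into the build stage. A minimal standalone sketch of that copy logic, detached from Spack's API (directory arguments here are illustrative):

import os
import shutil

def add_extra_files(outdir, common_dir, common, asset_dir, local):
    # Shared wrapper scripts come from the openfoam package's common/ tree.
    for f in common:
        shutil.copy(os.path.join(common_dir, f), os.path.join(outdir, f))
    # Version-specific assets come from the package's own assets_* tree.
    for f in local:
        shutil.copy(os.path.join(asset_dir, f), os.path.join(outdir, f))
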
@@ -12,8 +12,8 @@ class Alpgen(CMakePackage, MakefilePackage):
in hadronic collisions.
"""

homepage = "https://alpgen.web.cern.ch/"
url = "https://alpgen.web.cern.ch/V2.1/v214.tgz"
homepage = "http://mlm.home.cern.ch/mlm/alpgen/"
url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz"

tags = ["hep"]

@@ -25,7 +25,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
license("BSD-3-Clause")

version("develop", branch="development")
version("25.01", sha256="29eb35cf67d66b0fd0654282454c210abfadf27fcff8478b256e3196f237c74f")
version("24.12", sha256="ca4b41ac73fabb9cf3600b530c9823eb3625f337d9b7b9699c1089e81c67fc67")
version("24.11", sha256="31cc37b39f15e02252875815f6066046fc56a479bf459362b9889b0d6a202df6")
version("24.10", sha256="a2d15e417bd7c41963749338e884d939c80c5f2fcae3279fe3f1b463e3e4208a")
@@ -152,8 +151,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):

# Build dependencies
depends_on("mpi", when="+mpi")
with when("+linear_solvers"):
depends_on("rocsparse", when="@25.01: +rocm")
with when("+fft"):
depends_on("rocfft", when="+rocm")
depends_on("fftw@3", when="~cuda ~rocm ~sycl")

@@ -16,7 +16,6 @@ class Armadillo(CMakePackage):

license("Apache-2.0")

version("14.2.2", sha256="3054c8e63db3abdf1a5c8f9fdb7e6b4ad833f9bcfb58324c0ff86de0784c70e0")
version("14.0.3", sha256="ebd6215eeb01ee412fed078c8a9f7f87d4e1f6187ebcdc1bc09f46095a4f4003")
version("14.0.2", sha256="248e2535fc092add6cb7dea94fc86ae1c463bda39e46fd82d2a7165c1c197dff")
version("12.8.4", sha256="558fe526b990a1663678eff3af6ec93f79ee128c81a4c8aef27ad328fae61138")
@@ -34,14 +33,14 @@ class Armadillo(CMakePackage):
depends_on("c", type="build")
depends_on("cxx", type="build")

variant("hdf5", default=False, description="Include HDF5 support", when="@:10")
variant("hdf5", default=False, description="Include HDF5 support")

depends_on("cmake@2.8.12:", type="build")
depends_on("cmake@3.5:", type="build", when="@14:")
depends_on("arpack-ng") # old arpack causes undefined symbols
depends_on("blas")
depends_on("lapack")
depends_on("superlu@5.2:5") # only superlu@5 is supported
depends_on("superlu@5.2:")
depends_on("hdf5", when="+hdf5")

# Adds an `#undef linux` to prevent preprocessor expansion of include

@@ -97,8 +97,7 @@ class Chai(CachedCMakePackage, CudaPackage, ROCmPackage):
)
version("1.0", tag="v1.0", commit="501a098ad879dc8deb4a74fcfe8c08c283a10627", submodules=True)

depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated

# Patching Umpire for dual BLT targets import changed MPI target name in Umpire link interface
# We propagate the patch here.

@@ -790,7 +790,7 @@ def edit(self, pkg, spec, prefix):
"# include Plumed.inc as recommended by"
"PLUMED to include libraries and flags"
)
mkf.write("include {0}\n".format(self.pkg["plumed"].plumed_inc))
mkf.write("include {0}\n".format(spec["plumed"].package.plumed_inc))

mkf.write("\n# COMPILER, LINKER, TOOLS\n\n")
mkf.write(

@@ -19,7 +19,6 @@ class Detray(CMakePackage):

license("MPL-2.0", checked_by="stephenswat")

version("0.87.0", sha256="2d4a76432dd6ddbfc00b88b5d482072e471fefc264b60748bb1f9a123963576e")
version("0.86.0", sha256="98350c94e8a2395b8712b7102fd449536857e8158b38a96cc913c79b70301170")
version("0.85.0", sha256="a0121a27fd08243d4a6aab060e8ab379ad5129e96775b45f6a683835767fa8e7")
version("0.84.0", sha256="b1d133a97dc90b1513f8c1ef235ceaa542d80243028a41f59a79300c7d71eb25")
@@ -78,7 +77,6 @@ class Detray(CMakePackage):
depends_on("acts-algebra-plugins +vc", when="+vc")
depends_on("acts-algebra-plugins +eigen", when="+eigen")
depends_on("acts-algebra-plugins +smatrix", when="+smatrix")
depends_on("acts-algebra-plugins@0.26.0:", when="@0.87:")

# Detray imposes requirements on the C++ standard values used by Algebra
# Plugins.

@@ -9,7 +9,7 @@ class Dftd4(MesonPackage):
"""Generally Applicable Atomic-Charge Dependent London Dispersion Correction"""

homepage = "https://www.chemie.uni-bonn.de/pctc/mulliken-center/software/dftd4"
url = "https://github.com/dftd4/dftd4/releases/download/v0.0.0/dftd4-0.0.0.tar.xz"
url = "https://github.com/dftd4/dftd4/releases/download/v3.5.0/dftd4-3.5.0-source.tar.xz"
git = "https://github.com/dftd4/dftd4.git"

maintainers("awvwgk")
@@ -17,8 +17,6 @@ class Dftd4(MesonPackage):
license("LGPL-3.0-only")

version("main", branch="main")
version("3.7.0", sha256="4e8749df6852bf863d5d1831780a2d30e9ac4afcfebbbfe5f6a6a73d06d6c6ee")
version("3.6.0", sha256="56b3b4650853a34347d3d56c93d7596ecbe2208c4a14dbd027959fd4a009679d")
version("3.5.0", sha256="d2bab992b5ef999fd13fec8eb1da9e9e8d94b8727a2e624d176086197a00a46f")
version("3.4.0", sha256="24fcb225cdd5c292ac26f7d3204ee3c4024174adb5272eeda9ae7bc57113ec8d")
version("3.3.0", sha256="408720b8545532d5240dd743c05d57b140af983192dad6d965b0d79393d0a9ef")
@@ -56,8 +54,3 @@ def meson_args(self):
"-Dopenmp={0}".format(str("+openmp" in self.spec).lower()),
"-Dpython={0}".format(str("+python" in self.spec).lower()),
]

def url_for_version(self, version):
if version <= Version("3.6.0"):
return f"https://github.com/dftd4/dftd4/releases/download/v{version}/dftd4-{version}-source.tar.xz"
return super().url_for_version(version)

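The removed url_for_version hook encoded a release-layout change: tarballs up to 3.6.0 carry a -source suffix, while newer releases match the default url template above. A standalone sketch of that branch (plain tuples stand in for Spack's Version type):

def dftd4_url(version):
    base = "https://github.com/dftd4/dftd4/releases/download"
    vstr = ".".join(str(p) for p in version)
    if version <= (3, 6, 0):
        # releases up to 3.6.0 ship a "-source" tarball
        return f"{base}/v{vstr}/dftd4-{vstr}-source.tar.xz"
    # newer releases follow the plain default template
    return f"{base}/v{vstr}/dftd4-{vstr}.tar.xz"

assert dftd4_url((3, 5, 0)).endswith("dftd4-3.5.0-source.tar.xz")
assert dftd4_url((3, 7, 0)).endswith("dftd4-3.7.0.tar.xz")
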
@@ -90,8 +90,6 @@ class Edm4hep(CMakePackage):
# Corresponding changes in EDM4hep landed with https://github.com/key4hep/EDM4hep/pull/314
extends("python", when="@0.10.6:")

conflicts("%clang@:16", when="@0.99.1:", msg="Incomplete consteval support in clang")

def cmake_args(self):
args = [
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),

@@ -20,17 +20,15 @@ class Ensmallen(CMakePackage):

license("BSD-3-Clause")

version("2.22.1", sha256="daf53fe96783043ca33151a3851d054a826fab8d9a173e6bcbbedd4a7eabf5b1")
version("2.21.1", sha256="820eee4d8aa32662ff6a7d883a1bcaf4e9bf9ca0a3171d94c5398fe745008750")
version("2.19.1", sha256="f36ad7f08b0688d2a8152e1c73dd437c56ed7a5af5facf65db6ffd977b275b2e")

depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated

variant("openmp", default=True, description="Use OpenMP for parallelization")

depends_on("cmake@3.3.2:")
depends_on("armadillo@9.800.0:")
depends_on("armadillo@10.8.2:", when="@2.22:")

def cmake_args(self):
args = [self.define_from_variant("USE_OPENMP", "openmp")]

@@ -16,20 +16,24 @@ class Evtgen(CMakePackage):

maintainers("vvolkl")

version("02.02.03", sha256="b642700b703190e3304edb98ff464622db5d03c1cfc5d275ba4a628227d7d6d0")
version("02.02.02", sha256="e543d1213cd5003124139d0dc7eee9247b0b9d44154ff8a88bac52ba91c5dfc9")
version("02.02.01", sha256="1fcae56c6b27b89c4a2f4b224d27980607442185f5570e961f6334a3543c6e77")
version("02.02.00", sha256="0c626e51cb17e799ad0ffd0beea5cb94d7ac8a5f8777b746aa1944dd26071ecf")
version("02.00.00", sha256="02372308e1261b8369d10538a3aa65fe60728ab343fcb64b224dac7313deb719")
# switched to cmake in 02.00.00
version(
"01.07.00",
sha256="2648f1e2be5f11568d589d2079f22f589c283a2960390bbdb8d9d7f71bc9c014",
deprecated=True,
)

depends_on("cxx", type="build") # generated

variant("pythia8", default=True, description="Build with pythia8")
variant("tauola", default=False, description="Build with tauola")
variant("photos", default=False, description="Build with photos")
variant("sherpa", default=False, description="build with sherpa")
variant("hepmc3", default=False, description="Link with hepmc3 (instead of hepmc)")

patch("g2c.patch", when="@01.07.00")
patch("evtgen-2.0.0.patch", when="@02.00.00 ^pythia8@8.304:")

depends_on("hepmc", when="~hepmc3")
@@ -40,8 +44,6 @@ class Evtgen(CMakePackage):
depends_on("photos~hepmc3", when="+photos~hepmc3")
depends_on("tauola+hepmc3", when="+tauola+hepmc3")
depends_on("photos+hepmc3", when="+photos+hepmc3")
depends_on("sherpa@2:", when="@02.02.01: +sherpa")
depends_on("sherpa@:2", when="@:02 +sherpa")

conflicts(
"^pythia8+evtgen",
@@ -51,6 +53,7 @@ class Evtgen(CMakePackage):
"that cannot be resolved at the moment! "
"Use evtgen+pythia8^pythia8~evtgen.",
)
conflicts("+hepmc3", when="@:01", msg="hepmc3 support was added in 02.00.00")

@property
def root_cmakelists_dir(self):
@@ -68,7 +71,6 @@ def cmake_args(self):
args.append(self.define_from_variant("EVTGEN_PYTHIA", "pythia8"))
args.append(self.define_from_variant("EVTGEN_TAUOLA", "tauola"))
args.append(self.define_from_variant("EVTGEN_PHOTOS", "photos"))
args.append(self.define_from_variant("EVTGEN_SHERPA", "sherpa"))
args.append(self.define_from_variant("EVTGEN_HEPMC3", "hepmc3"))

return args
@@ -83,5 +85,50 @@ def patch(self):

filter_file("-shared", "-dynamiclib -undefined dynamic_lookup", "make.inc")

# Taken from AutotoolsPackage
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
:py:meth:`~.AutotoolsPackage.configure_args`
and an appropriately set prefix.
"""
options = getattr(self, "configure_flag_args", [])
options += ["--prefix={0}".format(prefix)]
options += self.configure_args()

with working_dir(self.build_directory, create=True):
configure(*options)

@when("@:01")
def configure_args(self):
args = []

args.append("--hepmcdir=%s" % self.spec["hepmc"].prefix)
if self.spec.satisfies("+pythia8"):
args.append("--pythiadir=%s" % self.spec["pythia8"].prefix)
if self.spec.satisfies("+photos"):
args.append("--photosdir=%s" % self.spec["photos"].prefix)
if self.spec.satisfies("+tauola"):
args.append("--tauoladir=%s" % self.spec["tauola"].prefix)

return args

@when("@:01")
def cmake(self, spec, prefix):
pass

@when("@:01")
def build(self, spec, prefix):
self.configure(spec, prefix)
# avoid parallel compilation errors
# due to libext_shared depending on lib_shared
with working_dir(self.build_directory):
make("lib_shared")
make("all")

@when("@:01")
def install(self, spec, prefix):
with working_dir(self.build_directory):
make("install")

def setup_run_environment(self, env):
env.set("EVTGEN", self.prefix.share)

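For the pre-CMake 01.x line, the @when("@:01") overrides above swap in an autotools-style flow: each enabled variant contributes one --<pkg>dir flag, and lib_shared is built serially before the rest. A standalone sketch of the flag assembly (the prefixes here are hypothetical paths, not Spack objects):

def legacy_configure_args(prefixes, variants):
    args = ["--hepmcdir=%s" % prefixes["hepmc"]]
    if variants.get("pythia8"):
        args.append("--pythiadir=%s" % prefixes["pythia8"])
    if variants.get("photos"):
        args.append("--photosdir=%s" % prefixes["photos"])
    if variants.get("tauola"):
        args.append("--tauoladir=%s" % prefixes["tauola"])
    return args

print(legacy_configure_args({"hepmc": "/opt/hepmc", "pythia8": "/opt/pythia8"},
                            {"pythia8": True}))
# ['--hepmcdir=/opt/hepmc', '--pythiadir=/opt/pythia8']
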
@@ -20,7 +20,6 @@ class FluxCore(AutotoolsPackage):
license("LGPL-3.0-only")

version("master", branch="master")
version("0.67.0", sha256="9406e776cbeff971881143fd1b94c42ec912e5b226401d2d3d91d766dd81de8c")
version("0.66.0", sha256="0a25cfb1ebc033c249614eb2350c6fb57b00cdf3c584d0759c787f595c360daa")
version("0.65.0", sha256="a60bc7ed13b8e6d09e99176123a474aad2d9792fff6eb6fd4da2a00e1d2865ab")
version("0.64.0", sha256="0334d6191915f1b89b70cdbf14f24200f8899da31090df5f502020533b304bb3")
@@ -97,7 +96,6 @@ class FluxCore(AutotoolsPackage):
depends_on("py-pyyaml@3.10:", type=("build", "run"))
depends_on("py-jsonschema@2.3:", type=("build", "run"), when="@:0.58.0")
depends_on("py-ply", type=("build", "run"), when="@0.46.1:")
depends_on("py-setuptools", type="build", when="@0.67.0:")
depends_on("jansson@2.10:")
depends_on("pkgconfig")
depends_on("lz4")

@@ -56,6 +56,8 @@ class Geant4Data(BundlePackage):
"g4incl@1.2",
"g4ensdfstate@3.0",
"g4channeling@1.0",
"g4nudexlib@1.0",
"g4urrpt@1.1",
],
"11.2.2:11.2": [
"g4ndl@4.7.1",
@@ -193,23 +195,6 @@ class Geant4Data(BundlePackage):
for _d in _dsets:
depends_on(_d, type=("build", "run"), when=_vers)

_datasets_tendl = {
"11.0:11.3": "g4tendl@1.4",
"10.4:10.7": "g4tendl@1.3.2",
"10.3:10.3": "g4tendl@1.3",
}

variant("tendl", default=True, when="@10.3:", description="Enable G4TENDL")
with when("+tendl"):
for _vers, _d in _datasets_tendl.items():
depends_on(_d, type=("build", "run"), when="@" + _vers)
variant("nudexlib", default=True, when="@11.3.0:11.3", description="Enable G4NUDEXLIB")
with when("+nudexlib"):
depends_on("g4nudexlib@1.0", type=("build", "run"))
variant("urrpt", default=True, when="@11.3.0:11.3", description="Enable G4URRPT")
with when("+urrpt"):
depends_on("g4urrpt@1.1", type=("build", "run"))

@property
def datadir(self):
spec = self.spec

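The _datasets_tendl table above maps Geant4 version ranges to G4TENDL dataset versions, and the loop under +tendl turns each entry into a conditional dependency. A standalone sketch of the same range lookup (tuples stand in for Spack version ranges):

_datasets_tendl = {
    ((11, 0), (11, 3)): "g4tendl@1.4",
    ((10, 4), (10, 7)): "g4tendl@1.3.2",
    ((10, 3), (10, 3)): "g4tendl@1.3",
}

def tendl_for(geant4_version):
    # Return the dataset whose range contains the (major, minor) version.
    for (lo, hi), dataset in _datasets_tendl.items():
        if lo <= geant4_version[:2] <= hi:
            return dataset
    return None

assert tendl_for((10, 5, 1)) == "g4tendl@1.3.2"
assert tendl_for((11, 2, 2)) == "g4tendl@1.4"
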
@@ -1,26 +0,0 @@
diff --git a/source/g3tog4/include/G3EleTable.hh b/source/g3tog4/include/G3EleTable.hh
index 0ab9c4fd566..18c6f73fde6 100644
--- a/source/g3tog4/include/G3EleTable.hh
+++ b/source/g3tog4/include/G3EleTable.hh
@@ -56,7 +56,7 @@ public: // with description
private:

void LoadUp();
- G4int parse(G4double& Z, char* name, char* sym, G4double& A);
+ G4int parse(G4double& Z, char (&name)[20], char (&sym)[3], G4double& A);

private:

diff --git a/source/g3tog4/src/G3EleTable.cc b/source/g3tog4/src/G3EleTable.cc
index cecc494b201..a2f3af3d6a2 100644
--- a/source/g3tog4/src/G3EleTable.cc
+++ b/source/g3tog4/src/G3EleTable.cc
@@ -64,7 +64,7 @@ G3EleTable::GetEle(G4double Z){
}

G4int
-G3EleTable::parse(G4double& Z, char* name, char* sym, G4double& A){
+G3EleTable::parse(G4double& Z, char (&name)[20], char (&sym)[3], G4double& A){
G4int rc = 0;
if (Z>0 && Z <=_MaxEle){
G4int z = (G4int) Z-1;
@@ -20,7 +20,6 @@ class Geant4(CMakePackage):
executables = ["^geant4-config$"]

maintainers("drbenmorgan", "sethrj")

version("11.3.0", sha256="d9d71daff8890a7b5e0e33ea9a65fe6308ad6713000b43ba6705af77078e7ead")
version("11.2.2", sha256="3a8d98c63fc52578f6ebf166d7dffaec36256a186d57f2520c39790367700c8d")
version("11.2.1", sha256="76c9093b01128ee2b45a6f4020a1bcb64d2a8141386dea4674b5ae28bcd23293")
@@ -204,30 +203,29 @@ def std_when(values):
depends_on("qt@5.9:", when="@11.2:")
conflicts("@:11.1 ^[virtuals=qmake] qt-base", msg="Qt6 not supported before 11.2")

# CMAKE PROBLEMS #
# As released, 10.0.4 has inconsistently capitalised filenames
# in the cmake files; this patch also enables cxxstd 14
patch("geant4-10.0.4.patch", when="@10.0.4")
# Fix member field typo in g4tools wroot
# See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2640
patch("columns-11.patch", when="@11:11.2.2")
patch("columns-10.patch", when="@10.4:10")
# As released, 10.03.03 has issues with respect to using external
# CLHEP.
patch("CLHEP-10.03.03.patch", level=1, when="@10.3")
# Build failure on clang 15, ubuntu 22: see Geant4 problem report #2444
# fixed by ascii-V10-07-03
patch("geant4-10.6.patch", when="@10.0:10.6")
# Enable "17" cxxstd option in CMake (2 different filenames)
patch("geant4-10.3-cxx17-cmake.patch", when="@10.3 cxxstd=17")
patch("geant4-10.4-cxx17-cmake.patch", when="@10.4:10.4.2 cxxstd=17")
# Fix exported cmake: https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556
patch("package-cache.patch", when="@10.7.0:11.1.2^cmake@3.17:")
patch("geant4-10.6.patch", level=1, when="@10.0:10.6")
# These patches can be applied independent of the cxxstd value?
patch("cxx17.patch", when="@10.3 cxxstd=17")
patch("cxx17_geant4_10_0.patch", level=1, when="@10.4.0 cxxstd=17")
patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17")

# BUILD ERRORS #
# Fix C++17: add -D_LIBCPP_ENABLE_CXX17_REMOVED_FEATURES C++ flag
patch("geant4-10.4.3-cxx17-removed-features.patch", when="@10.4.3 cxxstd=17")
# Fix C++20: build error due to removed-in-C++20 `ostream::operator>>(char*)`
# (different, simpler approach than upstream Geant4 changes)
patch("geant4-10.7-cxx20-g3tog4.patch", when="@:10.7 cxxstd=20")
# Fix member field typo in g4tools wroot: https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2640
patch("columns-10.patch", when="@10.4:10")
patch("columns-11.patch", when="@11:11.2.2")
# Fix navigation errors with twisted tubes: https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2619
patch("twisted-tubes.patch", when="@11.2.0:11.2.2")
# See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556
patch("package-cache.patch", level=1, when="@10.7.0:11.1.2^cmake@3.17:")

# Issue with Twisted tubes, see https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2619
patch("twisted-tubes.patch", level=1, when="@11.2.0:11.2.2")

# NVHPC: "thread-local declaration follows non-thread-local declaration"
conflicts("%nvhpc", when="+threads")

@@ -58,20 +58,6 @@ def build_args(self):
args.extend(["-trimpath", "./cmd/gh"])
return args

@property
def check_args(self):
args = super().check_args
skip_tests = (
"TestHasNoActiveToken|TestTokenStoredIn.*|"
"TestSwitchUser.*|TestSwitchClears.*|"
"TestTokenWorksRightAfterMigration|"
"Test_loginRun.*|Test_logoutRun.*|Test_refreshRun.*|"
"Test_setupGitRun.*|Test_CheckAuth|TestSwitchRun.*|"
"Test_statusRun.*|TestTokenRun.*"
)
args.extend([f"-skip={skip_tests}", "./..."])
return args

@run_after("install")
def install_completions(self):
gh = Executable(self.prefix.bin.gh)

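The removed check_args property concatenated test-name alternatives into a single regular expression handed to go test's -skip flag, excluding tests that need credentials or network access. A standalone sketch of the same composition (the test names are taken from the hunk above):

skip_tests = [
    "TestHasNoActiveToken",
    "TestTokenStoredIn.*",
    "TestSwitchUser.*",
    "Test_loginRun.*",
]
args = ["-skip={0}".format("|".join(skip_tests)), "./..."]
print(args)
# ['-skip=TestHasNoActiveToken|TestTokenStoredIn.*|TestSwitchUser.*|Test_loginRun.*', './...']
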
@@ -153,7 +153,7 @@ def common_args(self):
"CC={0}".format(env["CC"]),
"PREFIX={0}".format(self.spec.prefix.bin),
"MFEM_DIR={0}".format(self.spec["mfem"].prefix),
"CONFIG_MK={0}".format(self.pkg["mfem"].config_mk),
"CONFIG_MK={0}".format(self.spec["mfem"].package.config_mk),
]

# https://github.com/spack/spack/issues/42839

@@ -17,13 +17,11 @@ class Gnutls(AutotoolsPackage):

homepage = "https://www.gnutls.org"
url = "https://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.19.tar.xz"
list_depth = 2

maintainers("alecbcs")

license("LGPL-2.1-or-later")

version("3.8.8", sha256="ac4f020e583880b51380ed226e59033244bc536cad2623f2e26f5afa2939d8fb")
version("3.8.4", sha256="2bea4e154794f3f00180fa2a5c51fe8b005ac7a31cd58bd44cdfa7f36ebc3a9b")
version("3.8.3", sha256="f74fc5954b27d4ec6dfbb11dea987888b5b124289a3703afcada0ee520f4173e")
version("3.7.8", sha256="c58ad39af0670efe6a8aee5e3a8b2331a1200418b64b7c51977fb396d4617114")

@@ -15,8 +15,6 @@ class Googletest(CMakePackage):
maintainers("sethrj")

version("main", branch="main")
version("1.15.2", sha256="7b42b4d6ed48810c5362c265a17faebe90dc2373c885e5216439d37927f02926")
version("1.15.0", sha256="7315acb6bf10e99f332c8a43f00d5fbb1ee6ca48c52f6b936991b216c586aaad")
version("1.14.0", sha256="8ad598c73ad796e0d8280b082cebd82a630d73e73cd3c70057938a6501bba5d7")
version("1.13.0", sha256="ad7fdba11ea011c1d925b3289cf4af2c66a352e18d4c7264392fead75e919363")
version("1.12.1", sha256="81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2")
@@ -31,18 +29,14 @@ class Googletest(CMakePackage):
depends_on("c", type="build")
depends_on("cxx", type="build")

variant("absl", default=False, when="@1.12.1:", description="Build with abseil and RE2")
depends_on("abseil-cpp", when="+absl")
depends_on("re2", when="+absl")

variant("gmock", default=True, when="@1.8:", description="Build with gmock")
variant("pthreads", default=True, description="Build multithreaded version with pthreads")
variant("shared", default=True, description="Build shared libraries (DLLs)")

variant(
"cxxstd",
default="14",
values=("98", "11", "14", "17", "20"),
default="11",
values=("98", "11", "14", "17"),
multi=False,
description="Use the specified C++ standard when building",
)
@@ -54,13 +48,12 @@ def cmake_args(self):
args = [
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
self.define_from_variant("BUILD_GMOCK", "gmock"),
self.define_from_variant("GTEST_HAS_ABSL", "absl"),
self.define("gtest_disable_pthreads", spec.satisfies("~pthreads")),
]

if spec.satisfies("@:1.8.0"):
args.append(self.define("gtest_disable_pthreads", not spec.satisfies("+pthreads")))
if spec.satisfies("@1.8:"):
# New style (contains both Google Mock and Google Test)
args.append(self.define("BUILD_GTEST", True))
args.append(self.define_from_variant("BUILD_GMOCK", "gmock"))

return args

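The consolidated cmake_args above defines gtest_disable_pthreads unconditionally from the variant, where the older code gated it on releases up to 1.8.0. A standalone sketch of the resulting flag set (booleans stand in for Spack variant checks):

def gtest_cmake_args(shared, cxxstd, gmock, absl, pthreads):
    onoff = lambda flag: "ON" if flag else "OFF"
    return [
        "-DBUILD_SHARED_LIBS=" + onoff(shared),
        "-DCMAKE_CXX_STANDARD=" + cxxstd,
        "-DBUILD_GMOCK=" + onoff(gmock),
        "-DGTEST_HAS_ABSL=" + onoff(absl),
        # note the inversion: the variant enables pthreads, the define disables them
        "-Dgtest_disable_pthreads=" + onoff(not pthreads),
    ]

print(gtest_cmake_args(True, "14", True, False, True))
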
@@ -585,7 +585,7 @@ def patch(self):
)

if self.spec.satisfies("+plumed"):
self["plumed"].apply_patch(self)
self.spec["plumed"].package.apply_patch(self)

if self.spec.satisfies("%nvhpc"):
# Disable obsolete workaround

@@ -35,10 +35,6 @@ class Herwig3(AutotoolsPackage):
depends_on("thepeg@2.2.3", when="@7.2.3")
depends_on("thepeg@2.3.0", when="@7.3.0")
depends_on("evtgen")
conflicts(
"^evtgen ~photos ~pythia8 ~sherpa ~tauola",
msg="At least one external EvtGen component required",
)

depends_on("boost +math+test")
depends_on("python", type=("build", "run"))

@@ -17,7 +17,6 @@ class Hevea(MakefilePackage):
license("LGPL-2.0-only")

version("develop", branch="master")
version("2.36", sha256="9848359f935af24b6f962b2ed5d5ac32614bffeb37da374b0960cc0f58e69f0c")
version("2.35", sha256="78f834cc7a8112ec59d0b8acdfbed0c8ac7dbb85f964d0be1f4eed04f25cdf54")
version("2.34", sha256="f505a2a5bafdc2ea389ec521876844e6fdcb5c1b656396b7e8421c1631469ea2")
version("2.33", sha256="122f9023f9cfe8b41dd8965b7d9669df21bf41e419bcf5e9de5314f428380d0f")
@@ -28,8 +27,6 @@ class Hevea(MakefilePackage):
# Dependency demands ocamlbuild
depends_on("ocaml")
depends_on("ocamlbuild")
depends_on("ocaml@4", when="@:2.35")
depends_on("ocaml@4.08.0:", when="@2.34:")

def edit(self, spec, prefix):
env["PREFIX"] = self.spec.prefix

@@ -58,8 +58,8 @@ def build_targets(self):
spec = self.spec

targets.append("MFEM_DIR=%s" % spec["mfem"].prefix)
targets.append("CONFIG_MK=%s" % self["mfem"].config_mk)
targets.append("TEST_MK=%s" % self["mfem"].test_mk)
targets.append("CONFIG_MK=%s" % spec["mfem"].package.config_mk)
targets.append("TEST_MK=%s" % spec["mfem"].package.test_mk)
if spec.satisfies("@:2.0"):
targets.append("CXX=%s" % spec["mpi"].mpicxx)
if self.spec.satisfies("+ofast %gcc"):

@@ -19,10 +19,6 @@ class Libmesh(AutotoolsPackage):

version("master", branch="master", submodules=True)

version("1.7.6", sha256="65093cc97227193241f78647ec2f04a1852437f40d3d1c49285c6ff712cd0bc8")
version("1.7.5", sha256="03a50cb471e7724a46623f0892cf77152f969d9ba89f8fcebd20bdc0845aab83")
version("1.7.4", sha256="0d603aacd2761292dff61ff7ce59d9fddd8691133f0219f7d1576bd4626b77b2")
version("1.7.3", sha256="fe0bec45a083ddd9e87dc51ab7e68039f3859e7ef0c4a87e76e562b172b6f739")
version("1.7.1", sha256="0387d62773cf92356eb128ba92f767e56c298d78f4b97446e68bf288da1eb6b4")
version("1.4.1", sha256="67eb7d5a9c954d891ca1386b70f138333a87a141d9c44213449ca6be69a66414")
version("1.4.0", sha256="62d7fce89096c950d1b38908484856ea63df57754b64cde6582e7ac407c8c81d")

@@ -42,7 +42,7 @@ class Libspatialite(AutotoolsPackage):
depends_on("geos@:3.9", when="@:5.0.0")
depends_on("iconv")
depends_on("librttopo", when="@5.0.1:")
depends_on("libxml2+http")
depends_on("libxml2")
depends_on("minizip", when="@5.0.0:")
depends_on("proj")
depends_on("proj@:5", when="@:4")

@@ -28,7 +28,6 @@ def url_for_version(self, version):

license("MIT")

version("2.13.5", sha256="74fc163217a3964257d3be39af943e08861263c4231f9ef5b496b6f6d4c7b2b6")
version("2.13.4", sha256="65d042e1c8010243e617efb02afda20b85c2160acdbfbcb5b26b80cec6515650")
version("2.12.9", sha256="59912db536ab56a3996489ea0299768c7bcffe57169f0235e7f962a91f483590")
version("2.11.9", sha256="780157a1efdb57188ec474dca87acaee67a3a839c2525b2214d318228451809f")
@@ -66,7 +65,6 @@ def url_for_version(self, version):

depends_on("c", type="build")

variant("http", default=False, description="Enable HTTP support")
variant("python", default=False, description="Enable Python support")
variant("shared", default=True, description="Build shared library")
variant("pic", default=True, description="Enable position-independent code (PIC)")
@@ -253,8 +251,6 @@ def configure_args(self):
else:
args.append("--without-python")

args.extend(self.with_or_without("http"))

args.extend(self.enable_or_disable("shared"))
# PIC setting is taken care of above by self.flag_handler()
args.append("--without-pic")
@@ -298,6 +294,4 @@ def configure(self, pkg, spec, prefix):
]
if spec.satisfies("+python"):
opts.append("python=yes")
if spec.satisfies("+http"):
opts.append("http=yes")
cscript("configure.js", *opts)

@@ -56,7 +56,6 @@ class Llvm(CMakePackage, CudaPackage, LlvmDetection, CompilerPackage):
license("Apache-2.0")

version("main", branch="main")
version("19.1.6", sha256="f07fdcbb27b2b67aa95e5ddadf45406b33228481c250e65175066d36536a1ee2")
version("19.1.5", sha256="e2204b9903cd9d7ee833a2f56a18bef40a33df4793e31cc090906b32cbd8a1f5")
version("19.1.4", sha256="010e1fd3cabee8799bd2f8a6fbc68f28207494f315cf9da7057a2820f79fd531")
version("19.1.3", sha256="e5106e2bef341b3f5e41340e4b6c6a58259f4021ad801acf14e88f1a84567b05")
@@ -1144,12 +1143,12 @@ def post_install(self):
with open(os.path.join(self.prefix.bin, cfg), "w") as f:
print(gcc_install_dir_flag, file=f)

def llvm_config(self, *args, result=None, **kwargs):
def llvm_config(self, *args, **kwargs):
lc = Executable(self.prefix.bin.join("llvm-config"))
if not kwargs.get("output"):
kwargs["output"] = str
ret = lc(*args, **kwargs)
if result == "list":
if kwargs.get("result") == "list":
return ret.split()
else:
return ret

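With the signature change above, the new llvm_config reads result out of **kwargs. In a plain-Python reading the key would also be forwarded to the executable unless it is removed first; a minimal standalone sketch that pops it before the call (assumes llvm-config is on PATH):

import subprocess

def llvm_config(*args, **kwargs):
    result = kwargs.pop("result", None)  # consume before forwarding
    out = subprocess.run(["llvm-config", *args],
                         capture_output=True, text=True, check=True).stdout
    return out.split() if result == "list" else out

# llvm_config("--libdir")               -> newline-terminated string
# llvm_config("--libs", result="list")  -> list of tokens
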
@@ -22,7 +22,6 @@ class Lmod(AutotoolsPackage):

license("MIT")

version("8.7.55", sha256="f85ed9b55c23afb563fa99c7201037628be016e8d88a1aa8dba4632c0ab450bd")
version("8.7.37", sha256="171529152fedfbb3c45d27937b0eaa1ee62b5e5cdac3086f44a6d56e5d1d7da4")
version("8.7.24", sha256="8451267652059b6507b652e1b563929ecf9b689ffb20830642085eb6a55bd539")
version("8.7.20", sha256="c04deff7d2ca354610a362459a7aa9a1c642a095e45a4b0bb2471bb3254e85f4")

@@ -1309,7 +1309,7 @@ def libs(self):
@property
def config_mk(self):
"""Export the location of the config.mk file.
This property can be accessed using pkg["mfem"].config_mk
This property can be accessed using spec["mfem"].package.config_mk
"""
dirs = [self.prefix, self.prefix.share.mfem]
for d in dirs:
@@ -1321,7 +1321,7 @@ def config_mk(self):
@property
def test_mk(self):
"""Export the location of the test.mk file.
This property can be accessed using pkg["mfem"].test_mk.
This property can be accessed using spec["mfem"].package.test_mk.
In version 3.3.2 and newer, the location of test.mk is also defined
inside config.mk, variable MFEM_TEST_MK.
"""

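Both docstrings now advertise the spec["mfem"].package route to the exported makefile fragments. A hedged sketch of a dependent package consuming them, modeled on the build_targets hunk earlier in this diff (the class name is hypothetical):

from spack.package import *

class MyMfemApp(MakefilePackage):
    depends_on("mfem")

    @property
    def build_targets(self):
        spec = self.spec
        return [
            "MFEM_DIR=%s" % spec["mfem"].prefix,
            "CONFIG_MK=%s" % spec["mfem"].package.config_mk,
            "TEST_MK=%s" % spec["mfem"].package.test_mk,
        ]
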
@@ -19,7 +19,6 @@ class Mlpack(CMakePackage):

license("BSD-3-Clause", checked_by="wdconinc")

version("4.5.1", sha256="58059b911a78b8bda91eef4cfc6278383b24e71865263c2e0569cf5faa59dda3")
version("4.5.0", sha256="aab70aee10c134ef3fe568843fe4b3bb5e8901af30ea666f57462ad950682317")
version("4.4.0", sha256="61c604026d05af26c244b0e47024698bbf150dfcc9d77b64057941d7d64d6cf6")
version("4.3.0", sha256="08cd54f711fde66fc3b6c9db89dc26776f9abf1a6256c77cfa3556e2a56f1a3d")
@@ -30,7 +29,8 @@ class Mlpack(CMakePackage):

depends_on("cxx", type="build") # generated

variant("go", default=False, description="Build Go bindings", when="@4.5.1:")
# TODO: Go bindings are not supported due to the absence of gonum in spack
# variant("go", default=False, description="Build Go bindings")
variant("julia", default=False, description="Build Julia bindings")
variant("python", default=False, description="Build Python bindings")
variant("r", default=False, description="Build R bindings")
@@ -47,9 +47,11 @@ class Mlpack(CMakePackage):
conflicts("%gcc@:4", when="@4.0:", msg="mlpack 4.0+ requires at least gcc-5 with C++14")
conflicts("%gcc@:7", when="@4.4:", msg="mlpack 4.4+ requires at least gcc-8 with C++17")

with when("+go"):
# ref: src/mlpack/bindings/go/CMakeLists.txt
depends_on("go@1.11.0:")
# TODO: Go bindings are not supported due to the absence of gonum in spack
# with when("+go"):
# # ref: src/mlpack/bindings/go/CMakeLists.txt
# depends_on("go@1.11.0:")
# depends_on("gonum")
with when("+julia"):
# ref: src/mlpack/bindings/julia/CMakeLists.txt
depends_on("julia@0.7.0:")
@@ -83,7 +85,7 @@ class Mlpack(CMakePackage):
def cmake_args(self):
args = [
self.define("BUILD_CLI_EXECUTABLES", True),
self.define_from_variant("BUILD_GO_BINDINGS", "go"),
# self.define_from_variant("BUILD_GO_BINDINGS", "go"),
self.define_from_variant("BUILD_JULIA_BINDINGS", "julia"),
self.define_from_variant("BUILD_PYTHON_BINDINGS", "python"),
self.define_from_variant("BUILD_R_BINDINGS", "r"),

@@ -15,7 +15,6 @@ class Mold(CMakePackage):

license("MIT")

version("2.36.0", sha256="3f57fe75535500ecce7a80fa1ba33675830b7d7deb1e5ee9a737e2bc43cdb1c7")
version("2.35.1", sha256="912b90afe7fde03e53db08d85a62c7b03a57417e54afc72c08e2fa07cab421ff")
version("2.35.0", sha256="2703f1c88c588523815886478950bcae1ef02190dc4787e0d120a293b1a46e3b")
version("2.34.1", sha256="a8cf638045b4a4b2697d0bcc77fd96eae93d54d57ad3021bf03b0333a727a59d")

@@ -147,9 +147,11 @@ def _copy_arch_file(self, lib):
def _append_option(self, opts, lib):
if lib != "python":
self._copy_arch_file(lib)
lib_pkg = self[lib]
spec = self.spec
lib_prefix = (
lib_pkg.component_prefix if lib_pkg.name == "intel-oneapi-mkl" else lib_pkg.prefix
spec[lib].package.component_prefix
if spec[lib].name == "intel-oneapi-mkl"
else spec[lib].prefix
)
opts.extend(["--with-{0}".format(lib), "--{0}-prefix".format(lib), lib_prefix])

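Each call above contributes a --with-<lib> switch plus a --<lib>-prefix/path pair, with intel-oneapi-mkl special-cased because its usable root is component_prefix rather than prefix. A standalone sketch of the assembly (the path is illustrative):

def append_option(opts, lib, name, prefix, component_prefix=None):
    # MKL's usable root lives under its component directory.
    lib_prefix = component_prefix if name == "intel-oneapi-mkl" else prefix
    opts.extend(["--with-{0}".format(lib), "--{0}-prefix".format(lib), lib_prefix])

opts = []
append_option(opts, "fftw", "fftw", "/opt/spack/fftw-3.3.10")
print(opts)  # ['--with-fftw', '--fftw-prefix', '/opt/spack/fftw-3.3.10']
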
@@ -10,30 +10,27 @@ class Neko(AutotoolsPackage, CudaPackage, ROCmPackage):
for high-fidelity computational fluid dynamics
"""

homepage = "https://neko.cfd"
homepage = "https://github.com/ExtremeFLOW/neko"
git = "https://github.com/ExtremeFLOW/neko.git"
url = "https://github.com/ExtremeFLOW/neko/releases/download/v0.3.2/neko-0.3.2.tar.gz"
maintainers("njansson")

version("develop", branch="develop")
version("0.9.1", sha256="098bee5cb807d10cdf2fb56111ba8cbc592882a87e4dae18caf9dbda894611ef")
version("0.9.0", sha256="3cffe629ada1631d8774fa51d8bb14b95dc0cea21578c0e07e70deb611a5091a")
version("0.8.1", sha256="ac8162bc18e7112fd21b49c5a9c36f45c7b84896e90738be36a182990798baec")
version("0.8.0", sha256="09d0b253c8abda9f384bf8f03b17b50d774cb0a1f7b72744a8e863acac516a51")
version("0.7.2", sha256="5dd17fbae83d0b26dc46fafce4e5444be679cdce9493cef4ff7d504e2f854254")
version("0.7.1", sha256="c935c3d93b0975db46448045f97aced6ac2cab31a2b8803047f8086f98dcb981")
version("0.7.0", sha256="fe871e0a79f388073e0b3dc191d1c0d5da3a53883f5b1951d88b9423fc79a53c")

with default_args(deprecated=True):
version("0.6.1", sha256="6282baaf9c8a201669e274cba23c37922f7ad701ba20ef086442e48f00dabf29")
version("0.6.0", sha256="ce37c7cea1a7bf1bf554c5717aa7fed35bbd079ff68c2fc9d3529facc717e31a")
version("0.5.2", sha256="8873f5ada106f92f21c9bb13ea8164550bccde9301589b9e7f1c1a82a2efe2b8")
version("0.5.1", sha256="8b176bcc9f2d4a6804b68dd93a2f5e02e2dfa986d5c88063bbc72d39e9659cc4")
version("0.5.0", sha256="01a745f2e19dd278330889a0dd6c5ab8af49da99c888d95c10adb5accc1cbfc4")
version("0.4.3", sha256="ba8fde09cbc052bb4791a03f69c880705615b572982cd3177ee31e4e14931da2")
version("0.4.2", sha256="927f926bdbf027c30e8e383e1790e84b60f5a9ed61e48a413092aac2ab24abcc")
version("0.3.2", sha256="0628910aa9838a414f2f27d09ea9474d1b3d7dcb5a7715556049a2fdf81a71ae")
version("0.3.0", sha256="e46bef72f694e59945514ab8b1ad7d74f87ec9dca2ba2b230e2148662baefdc8")
version("0.6.1", sha256="6282baaf9c8a201669e274cba23c37922f7ad701ba20ef086442e48f00dabf29")
version("0.6.0", sha256="ce37c7cea1a7bf1bf554c5717aa7fed35bbd079ff68c2fc9d3529facc717e31a")
version("0.5.2", sha256="8873f5ada106f92f21c9bb13ea8164550bccde9301589b9e7f1c1a82a2efe2b8")
version("0.5.1", sha256="8b176bcc9f2d4a6804b68dd93a2f5e02e2dfa986d5c88063bbc72d39e9659cc4")
version("0.5.0", sha256="01a745f2e19dd278330889a0dd6c5ab8af49da99c888d95c10adb5accc1cbfc4")
version("0.4.3", sha256="ba8fde09cbc052bb4791a03f69c880705615b572982cd3177ee31e4e14931da2")
version("0.4.2", sha256="927f926bdbf027c30e8e383e1790e84b60f5a9ed61e48a413092aac2ab24abcc")
version("0.3.2", sha256="0628910aa9838a414f2f27d09ea9474d1b3d7dcb5a7715556049a2fdf81a71ae")
version("0.3.0", sha256="e46bef72f694e59945514ab8b1ad7d74f87ec9dca2ba2b230e2148662baefdc8")
version("develop", branch="develop")

depends_on("c", type="build") # generated
depends_on("fortran", type="build") # generated

@@ -35,7 +35,7 @@ class Nfft(AutotoolsPackage):
@property
def fftw_selected_precisions(self):
if not self._fftw_precisions:
self._fftw_precisions = self["fftw"].selected_precisions
self._fftw_precisions = self.spec["fftw"].package.selected_precisions
return self._fftw_precisions

def configure(self, spec, prefix):

@@ -36,27 +36,13 @@ class Ninja(Package):
version("1.7.2", sha256="2edda0a5421ace3cf428309211270772dd35a91af60c96f93f90df6bc41b16d9")
version("1.6.0", sha256="b43e88fb068fe4d92a3dfd9eb4d19755dae5c33415db2e9b7b61b4659009cde7")

# ninja@1.12: needs googletest source, but 1.12 itself needs a patch to use it
resource(
name="googletest",
url="https://github.com/google/googletest/archive/refs/tags/release-1.12.1.tar.gz",
sha256="81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2",
placement="gtest",
when="@1.12:",
)
patch(
"https://github.com/ninja-build/ninja/commit/f14a949534d673f847c407644441c8f37e130ce9.patch?full_index=1",
sha256="93f4bb3234c3af04e2454c6f0ef2eca3107edd4537a70151ea66f1a1d4c22dad",
when="@1.12",
)
depends_on("c", type="build") # generated
depends_on("cxx", type="build") # generated

variant(
"re2c", default=not sys.platform == "win32", description="Enable building Ninja with re2c"
)

depends_on("c", type="build") # generated
depends_on("cxx", type="build") # generated

depends_on("python", type="build")
depends_on("re2c@0.11.3:", type="build", when="+re2c")

@@ -68,10 +54,7 @@ def determine_version(cls, exe):
return output.strip()

def configure(self, spec, prefix):
if self.run_tests and spec.satisfies("@1.12:"):
python("configure.py", "--bootstrap", "--gtest-source-dir=gtest")
else:
python("configure.py", "--bootstrap")
python("configure.py", "--bootstrap")

@run_after("configure")
@on_package_attributes(run_tests=True)

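The removed configure branch only added the googletest flag when tests were requested on ninja@1.12:, pointing the bootstrap at the resource staged under ./gtest. A standalone sketch of that branch:

def bootstrap_args(run_tests, is_1_12_or_newer):
    args = ["configure.py", "--bootstrap"]
    if run_tests and is_1_12_or_newer:
        # the googletest resource above is staged under ./gtest
        args.append("--gtest-source-dir=gtest")
    return args

print(bootstrap_args(run_tests=True, is_1_12_or_newer=True))
# ['configure.py', '--bootstrap', '--gtest-source-dir=gtest']
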
@@ -78,10 +78,7 @@ def install(self, spec, prefix):
string=True,
)

if self.spec.satisfies("@4.8.0:"):
base_args += [f"CC={self.compiler.cc}"]

configure(*(base_args))
configure(*(base_args), f"CC={self.compiler.cc}")

make("world.opt")
make("install", "PREFIX={0}".format(prefix))

@@ -20,17 +20,11 @@ class Ocamlbuild(MakefilePackage):

# Add proper versions here.
version("master", branch="master")
version("0.15.0", sha256="d3f6ee73100b575d4810247d10ed8f53fccef4e90daf0e4a4c5f3e6a3030a9c9")
version("0.14.3", sha256="ce151bfd2141abc6ee0b3f25ba609e989ff564a48bf795d6fa7138a4db0fc2e1")
version("0.14.2", sha256="62d2dab6037794c702a83ac584a7066d018cf1645370d1f3d5764c2b458791b1")
version("0.14.1", sha256="4e1279ff0ef80c862eaa5207a77020d741e89ef94f0e4a92a37c4188dbf08256")
version("0.14.0", sha256="87b29ce96958096c0a1a8eeafeb6268077b2d11e1bf2b3de0f5ebc9cf8d42e78")
version("0.13.1", sha256="79839544bcaebc8f9f0d73d029e2b67e2c898bba046c559ea53de81ea763408c")

# Add dependencies if required.
depends_on("ocaml")
depends_on("ocaml@:5.0.0", when="@:0.14.1")
depends_on("ocaml@:5.1.1", when="@:0.14.2")

# Installation : https://github.com/ocaml/ocamlbuild/
def edit(self, spec, prefix):

@@ -32,7 +32,6 @@ class Openmpi(AutotoolsPackage, CudaPackage):
url = "https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.0.tar.bz2"
list_url = "https://www.open-mpi.org/software/ompi/"
git = "https://github.com/open-mpi/ompi.git"
cxxname = "mpic++"

maintainers("hppritcha", "naughtont3")

@@ -886,7 +885,7 @@ def setup_run_environment(self, env):
# Because MPI is both a runtime and a compiler, we have to setup the
# compiler components as part of the run environment.
env.set("MPICC", join_path(self.prefix.bin, "mpicc"))
env.set("MPICXX", join_path(self.prefix.bin, self.cxxname))
env.set("MPICXX", join_path(self.prefix.bin, "mpic++"))
env.set("MPIF77", join_path(self.prefix.bin, "mpif77"))
env.set("MPIF90", join_path(self.prefix.bin, "mpif90"))
# Open MPI also has had mpifort since v1.7, so we can set MPIFC to that
@@ -928,7 +927,7 @@ def setup_dependent_build_environment(self, env, dependent_spec):

def setup_dependent_package(self, module, dependent_spec):
self.spec.mpicc = join_path(self.prefix.bin, "mpicc")
self.spec.mpicxx = join_path(self.prefix.bin, self.cxxname)
self.spec.mpicxx = join_path(self.prefix.bin, "mpic++")
self.spec.mpifc = join_path(self.prefix.bin, "mpif90")
self.spec.mpif77 = join_path(self.prefix.bin, "mpif77")

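Because the MPI wrappers double as compilers, the package exports wrapper paths both at run time and to dependents; after this hunk MPICXX is always the fixed mpic++ name instead of the removed cxxname attribute. A standalone sketch of the exported mapping (the prefix is illustrative):

import os

def mpi_wrapper_env(prefix_bin):
    return {
        "MPICC": os.path.join(prefix_bin, "mpicc"),
        "MPICXX": os.path.join(prefix_bin, "mpic++"),  # fixed name after this change
        "MPIF77": os.path.join(prefix_bin, "mpif77"),
        "MPIF90": os.path.join(prefix_bin, "mpif90"),
    }

print(mpi_wrapper_env("/opt/openmpi/bin")["MPICXX"])  # /opt/openmpi/bin/mpic++
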
@@ -84,10 +84,10 @@ def post_install(self):
pyso = "pyopenvdb.dylib"
else:
pyso = "pyopenvdb.so"
pyver = f"python{self['python'].version.up_to(2)}"
pyver = "python{0}".format(spec["python"].package.version.up_to(2))

src = self.prefix.lib.join(pyver).join(pyso)
src = prefix.lib.join(pyver).join(pyso)
if not os.path.isfile(src):
src = self.prefix.lib64.join(pyver).join(pyso)
src = prefix.lib64.join(pyver).join(pyso)
assert os.path.isfile(src)
os.rename(src, os.path.join(python_platlib, pyso))

@@ -21,11 +21,11 @@ def home(self):

@property
def headers(self):
return self["mesa"].libosmesa_headers
return self.spec["mesa"].package.libosmesa_headers

@property
def libs(self):
return self["mesa"].libosmesa_libs
return self.spec["mesa"].package.libosmesa_libs

@property
def gl_headers(self):
@@ -33,4 +33,4 @@ def gl_headers(self):

@property
def gl_libs(self):
return self["mesa"].libosmesa_libs
return self.spec["mesa"].package.libosmesa_libs

@@ -19,8 +19,7 @@ class Palace(CMakePackage):
version("0.12.0", tag="v0.12.0", commit="8c192071206466638d5818048ee712e1fada386f")
version("0.11.2", tag="v0.11.2", commit="6c3aa5f84a934a6ddd58022b2945a1bdb5fa329d")

depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated

variant("shared", default=True, description="Build shared libraries")
variant("int64", default=False, description="Use 64 bit integers")

@@ -31,7 +31,7 @@ class Pnfft(AutotoolsPackage):
@property
def fftw_selected_precisions(self):
if not self._fftw_precisions:
self._fftw_precisions = self["fftw"].selected_precisions
self._fftw_precisions = self.spec["fftw"].package.selected_precisions
return self._fftw_precisions

def configure(self, spec, prefix):

@@ -14,10 +14,6 @@ class Podman(Package):

license("Apache-2.0")

version("4.9.3", sha256="37afc5bba2738c68dc24400893b99226c658cc9a2b22309f4d7abe7225d8c437")
version("4.8.3", sha256="3a99b6c82644fa52929cf4143943c63d6784c84094892bc0e14197fa38a1c7fa")
version("4.7.2", sha256="10346c5603546427bd809b4d855d1e39b660183232309128ad17a64969a0193d")
version("4.6.2", sha256="2d8e04f0c3819c3f0ed1ca5d01da87e6d911571b96ae690448f7f75df41f2ad1")
version("4.5.1", sha256="ee2c8b02b7fe301057f0382637b995a9c6c74e8d530692d6918e4c509ade6e39")
version("4.3.1", sha256="455c29c4ee78cd6365e5d46e20dd31a5ce4e6e1752db6774253d76bd3ca78813")
version("3.4.7", sha256="4af6606dd072fe946960680611ba65201be435b43edbfc5cc635b2a01a899e6e")

@@ -16,12 +16,9 @@ class PyArrow(PythonPackage):
homepage = "https://arrow.readthedocs.io/en/latest/"
pypi = "arrow/arrow-0.16.0.tar.gz"

maintainers("climbfuji")

license("Apache-2.0")

# https://github.com/spack/spack/issues/48477
# version("1.3.0", sha256="d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85")
version("1.3.0", sha256="d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85")
version("1.2.3", sha256="3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1")
version("1.2.2", sha256="05caf1fd3d9a11a1135b2b6f09887421153b94558e5ef4d090b567b47173ac2b")
version("1.2.1", sha256="c2dde3c382d9f7e6922ce636bf0b318a7a853df40ecb383b29192e6c5cc82840")
@@ -29,15 +26,12 @@ class PyArrow(PythonPackage):
version("0.14.7", sha256="67f8be7c0cf420424bc62d8d7dc40b44e4bb2f7b515f9cc2954fb36e35797656")
version("0.14.1", sha256="2d30837085011ef0b90ff75aa0a28f5c7d063e96b7e76b6cbc7e690310256685")

# https://github.com/spack/spack/issues/48477
# depends_on("python@3.8:", type=("build", "run"), when="@1.3:")
depends_on("python@3.8:", type=("build", "run"), when="@1.3:")
depends_on("python@3.6:", type=("build", "run"), when="@1.2.1:")
depends_on("python@2.7:2.8,3.5:", type=("build", "run"), when="@:0.16.0")
depends_on("py-setuptools", type="build", when="@:1.2")
# https://github.com/spack/spack/issues/48477
# depends_on("py-flit-core@3.2:3", type="build", when="@1.3:")
depends_on("py-flit-core@3.2:3", type="build", when="@1.3:")
depends_on("py-python-dateutil", type=("build", "run"))
depends_on("py-typing-extensions", type=("build", "run"), when="@1.2.1:1.2 ^python@:3.7")
depends_on("py-python-dateutil@2.7.0:", type=("build", "run"), when="@1.2.1:")
# https://github.com/spack/spack/issues/48477
# depends_on("py-types-python-dateutil@2.8.10:", type=("build", "run"), when="@1.3:")
depends_on("py-types-python-dateutil@2.8.10:", type=("build", "run"), when="@1.3:")

@@ -14,7 +14,6 @@ class PyClick(PythonPackage):

license("BSD-3-Clause")

version("8.1.8", sha256="ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a")
version("8.1.7", sha256="ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de")
version("8.1.3", sha256="7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e")
version("8.0.3", sha256="410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b")
@@ -29,10 +28,7 @@ class PyClick(PythonPackage):

# Needed to ensure that Spack can bootstrap black with Python 3.6
depends_on("python@3.7:", when="@8.1:", type=("build", "run"))
with when("@8.1.8:"):
depends_on("py-flit-core@:3", type="build")
with when("@:8.1.7"):
depends_on("py-setuptools", type="build")
depends_on("py-setuptools", type="build")

depends_on("py-importlib-metadata", when="@8: ^python@:3.7", type=("build", "run"))
depends_on("py-colorama", when="@8: platform=windows", type=("build", "run"))

@@ -10,20 +10,16 @@ class PyCylcFlow(PythonPackage):

    homepage = "https://cylc.org"
    pypi = "cylc-flow/cylc-flow-8.1.4.tar.gz"
    git = "https://github.com/cylc/cylc-flow.git"

    maintainers("LydDeb", "climbfuji")

    license("GPL-3.0-only")

    # Version 8.3.6 is available at PyPI, but not at the URL that is considered canonical by Spack
    # https://github.com/spack/spack/issues/48479
    version("8.3.6", commit="7f63b43164638e27636b992b14b3fa088b692b94")
    version("8.2.3", sha256="dd5bea9e4b8dad00edd9c3459a38fb778e5a073da58ad2725bc9b84ad718e073")
    version("8.2.0", sha256="cbe35e0d72d1ca36f28a4cebe9b9040a3445a74253bc94051a3c906cf179ded0")
    version("8.1.4", sha256="d1835ac18f6f24f3115c56b2bc821185484e834a86b12fd0033ff7e4dc3c1f63")

    depends_on("py-setuptools@49:66,68:", type=("build", "run"), when="@:8.2")
    depends_on("py-setuptools@49:66,68:", type=("build", "run"))
    depends_on("py-aiofiles@0.7", type=("build", "run"), when="@:8.1")
    depends_on("py-ansimarkup@1.0.0:", type=("build", "run"))
    depends_on("py-async-timeout@3.0.0:", type=("build", "run"))
@@ -32,20 +28,15 @@ class PyCylcFlow(PythonPackage):
    depends_on("py-jinja2@3.0", type=("build", "run"))
    depends_on("py-metomi-isodatetime@3.0", type=("build", "run"), when="@:8.2.0")
    depends_on("py-metomi-isodatetime@3:3.1", type=("build", "run"), when="@8.2.3:")
    depends_on("py-packaging", type=("build", "run"), when="@8.3:")
    depends_on("py-protobuf@4.21.2:4.21", type=("build", "run"), when="@:8.2")
    depends_on("py-protobuf@4.24.4:4.24", type=("build", "run"), when="@8.3:")
    depends_on("py-protobuf@4.21.2:4.21", type=("build", "run"))
    depends_on("py-psutil@5.6.0:", type=("build", "run"))
    depends_on("py-pyzmq@22:", type=("build", "run"), when="@8.2:")
    depends_on("py-pyzmq@22", type=("build", "run"), when="@:8.1")
    depends_on("py-importlib-metadata", type=("build", "run"), when="@:8.2 ^python@:3.7")
    depends_on("py-importlib-metadata@5:", type=("build", "run"), when="@8.3: ^python@:3.11")
    depends_on("py-urwid@2:2.6.1,2.6.4:2", type=("build", "run"))
    depends_on("py-importlib-metadata", type=("build", "run"), when="^python@:3.7")
    depends_on("py-urwid@2", type=("build", "run"))
    depends_on("py-rx", type=("build", "run"))
    depends_on("py-promise", type=("build", "run"))
    depends_on("py-tomli@2:", type=("build", "run"), when="^python@:3.10")

    # Non-Python dependencies for creating graphs.
    # We want at least the pangocairo variant for
    # graphviz so that we can create output as png.
    depends_on("graphviz+pangocairo", type="run")
    # Non-Python dependencies
    depends_on("graphviz", type="run")

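Several of the cylc hunks, including the one above, replace a sha256-checksummed tarball with a commit-pinned version(), citing issue 48479: the release exists on PyPI but not at the URL Spack derives from the pypi attribute. When a recipe also sets git, a version can be pinned to the release commit and fetched by cloning instead of downloading. A hedged sketch of the idiom, with an illustrative project name and hashes:

    from spack.package import *


    class PyExample(PythonPackage):
        pypi = "example/example-1.0.0.tar.gz"
        git = "https://github.com/example/example.git"

        # Fetched from the canonical PyPI URL and verified by checksum.
        version("1.0.0", sha256="<release tarball checksum>")

        # Not fetchable from the canonical URL: pin the release commit instead.
        version("1.1.0", commit="0123456789abcdef0123456789abcdef01234567")
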
@@ -10,26 +10,15 @@ class PyCylcRose(PythonPackage):

    homepage = "https://cylc.github.io/cylc-doc/latest/html/plugins/cylc-rose.html"
    pypi = "cylc-rose/cylc-rose-1.3.0.tar.gz"
    git = "https://github.com/cylc/cylc-rose.git"

    maintainers("LydDeb", "climbfuji")
    maintainers("LydDeb")

    license("GPL-3.0-only")

    # Version 1.4.2 is available at PyPI, but not at the URL that is considered canonical by Spack
    # https://github.com/spack/spack/issues/48479
    version("1.4.2", commit="8deda0480afed8cf92cfdf7938fc78d0aaf0c0e4")
    version("1.3.0", sha256="017072b69d7a50fa6d309a911d2428743b07c095f308529b36b1b787ebe7ab88")

    depends_on("py-setuptools", type="build")
    depends_on("py-metomi-rose@2.1", type=("build", "run"))
    depends_on("py-cylc-flow@8.2", type=("build", "run"))
    depends_on("py-metomi-isodatetime", type=("build", "run"))
    depends_on("py-jinja2", type=("build", "run"))

    with when("@1.3.0"):
        depends_on("py-metomi-rose@2.1", type=("build", "run"))
        depends_on("py-cylc-flow@8.2", type=("build", "run"))

    with when("@1.4.2"):
        depends_on("py-metomi-rose@2.3", type=("build", "run"))
        depends_on("py-cylc-flow@8.3.5:8.3", type=("build", "run"))
    depends_on("py-ansimarkup", type=("build", "run"))

@@ -10,31 +10,22 @@ class PyCylcUiserver(PythonPackage):

    homepage = "https://github.com/cylc/cylc-uiserver/"
    pypi = "cylc-uiserver/cylc-uiserver-1.3.0.tar.gz"
    git = "https://github.com/cylc/cylc-uiserver.git"

    maintainers("LydDeb", "climbfuji")
    maintainers("LydDeb")

    license("GPL-3.0-or-later")

    # Version 1.5.1 is available at PyPI, but not at the URL that is considered canonical by Spack
    # https://github.com/spack/spack/issues/48479
    version("1.5.1", commit="3a41c6fbefbcea33c41410f3698de8b62c9871b8")
    version("1.3.0", sha256="f3526e470c7ac2b61bf69e9b8d17fc7a513392219d28baed9b1166dcc7033d7a")

    depends_on("python@3.8:", when="@1.5.1", type=("build", "run"))
    depends_on("py-wheel", type="build")
    depends_on("py-setuptools@40.9.0:", type="build")

    depends_on("py-cylc-flow@8.2", when="@1.3.0", type=("build", "run"))
    depends_on("py-cylc-flow@8.3", when="@1.5.1", type=("build", "run"))
    depends_on("py-cylc-flow@8.2", type=("build", "run"))
    depends_on("py-ansimarkup@1.0.0:", type=("build", "run"))
    depends_on("py-graphene", type=("build", "run"))
    depends_on("py-graphene-tornado@2.6", type=("build", "run"))
    depends_on("py-graphql-ws@0.4.4", type=("build", "run"))
    depends_on("py-jupyter-server@1.10.2:1", when="@1.3.0", type=("build", "run"))
    depends_on("py-jupyter-server@2.7:", when="@1.5.1", type=("build", "run"))
    depends_on("py-jupyter-server@1.10.2:1", type=("build", "run"))
    depends_on("py-requests", type=("build", "run"))
    depends_on("py-psutil", when="@1.5.1", type=("build", "run"))
    depends_on("py-tornado@6.1.0:", type=("build", "run"))
    depends_on("py-traitlets@5.2.1:", type=("build", "run"))
    depends_on("py-pyzmq", type=("build", "run"))

@@ -13,22 +13,9 @@ class PyDeepdiff(PythonPackage):

    license("MIT")

    version("8.1.1", sha256="dd7bc7d5c8b51b5b90f01b0e2fe23c801fd8b4c6a7ee7e31c5a3c3663fcc7ceb")
    version("8.0.1", sha256="245599a4586ab59bb599ca3517a9c42f3318ff600ded5e80a3432693c8ec3c4b")
    version("7.0.1", sha256="260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf")
    version("6.7.1", sha256="b367e6fa6caac1c9f500adc79ada1b5b1242c50d5f716a1a4362030197847d30")
    version("6.6.1", sha256="75c75b1511f0e48edef2b70d785a9c32b2631666b465fa8c32270a77a7b950b5")
    version("6.5.0", sha256="080b1359d6128f3f5f1738c6be3064f0ad9b0cc41994aa90a028065f6ad11f25")
    version("6.4.1", sha256="744c4e54ff83eaa77a995b3311dccdce6ee67773335a34a5ef269fa048005457")
    version("6.3.1", sha256="e8c1bb409a2caf1d757799add53b3a490f707dd792ada0eca7cac1328055097a")
    version("6.3.0", sha256="6a3bf1e7228ac5c71ca2ec43505ca0a743ff54ec77aa08d7db22de6bc7b2b644")
    version("5.6.0", sha256="e3f1c3a375c7ea5ca69dba6f7920f9368658318ff1d8a496293c79481f48e649")

    depends_on("py-setuptools", type="build")
    depends_on("py-orderly-set@5.2.3:5", when="@8.1.0:", type=("build", "run"))
    depends_on("py-orderly-set@5.2.2", when="@8.0.1", type=("build", "run"))
    depends_on("py-orderly-set@5.2.1", when="@8.0.0", type=("build", "run"))

    depends_on("py-ordered-set@4.1", when="@7.0.1:7", type=("build", "run"))
    depends_on("py-ordered-set@4.0.2:4.1", when="@6:7.0.0", type=("build", "run"))
    depends_on("py-ordered-set@4.0.2:4.1", when="@6:", type=("build", "run"))
    depends_on("py-ordered-set@4.0.2", when="@:5", type=("build", "run"))

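The orderly-set and ordered-set pins in the py-deepdiff hunk lean on Spack's version-range syntax, which is inclusive at both ends and treats an endpoint as covering its sub-versions: @5.2.3:5 matches anything from 5.2.3 up to and including any 5.x release, @:5 matches everything up to and including 5.x, and a bare @5.2.2 matches that version exactly. Annotated against the hunk above (comments added here for illustration, not part of the diff):

    depends_on("py-orderly-set@5.2.3:5", when="@8.1.0:", type=("build", "run"))     # 5.2.3 through any 5.x
    depends_on("py-ordered-set@4.0.2:4.1", when="@6:7.0.0", type=("build", "run"))  # 4.0.2 through 4.1.x, inclusive
    depends_on("py-ordered-set@4.0.2", when="@:5", type=("build", "run"))           # exactly 4.0.2, for deepdiff 5 and older
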
Some files were not shown because too many files have changed in this diff.