Compare commits
140 Commits
packages/a ... wdconinc-p
SHA1:
12ec16a43b
89663c1fdb
8b4be739af
81e7601a79
f54055279b
85378307f8
ec1016a0fc
88b2db6f56
463a19eb5c
db3bd7a053
369928200a
81ed0f8d87
194b6311e9
8420898f79
f556ba46d9
ddaa9d5d81
b878fe5555
b600bfc779
612c289c41
e42c76cccf
25013bacf2
3d554db198
b6def50dcb
bf591c96bd
edf1d2ec40
07f607ec9f
93747c5e24
b746d4596a
8814705936
c989541ebc
1759ce05dd
c0c1a4aea1
53353ae64e
62f7a4c9b1
39679d0882
50e6bf9979
b874c31cc8
04baad90f5
1022527923
7ef19ec1d8
6e45b51f27
5f9cd0991b
98c44fc351
b99f850c8e
cbbd68d16b
e4fbf99497
6a225d5405
af9fd82476
29c1152484
d6a8af6a1d
3c3dad0a7a
109efdff88
fa318e2c92
064e70990d
c40139b7d6
c302e1a768
7171015f1c
8ab6f33eb6
a66ab9cc6c
70534ac9d4
b369d8b250
4d2319a785
d6a9511f39
dd69b646ad
b670205e54
d6d8800466
7a32954f7f
92564ecd42
c1258a1431
d46ac9b1e4
2e472a13e5
7edb525599
93cd216603
c1d385ada2
464390962f
16734cd8c6
1dd9eeb0c6
f4ef0aec28
ea2c70a21a
72ddc03da9
32de71b0b6
e94d5b935f
85649be232
c23d2cdb2b
dc5dd896a2
43f23589ef
5085f635dd
46da7952d3
72783bcb0a
f4d2ff0068
a2b7fee3fe
2ebf2df421
e725aa527e
7455c8d173
99e2bce99f
4204d16fd3
e76677cbd5
57357a540f
97e0b39b32
247da9ea7a
07f89a73d1
60cfdcb6cc
1c9b042d3a
c424611010
35963d7d7d
7e62ca864a
fa9ef0ac89
55d9fe20e5
434a8d54d4
7328c64fc7
4be7b98fd2
25ce6cce47
418a9194e7
d16a578242
aee2f5cf41
ca8ea63796
3b21ff109f
d503f03473
770d2fe4d7
fcc35b70bb
074387b29c
4d26b93070
9d15a006f8
7e69650806
142469f92e
cb6cb023d2
920614a799
5be40f1a5a
aa0ab3b38b
85c125a0f5
2842c6d191
e8309b16fc
4dd55e1b67
227fa1a482
ee47d877ff
4b4be2e2c2
0e6e61b32f
79027884c7
beadf06caa
fc24be5c2b
.github/workflows/coverage.yml (vendored, 2 changes)
@@ -29,7 +29,7 @@ jobs:
       - run: coverage xml

       - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
+        uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303
         with:
           verbose: true
           fail_ci_if_error: false
@@ -2,6 +2,6 @@ black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
-mypy==1.8.0
+mypy==1.11.2
 types-six==1.17.0.20241205
 vermin==1.6.0
.github/workflows/valid-style.yml (vendored, 6 changes)
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.11'
+          python-version: '3.13'
           cache: 'pip'
       - name: Install Python Packages
         run: |
@@ -39,7 +39,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.11'
+          python-version: '3.13'
           cache: 'pip'
       - name: Install Python packages
         run: |
@@ -58,7 +58,7 @@ jobs:
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.11'
+      python_version: '3.13'
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
@@ -25,14 +25,23 @@ These settings can be overridden in ``etc/spack/config.yaml`` or
 The location where Spack will install packages and their dependencies.
 Default is ``$spack/opt/spack``.
 
----------------------------------------------------
-``install_hash_length`` and ``install_path_scheme``
----------------------------------------------------
+---------------
+``projections``
+---------------
 
-The default Spack installation path can be very long and can create problems
-for scripts with hardcoded shebangs. Additionally, when using the Intel
-compiler, and if there is also a long list of dependencies, the compiler may
-segfault. If you see the following:
+.. warning::
+
+   Modifying projections of the install tree is strongly discouraged.
+
+By default Spack installs all packages into a unique directory relative to the install
+tree root with the following layout:
+
+.. code-block::
+
+   {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
+
+In very rare cases, it may be necessary to reduce the length of this path. For example,
+very old versions of the Intel compiler are known to segfault when input paths are too long:
+
+.. code-block:: console
 
@@ -40,36 +49,25 @@ segfault. If you see the following:
    ** Segmentation violation signal raised. **
    Access violation or stack overflow. Please contact Intel Support for assistance.
 
-it may be because variables containing dependency specs may be too long. There
-are two parameters to help with long path names. Firstly, the
-``install_hash_length`` parameter can set the length of the hash in the
-installation path from 1 to 32. The default path uses the full 32 characters.
+Another case is Python and R packages with many runtime dependencies, which can result
+in very large ``PYTHONPATH`` and ``R_LIBS`` environment variables. This can cause the
+``execve`` system call to fail with ``E2BIG``, preventing processes from starting.
 
-Secondly, it is also possible to modify the entire installation
-scheme. By default Spack uses
-``{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}``
-where the tokens that are available for use in this directive are the
-same as those understood by the :meth:`~spack.spec.Spec.format`
-method. Using this parameter it is possible to use a different package
-layout or reduce the depth of the installation paths. For example
+For this reason, Spack allows users to modify the installation layout through custom
+projections. For example
 
 .. code-block:: yaml
 
    config:
-     install_path_scheme: '{name}/{version}/{hash:7}'
+     install_tree:
+       root: $spack/opt/spack
+       projections:
+         all: "{name}/{version}/{hash:16}"
 
-would install packages into sub-directories using only the package
-name, version and a hash length of 7 characters.
+would install packages into sub-directories using only the package name, version and a
+hash length of 16 characters.
 
-When using either parameter to set the hash length it only affects the
-representation of the hash in the installation directory. You
-should be aware that the smaller the hash length the more likely
-naming conflicts will occur. These parameters are independent of those
-used to configure module names.
-
-.. warning:: Modifying the installation hash length or path scheme after
-             packages have been installed will prevent Spack from being
-             able to find the old installation directories.
+Notice that reducing the hash length increases the likelihood of hash collisions.
 
 --------------------
 ``build_stage``
@@ -4,7 +4,7 @@ sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
 python-levenshtein==0.26.1
 docutils==0.21.2
-pygments==2.18.0
+pygments==2.19.1
 urllib3==2.3.0
 pytest==8.3.4
 isort==5.13.2
@@ -591,32 +591,18 @@ def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
         f.seek(0)
 
 
-def deps_to_relocate(spec):
-    """Return the transitive link and direct run dependencies of the spec.
-
-    This is a special traversal for dependencies we need to consider when relocating a package.
-
-    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
-    we need to rewrite those locations when dependencies are in a different place at install time
-    than they were at build time.
-
-    This traversal covers transitive link dependencies and direct run dependencies because:
-
-    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
-       dependency libraries.
-    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
-       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
-
-    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
-    """
-    deps = [
+def specs_to_relocate(spec: spack.spec.Spec) -> List[spack.spec.Spec]:
+    """Return the set of specs that may be referenced in the install prefix of the provided spec.
+
+    We currently include non-external transitive link and direct run dependencies."""
+    specs = [
         s
         for s in itertools.chain(
-            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
+            spec.traverse(root=True, deptype="link", order="breadth", key=traverse.by_dag_hash),
+            spec.dependencies(deptype="run"),
         )
         if not s.external
     ]
-    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
+    return list(llnl.util.lang.dedupe(specs, key=lambda s: s.dag_hash()))
 
 
 def get_buildinfo_dict(spec):
@@ -630,7 +616,7 @@ def get_buildinfo_dict(spec):
         # "relocate_binaries": [],
         # "relocate_links": [],
         "hardlinks_deduped": True,
-        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in specs_to_relocate(spec)},
     }
 
 
@@ -1112,7 +1098,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
 
 
 def prefixes_to_relocate(spec):
-    prefixes = [s.prefix for s in deps_to_relocate(spec)]
+    prefixes = [s.prefix for s in specs_to_relocate(spec)]
    prefixes.append(spack.hooks.sbang.sbang_install_path())
    prefixes.append(str(spack.store.STORE.layout.root))
    return prefixes
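The renamed `specs_to_relocate` keeps order-preserving deduplication by DAG hash, since a node can show up both as a link and as a run dependency. For readers unfamiliar with `llnl.util.lang.dedupe`, a minimal standalone sketch of the same pattern (the helper below is a stand-in, not Spack's implementation):

```python
from typing import Callable, Iterable, List, TypeVar

T = TypeVar("T")

def dedupe_by_key(items: Iterable[T], key: Callable[[T], object]) -> List[T]:
    """Keep the first occurrence of each key, preserving input order."""
    seen = set()
    result = []
    for item in items:
        k = key(item)
        if k not in seen:
            seen.add(k)
            result.append(item)
    return result

# A node may appear both as a link dependency and as a run dependency;
# keying on a stable identifier keeps only the first occurrence.
nodes = [("zlib", "abc123"), ("cmake", "def456"), ("zlib", "abc123")]
assert dedupe_by_key(nodes, key=lambda n: n[1]) == [("zlib", "abc123"), ("cmake", "def456")]
```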
@@ -2139,10 +2125,9 @@ def fetch_url_to_mirror(url):
 
 
 def dedupe_hardlinks_if_necessary(root, buildinfo):
-    """Updates a buildinfo dict for old archives that did
-    not dedupe hardlinks. De-duping hardlinks is necessary
-    when relocating files in parallel and in-place. This
-    means we must preserve inodes when relocating."""
+    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
+    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
+    when relocating."""
 
     # New archives don't need this.
     if buildinfo.get("hardlinks_deduped", False):
@@ -2171,65 +2156,47 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         buildinfo[key] = new_list
 
 
-def relocate_package(spec):
-    """
-    Relocate the given package
-    """
-    workdir = str(spec.prefix)
-    buildinfo = read_buildinfo_file(workdir)
-    new_layout_root = str(spack.store.STORE.layout.root)
-    new_prefix = str(spec.prefix)
-    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
-    new_spack_prefix = str(spack.paths.prefix)
-
-    old_sbang_install_path = None
-    if "sbang_install_path" in buildinfo:
-        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+def relocate_package(spec: spack.spec.Spec) -> None:
+    """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
+    buildinfo = read_buildinfo_file(spec.prefix)
     old_layout_root = str(buildinfo["buildpath"])
     old_spack_prefix = str(buildinfo.get("spackprefix"))
     old_rel_prefix = buildinfo.get("relative_prefix")
     old_prefix = os.path.join(old_layout_root, old_rel_prefix)
-    rel = buildinfo.get("relative_rpaths", False)
 
-    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
-    # were not unique.
+    # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
+    if buildinfo.get("relative_rpaths", False):
+        tty.warn(
+            f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
+        )
+
+    # In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
+    # prefixes were not unique.
     if "hash_to_prefix" in buildinfo:
         hash_to_old_prefix = buildinfo["hash_to_prefix"]
     elif "prefix_to_hash" in buildinfo:
-        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
+        hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
     else:
-        hash_to_old_prefix = dict()
+        raise NewLayoutException(
+            "Package tarball was created from an install prefix with a different directory layout "
+            "and an older buildcache create implementation. It cannot be relocated."
+        )
 
-    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
-        msg = "Package tarball was created from an install "
-        msg += "prefix with a different directory layout and an older "
-        msg += "buildcache create implementation. It cannot be relocated."
-        raise NewLayoutException(msg)
+    prefix_to_prefix = {}
 
-    # Spurious replacements (e.g. sbang) will cause issues with binaries
-    # For example, the new sbang can be longer than the old one.
-    # Hence 2 dictionaries are maintained here.
-    prefix_to_prefix_text = collections.OrderedDict()
-    prefix_to_prefix_bin = collections.OrderedDict()
+    if "sbang_install_path" in buildinfo:
+        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+        prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
 
-    if old_sbang_install_path:
-        install_path = spack.hooks.sbang.sbang_install_path()
-        prefix_to_prefix_text[old_sbang_install_path] = install_path
-
-    # First match specific prefix paths. Possibly the *local* install prefix
-    # of some dependency is in an upstream, so we cannot assume the original
-    # spack store root can be mapped uniformly to the new spack store root.
-    #
-    # If the spec is spliced, we need to handle the simultaneous mapping
-    # from the old install_tree to the new install_tree and from the build_spec
-    # to the spliced spec.
-    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
-    # by checking for spliced-in nodes by checking for nodes not in the build_spec
-    # without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
-    # in the context of the relevant root spec. This ensures that the analog for a spec s
-    # is the spec that s replaced when we spliced.
-    relocation_specs = deps_to_relocate(spec)
+    # First match specific prefix paths. Possibly the *local* install prefix of some dependency is
+    # in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
+    # the new spack store root.
+    #
+    # If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
+    # to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
+    # is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
+    # for nodes not in the build_spec without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals in
+    # the context of the relevant root spec. This ensures that the analog for a spec s is the spec
+    # that s replaced when we spliced.
+    relocation_specs = specs_to_relocate(spec)
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
     for s in relocation_specs:
         analog = s
@@ -2248,98 +2215,48 @@ def relocate_package(spec):
         lookup_dag_hash = analog.dag_hash()
         if lookup_dag_hash in hash_to_old_prefix:
             old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
-            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix[old_dep_prefix] = str(s.prefix)
 
     # Only then add the generic fallback of install prefix -> install prefix.
-    prefix_to_prefix_text[old_prefix] = new_prefix
-    prefix_to_prefix_bin[old_prefix] = new_prefix
-    prefix_to_prefix_text[old_layout_root] = new_layout_root
-    prefix_to_prefix_bin[old_layout_root] = new_layout_root
+    prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
 
-    # This is vestigial code for the *old* location of sbang. Previously,
-    # sbang was a bash script, and it lived in the spack prefix. It is
-    # now a POSIX script that lives in the install prefix. Old packages
-    # will have the old sbang location in their shebangs.
-    orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
-    new_sbang = spack.hooks.sbang.sbang_shebang_line()
-    prefix_to_prefix_text[orig_sbang] = new_sbang
+    # Delete identity mappings from prefix_to_prefix
+    prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
 
-    tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
+    # If there's nothing to relocate, we're done.
+    if not prefix_to_prefix:
+        return
 
-    # Old archives maybe have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(workdir, buildinfo)
+    for old, new in prefix_to_prefix.items():
+        tty.debug(f"Relocating: {old} => {new}.")
 
-    def is_backup_file(file):
-        return file.endswith("~")
+    # Old archives may have hardlinks repeated.
+    dedupe_hardlinks_if_necessary(spec.prefix, buildinfo)
 
-    # Text files containing the prefix text
-    text_names = list()
-    for filename in buildinfo["relocate_textfiles"]:
-        text_name = os.path.join(workdir, filename)
-        # Don't add backup files generated by filter_file during install step.
-        if not is_backup_file(text_name):
-            text_names.append(text_name)
+    textfiles = [os.path.join(spec.prefix, f) for f in buildinfo["relocate_textfiles"]]
+    binaries = [os.path.join(spec.prefix, f) for f in buildinfo.get("relocate_binaries")]
+    links = [os.path.join(spec.prefix, f) for f in buildinfo.get("relocate_links", [])]
 
-    # If we are not installing back to the same install tree do the relocation
-    if old_prefix != new_prefix:
-        files_to_relocate = [
-            os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
-        ]
-        # If the buildcache was not created with relativized rpaths
-        # do the relocation of path in binaries
-        platform = spack.platforms.by_name(spec.platform)
-        if "macho" in platform.binary_formats:
-            relocate.relocate_macho_binaries(
-                files_to_relocate,
-                old_layout_root,
-                new_layout_root,
-                prefix_to_prefix_bin,
-                rel,
-                old_prefix,
-                new_prefix,
-            )
-        elif "elf" in platform.binary_formats and not rel:
-            # The new ELF dynamic section relocation logic only handles absolute to
-            # absolute relocation.
-            relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
-        elif "elf" in platform.binary_formats and rel:
-            relocate.relocate_elf_binaries(
-                files_to_relocate,
-                old_layout_root,
-                new_layout_root,
-                prefix_to_prefix_bin,
-                rel,
-                old_prefix,
-                new_prefix,
-            )
+    platform = spack.platforms.by_name(spec.platform)
+    if "macho" in platform.binary_formats:
+        relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
+    elif "elf" in platform.binary_formats:
+        relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
 
-        # Relocate links to the new install prefix
-        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
-        relocate.relocate_links(links, prefix_to_prefix_bin)
+    # Relocate links to the new install prefix
+    relocate.relocate_links(links, prefix_to_prefix)
+    relocate.relocate_text(textfiles, prefix_to_prefix)
+    changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
 
-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-
-        # relocate the install prefixes in binary files including dependencies
-        changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
-
-        # Add ad-hoc signatures to patched macho files when on macOS.
-        if "macho" in platform.binary_formats and sys.platform == "darwin":
-            codesign = which("codesign")
-            if not codesign:
-                return
-            for binary in changed_files:
-                # preserve the original inode by running codesign on a copy
-                with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-                    codesign("-fs-", tmp_binary)
-
-    # If we are installing back to the same location
-    # relocate the sbang location if the spack directory changed
-    else:
-        if old_spack_prefix != new_spack_prefix:
-            relocate.relocate_text(text_names, prefix_to_prefix_text)
+    # Add ad-hoc signatures to patched macho files when on macOS.
+    if "macho" in platform.binary_formats and sys.platform == "darwin":
+        codesign = which("codesign")
+        if not codesign:
+            return
+        for binary in changed_files:
+            # preserve the original inode by running codesign on a copy
+            with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+                codesign("-fs-", tmp_binary)
 
 
 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
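The refactor collapses the separate text/binary maps into a single `prefix_to_prefix` dict, adds specific dependency prefixes before the generic store-root fallback, and drops identity mappings up front so a no-op relocation exits early. A standalone sketch of that preparation step, with simplified, hypothetical names:

```python
def build_prefix_map(hash_to_old_prefix, specs, old_root, new_root):
    """Map old install paths to new ones, most specific entries first,
    then drop identity mappings so 'nothing to do' is detectable."""
    prefix_to_prefix = {}
    # Specific dependency prefixes first: a dependency may live in an
    # upstream store, so the store root cannot be mapped uniformly.
    for name, dag_hash, new_prefix in specs:
        old_prefix = hash_to_old_prefix.get(dag_hash)
        if old_prefix is not None:
            prefix_to_prefix[old_prefix] = new_prefix
    # Generic fallback: old store root -> new store root.
    prefix_to_prefix[old_root] = new_root
    # Delete identity mappings; an empty dict means nothing to relocate.
    return {k: v for k, v in prefix_to_prefix.items() if k != v}

m = build_prefix_map({"abc": "/old/zlib"}, [("zlib", "abc", "/new/zlib")], "/old", "/old")
assert m == {"/old/zlib": "/new/zlib"}  # identity /old -> /old was dropped
```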
@@ -44,7 +44,19 @@
 from enum import Flag, auto
 from itertools import chain
 from multiprocessing.connection import Connection
-from typing import Callable, Dict, List, Optional, Set, Tuple
+from typing import (
+    Callable,
+    Dict,
+    List,
+    Optional,
+    Sequence,
+    Set,
+    TextIO,
+    Tuple,
+    Type,
+    Union,
+    overload,
+)
 
 import archspec.cpu
@@ -146,48 +158,128 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):
 
 
 class MakeExecutable(Executable):
-    """Special callable executable object for make so the user can specify
-    parallelism options on a per-invocation basis. Specifying
-    'parallel' to the call will override whatever the package's
-    global setting is, so you can either default to true or false and
-    override particular calls. Specifying 'jobs_env' to a particular
-    call will name an environment variable which will be set to the
-    parallelism level (without affecting the normal invocation with
-    -j).
+    """Special callable executable object for make so the user can specify parallelism options
+    on a per-invocation basis.
     """
 
-    def __init__(self, name, jobs, **kwargs):
-        supports_jobserver = kwargs.pop("supports_jobserver", True)
-        super().__init__(name, **kwargs)
+    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
+        super().__init__(name)
         self.supports_jobserver = supports_jobserver
         self.jobs = jobs
 
-    def __call__(self, *args, **kwargs):
-        """parallel, and jobs_env from kwargs are swallowed and used here;
-        remaining arguments are passed through to the superclass.
-        """
-        parallel = kwargs.pop("parallel", True)
-        jobs_env = kwargs.pop("jobs_env", None)
-        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Optional[TextIO], str] = ...,
+        error: Union[Optional[TextIO], str] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> None: ...
+
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Type[str], Callable] = ...,
+        error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> str: ...
+
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
+        error: Union[Type[str], Callable] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> str: ...
+
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = True,
+        jobs_env: Optional[str] = None,
+        jobs_env_supports_jobserver: bool = False,
+        **kwargs,
+    ) -> Optional[str]:
+        """Runs this "make" executable in a subprocess.
+
+        Args:
+            parallel: if False, parallelism is disabled
+            jobs_env: environment variable that will be set to the current level of parallelism
+            jobs_env_supports_jobserver: whether the jobs env supports a job server
+
+        For all the other **kwargs, refer to the base class.
+        """
         jobs = get_effective_jobs(
             self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
         )
         if jobs is not None:
-            args = ("-j{0}".format(jobs),) + args
+            args = (f"-j{jobs}",) + args
 
         if jobs_env:
-            # Caller wants us to set an environment variable to
-            # control the parallelism.
+            # Caller wants us to set an environment variable to control the parallelism
             jobs_env_jobs = get_effective_jobs(
                 self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
             )
             if jobs_env_jobs is not None:
-                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}
+                extra_env = kwargs.setdefault("extra_env", {})
+                extra_env.update({jobs_env: str(jobs_env_jobs)})
 
         return super().__call__(*args, **kwargs)
 
 
+class UndeclaredDependencyError(spack.error.SpackError):
+    """Raised if a dependency is invoking an executable through a module global, without
+    declaring a dependency on it.
+    """
+
+
+class DeprecatedExecutable:
+    def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
+        self.pkg = pkg
+        self.exe = exe
+        self.exe_pkg = exe_pkg
+
+    def __call__(self, *args, **kwargs):
+        raise UndeclaredDependencyError(
+            f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
+        )
+
+    def add_default_env(self, key: str, value: str):
+        self.__call__()
+
+
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
@@ -621,10 +713,9 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
     module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
 
-    # TODO: make these build deps that can be installed if not found.
-    module.make = MakeExecutable("make", jobs)
-    module.gmake = MakeExecutable("gmake", jobs)
-    module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
+    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
+    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
+    module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
+    # TODO: johnwparent: add package or builder support to define these build tools
+    # for now there is no entrypoint for builders to define these on their
+    # own
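After the refactor, `jobs` and `supports_jobserver` are keyword-only, and the `@overload` stubs let type checkers infer `None` vs `str` return types from the `output=`/`error=` arguments. A minimal runnable sketch of the keyword-only contract, using stand-in classes rather than Spack's actual `Executable`:

```python
class Executable:
    """Stand-in base class for the sketch."""
    def __init__(self, name: str) -> None:
        self.name = name

class MakeExecutable(Executable):
    """Minimal stand-in showing the keyword-only constructor contract."""
    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
        super().__init__(name)
        self.jobs = jobs
        self.supports_jobserver = supports_jobserver

make = MakeExecutable("make", jobs=8)  # OK: jobs passed by keyword
try:
    MakeExecutable("make", 8)          # TypeError: jobs is now keyword-only
except TypeError as e:
    print(e)
```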
@@ -298,7 +298,14 @@ def initconfig_hardware_entries(self):
     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
+
+        complete_rpath_list = ";".join(
+            [
+                self.pkg.spec.prefix.lib,
+                self.pkg.spec.prefix.lib64,
+                *os.environ.get("SPACK_COMPILER_EXTRA_RPATHS", "").split(":"),
+                *os.environ.get("SPACK_COMPILER_IMPLICIT_RPATHS", "").split(":"),
+            ]
+        )
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
@@ -307,6 +314,8 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
             cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
+            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
+            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
             self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]
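One subtlety of building the rpath string this way: `"".split(":")` returns `[""]`, so an unset environment variable contributes an empty entry to the joined list. A defensive standalone variant that filters empties (this is an illustration, not the code under review):

```python
import os

def rpath_list(*prefix_dirs: str) -> str:
    """Join rpath entries with ';', dropping empties that arise when an
    environment variable is unset ("".split(":") == [""])."""
    entries = list(prefix_dirs)
    for var in ("SPACK_COMPILER_EXTRA_RPATHS", "SPACK_COMPILER_IMPLICIT_RPATHS"):
        entries.extend(os.environ.get(var, "").split(":"))
    return ";".join(e for e in entries if e)

print(rpath_list("/opt/pkg/lib", "/opt/pkg/lib64"))
```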
@@ -71,13 +71,16 @@ def build_directory(self):
     @property
     def build_args(self):
         """Arguments for ``cargo build``."""
-        return []
+        return ["-j", str(self.pkg.module.make_jobs)]
 
     @property
     def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []
 
+    def setup_build_environment(self, env):
+        env.set("CARGO_HOME", self.stage.path)
+
     def build(self, pkg, spec, prefix):
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
@@ -10,8 +10,9 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-from spack.directives import build_system, extends
+from spack.directives import build_system, depends_on, extends
 from spack.install_test import SkipTest, test_part
 from spack.multimethod import when
 from spack.util.executable import Executable
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -28,7 +29,9 @@ class PerlPackage(spack.package_base.PackageBase):
 
     build_system("perl")
 
-    extends("perl", when="build_system=perl")
+    with when("build_system=perl"):
+        extends("perl")
+        depends_on("gmake", type="build")
 
     @property
     @memoized
@@ -27,6 +27,7 @@ class QMakePackage(spack.package_base.PackageBase):
     build_system("qmake")
 
     depends_on("qmake", type="build", when="build_system=qmake")
+    depends_on("gmake", type="build")
 
 
 @spack.builder.builder("qmake")
@@ -94,7 +94,7 @@ def list_url(cls):
         if cls.cran:
             return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
 
-    @property
-    def git(self):
-        if self.bioc:
-            return f"https://git.bioconductor.org/packages/{self.bioc}"
+    @lang.classproperty
+    def git(cls):
+        if cls.bioc:
+            return f"https://git.bioconductor.org/packages/{cls.bioc}"
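The change swaps an instance `@property` for a class-level property, so `git` can be computed from class attributes without instantiating the package. `lang.classproperty` is Spack's own helper; a minimal stand-in illustrating the descriptor technique:

```python
class classproperty:
    """Descriptor that computes a property from the class, not an instance."""
    def __init__(self, fget):
        self.fget = fget

    def __get__(self, _instance, owner):
        return self.fget(owner)

class RPackageExample:  # hypothetical stand-in for Spack's RPackage
    bioc = "BiocGenerics"

    @classproperty
    def git(cls):
        if cls.bioc:
            return f"https://git.bioconductor.org/packages/{cls.bioc}"

# Works on the class itself -- no instance required:
assert RPackageExample.git == "https://git.bioconductor.org/packages/BiocGenerics"
```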
@@ -171,7 +171,9 @@ def quote_kvp(string: str) -> str:
 
 
 def parse_specs(
-    args: Union[str, List[str]], concretize: bool = False, tests: bool = False
+    args: Union[str, List[str]],
+    concretize: bool = False,
+    tests: spack.concretize.TestsType = False,
 ) -> List[spack.spec.Spec]:
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -183,11 +185,13 @@ def parse_specs(
     if not concretize:
         return specs
 
-    to_concretize = [(s, None) for s in specs]
+    to_concretize: List[spack.concretize.SpecPairInput] = [(s, None) for s in specs]
     return _concretize_spec_pairs(to_concretize, tests=tests)
 
 
-def _concretize_spec_pairs(to_concretize, tests=False):
+def _concretize_spec_pairs(
+    to_concretize: List[spack.concretize.SpecPairInput], tests: spack.concretize.TestsType = False
+) -> List[spack.spec.Spec]:
     """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
 
     Any spec with a concrete spec associated with it will concretize to that spec. Any spec
@@ -198,7 +202,7 @@ def _concretize_spec_pairs(to_concretize, tests=False):
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or abstract.concretized()]
+        return [concrete or abstract.concretized(tests=tests)]
 
     # Special case if every spec is either concrete or has an abstract hash
     if all(
@@ -144,7 +144,7 @@ def is_installed(spec):
         record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
         return record and record.installed
 
-    specs = traverse.traverse_nodes(
+    all_specs = traverse.traverse_nodes(
         specs,
         root=False,
         order="breadth",
@@ -155,7 +155,7 @@ def is_installed(spec):
     )
 
     with spack.store.STORE.db.read_transaction():
-        return [spec for spec in specs if is_installed(spec)]
+        return [spec for spec in all_specs if is_installed(spec)]
 
 
 def dependent_environments(
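The rename from `specs` to `all_specs` avoids shadowing the input argument with the traversal result. Shadowing a parameter with a generator is a classic trap: the original input becomes unreachable and the generator is single-use. A standalone illustration (hypothetical function, not Spack code):

```python
def risky(specs):
    # The argument name is reused for a generator...
    specs = (s for s in specs if s is not None)
    first = [s for s in specs]
    # ...so a second pass over "specs" silently sees nothing: the
    # generator is exhausted and the original input is unreachable.
    second = [s for s in specs]
    return first, second

first, second = risky([1, None, 2])
assert first == [1, 2] and second == []  # surprising empty second pass
```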
@@ -749,12 +749,18 @@ def __init__(self, compiler, feature, flag_name, ver_string=None):
 class CompilerCacheEntry:
     """Deserialized cache entry for a compiler"""
 
-    __slots__ = ["c_compiler_output", "real_version"]
+    __slots__ = ("c_compiler_output", "real_version")
 
     def __init__(self, c_compiler_output: Optional[str], real_version: str):
         self.c_compiler_output = c_compiler_output
         self.real_version = real_version
 
+    @property
+    def empty(self) -> bool:
+        """Sometimes the compiler is temporarily broken, preventing us from getting output. The
+        call site determines if that is a problem."""
+        return self.c_compiler_output is None
+
     @classmethod
     def from_dict(cls, data: Dict[str, Optional[str]]):
         if not isinstance(data, dict):
@@ -792,9 +798,10 @@ def __init__(self, cache: "FileCache") -> None:
         self.cache.init_entry(self.name)
         self._data: Dict[str, Dict[str, Optional[str]]] = {}
 
-    def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
+    def _get_entry(self, key: str, *, allow_empty: bool) -> Optional[CompilerCacheEntry]:
         try:
-            return CompilerCacheEntry.from_dict(self._data[key])
+            entry = CompilerCacheEntry.from_dict(self._data[key])
+            return entry if allow_empty or not entry.empty else None
         except ValueError:
             del self._data[key]
         except KeyError:
@@ -812,7 +819,7 @@ def get(self, compiler: Compiler) -> CompilerCacheEntry:
         self._data = {}
 
         key = self._key(compiler)
-        value = self._get_entry(key)
+        value = self._get_entry(key, allow_empty=False)
         if value is not None:
             return value
 
@@ -826,7 +833,7 @@ def get(self, compiler: Compiler) -> CompilerCacheEntry:
         self._data = {}
 
         # Use cache entry that may have been created by another process in the meantime.
-        entry = self._get_entry(key)
+        entry = self._get_entry(key, allow_empty=True)
 
         # Finally compute the cache entry
         if entry is None:
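The `allow_empty` flag implements a two-phase lookup: a fast path that treats "empty" (negative) entries as misses so they get recomputed, and a slow path that accepts them to avoid hammering a temporarily broken compiler. A minimal standalone sketch of that policy; all names here are illustrative, not Spack's API:

```python
from typing import Callable, Dict, Optional

class Entry:
    def __init__(self, output: Optional[str]):
        self.output = output

    @property
    def empty(self) -> bool:
        return self.output is None

class Cache:
    def __init__(self) -> None:
        self._data: Dict[str, Entry] = {}

    def _get(self, key: str, *, allow_empty: bool) -> Optional[Entry]:
        entry = self._data.get(key)
        if entry is None:
            return None
        return entry if allow_empty or not entry.empty else None

    def get(self, key: str, compute: Callable[[str], Optional[str]]) -> Entry:
        # Fast path: ignore negative entries so they are retried.
        value = self._get(key, allow_empty=False)
        if value is not None:
            return value
        # Slow path (imagine a file lock held here): accept a negative
        # entry another process may have written in the meantime.
        entry = self._get(key, allow_empty=True)
        if entry is None:
            entry = self._data[key] = Entry(compute(key))
        return entry

cache = Cache()
print(cache.get("gcc@12", lambda k: f"output-of-{k}").output)
```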
@@ -5,7 +5,7 @@
 import sys
 import time
 from contextlib import contextmanager
-from typing import Iterable, Optional, Sequence, Tuple, Union
+from typing import Iterable, List, Optional, Sequence, Tuple, Union
 
 import llnl.util.tty as tty
 
@@ -35,6 +35,7 @@ def enable_compiler_existence_check():
     CHECK_COMPILER_EXISTENCE = saved
 
 
+SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
 SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]
@@ -59,8 +60,8 @@ def concretize_specs_together(
 
 
 def concretize_together(
-    spec_list: Sequence[SpecPair], tests: TestsType = False
-) -> Sequence[SpecPair]:
+    spec_list: Sequence[SpecPairInput], tests: TestsType = False
+) -> List[SpecPair]:
     """Given a number of specs as input, tries to concretize them together.
 
     Args:
@@ -76,8 +77,8 @@ def concretize_together(
 
 
 def concretize_together_when_possible(
-    spec_list: Sequence[SpecPair], tests: TestsType = False
-) -> Sequence[SpecPair]:
+    spec_list: Sequence[SpecPairInput], tests: TestsType = False
+) -> List[SpecPair]:
     """Given a number of specs as input, tries to concretize them together to the extent possible.
 
     See documentation for ``unify: when_possible`` concretization for the precise definition of
@@ -113,8 +114,8 @@ def concretize_together_when_possible(
 
 
 def concretize_separately(
-    spec_list: Sequence[SpecPair], tests: TestsType = False
-) -> Sequence[SpecPair]:
+    spec_list: Sequence[SpecPairInput], tests: TestsType = False
+) -> List[SpecPair]:
     """Concretizes the input specs separately from each other.
 
     Args:
@@ -951,12 +951,6 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
     return CONFIG.set(path, value, scope)
 
 
-def add_default_platform_scope(platform: str) -> None:
-    plat_name = os.path.join("defaults", platform)
-    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
-    CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))
-
-
 def scopes() -> Dict[str, ConfigScope]:
     """Convenience function to get list of configuration scopes."""
     return CONFIG.scopes
@@ -1330,7 +1330,7 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
     def installed_relatives(
         self,
         spec: "spack.spec.Spec",
-        direction: str = "children",
+        direction: tr.DirectionType = "children",
         transitive: bool = True,
         deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
     ) -> Set["spack.spec.Spec"]:
@@ -8,7 +8,7 @@
 import shutil
 import sys
 from pathlib import Path
-from typing import List, Optional, Tuple
+from typing import Dict, List, Optional, Tuple
 
 import llnl.util.filesystem as fs
 from llnl.util.symlink import readlink
@@ -17,7 +17,6 @@
 import spack.hash_types as ht
 import spack.projections
 import spack.spec
-import spack.store
 import spack.util.spack_json as sjson
 from spack.error import SpackError
 
@@ -69,10 +68,9 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
 
 
 class DirectoryLayout:
-    """A directory layout is used to associate unique paths with specs.
-    Different installations are going to want different layouts for their
-    install, and they can use this to customize the nesting structure of
-    spack installs. The default layout is:
+    """A directory layout is used to associate unique paths with specs. Different installations are
+    going to want different layouts for their install, and they can use this to customize the
+    nesting structure of spack installs. The default layout is:
 
     * <install root>/
 
@@ -82,35 +80,30 @@ class DirectoryLayout:
 
         * <name>-<version>-<hash>
 
-    The hash here is a SHA-1 hash for the full DAG plus the build
-    spec.
+    The installation directory projections can be modified with the projections argument."""
 
-    The installation directory projections can be modified with the
-    projections argument.
-    """
-
-    def __init__(self, root, **kwargs):
+    def __init__(
+        self,
+        root,
+        *,
+        projections: Optional[Dict[str, str]] = None,
+        hash_length: Optional[int] = None,
+    ) -> None:
         self.root = root
-        self.check_upstream = True
-        projections = kwargs.get("projections") or default_projections
-        self.projections = dict(
-            (key, projection.lower()) for key, projection in projections.items()
-        )
+        projections = projections or default_projections
+        self.projections = {key: projection.lower() for key, projection in projections.items()}
 
         # apply hash length as appropriate
-        self.hash_length = kwargs.get("hash_length", None)
+        self.hash_length = hash_length
         if self.hash_length is not None:
             for when_spec, projection in self.projections.items():
                 if "{hash}" not in projection:
-                    if "{hash" in projection:
-                        raise InvalidDirectoryLayoutParametersError(
-                            "Conflicting options for installation layout hash" " length"
-                        )
-                    else:
-                        raise InvalidDirectoryLayoutParametersError(
-                            "Cannot specify hash length when the hash is not"
-                            " part of all install_tree projections"
-                        )
+                    raise InvalidDirectoryLayoutParametersError(
+                        "Conflicting options for installation layout hash length"
+                        if "{hash" in projection
+                        else "Cannot specify hash length when the hash is not part of all "
+                        "install_tree projections"
+                    )
                 self.projections[when_spec] = projection.replace(
                     "{hash}", "{hash:%d}" % self.hash_length
                 )
@@ -279,13 +272,6 @@ def path_for_spec(self, spec):
 
         if spec.external:
             return spec.external_path
-        if self.check_upstream:
-            upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
-            if upstream:
-                raise SpackError(
-                    "Internal error: attempted to call path_for_spec on"
-                    " upstream-installed package."
-                )
 
         path = self.relative_path_for_spec(spec)
         assert not path.startswith(self.root)
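The hash-length logic rewrites each projection's `{hash}` token in place; for instance, `hash_length=7` turns `{name}-{version}-{hash}` into `{name}-{version}-{hash:7}`. A standalone sketch of that rewrite and its two error cases (generic `ValueError` stands in for Spack's exception class):

```python
from typing import Dict, Optional

def apply_hash_length(projections: Dict[str, str], hash_length: Optional[int]) -> Dict[str, str]:
    """Rewrite '{hash}' to '{hash:N}' in every projection, rejecting layouts
    where the hash token is missing or already carries a length."""
    if hash_length is None:
        return dict(projections)
    result = {}
    for when_spec, projection in projections.items():
        if "{hash}" not in projection:
            raise ValueError(  # Spack raises InvalidDirectoryLayoutParametersError here
                "Conflicting options for installation layout hash length"
                if "{hash" in projection
                else "Cannot specify hash length when the hash is not part of all projections"
            )
        result[when_spec] = projection.replace("{hash}", "{hash:%d}" % hash_length)
    return result

assert apply_hash_length({"all": "{name}-{version}-{hash}"}, 7) == {
    "all": "{name}-{version}-{hash:7}"
}
```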
@@ -15,6 +15,10 @@
 SHOW_BACKTRACE = False
 
 
+class SpackAPIWarning(UserWarning):
+    """Warning that formats with file and line number."""
+
+
 class SpackError(Exception):
     """This is the superclass for all Spack errors.
     Subclasses can be found in the modules they have to do with.
@@ -35,7 +35,6 @@
 
 import spack.config
 import spack.directory_layout
-import spack.paths
 import spack.projections
 import spack.relocate
 import spack.schema.projections
@@ -44,7 +43,6 @@
 import spack.util.spack_json as s_json
 import spack.util.spack_yaml as s_yaml
 from spack.error import SpackError
-from spack.hooks import sbang
 
 __all__ = ["FilesystemView", "YamlFilesystemView"]
 
@@ -94,12 +92,6 @@ def view_copy(
         spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
     else:
         prefix_to_projection[spack.store.STORE.layout.root] = view._root
-
-        # This is vestigial code for the *old* location of sbang.
-        prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
-            sbang.sbang_shebang_line()
-        )
-
         spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
 
 # The os module on Windows does not have a chown function.
@@ -539,7 +539,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
     # Note that we copy them in as they are in the *install* directory
     # NOT as they are in the repository, because we want a snapshot of
     # how *this* particular build was done.
-    for node in spec.traverse(deptype=all):
+    for node in spec.traverse(deptype="all"):
         if node is not spec:
             # Locate the dependency package in the install tree and find
             # its provenance information.
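This one-character fix is worth dwelling on: the old code passed Python's builtin `all` function where the string `"all"` was presumably expected, and nothing failed loudly because the builtin is truthy and hashable. A two-line illustration of why the typo could go unnoticed:

```python
# The builtin `all` is a function, not the string "all"; both are truthy,
# which is how the typo may have slipped through untyped code.
assert all != "all"
print(type(all))  # <class 'builtin_function_or_method'>
```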
@@ -503,16 +503,16 @@ def make_argument_parser(**kwargs):
     return parser
 
 
-def send_warning_to_tty(message, *args):
+def showwarning(message, category, filename, lineno, file=None, line=None):
     """Redirects messages to tty.warn."""
-    tty.warn(message)
+    if category is spack.error.SpackAPIWarning:
+        tty.warn(f"{filename}:{lineno}: {message}")
+    else:
+        tty.warn(message)
 
 
 def setup_main_options(args):
     """Configure spack globals based on the basic options."""
-    # Assign a custom function to show warnings
-    warnings.showwarning = send_warning_to_tty
-
     # Set up environment based on args.
     tty.set_verbose(args.verbose)
     tty.set_debug(args.debug)
@@ -903,9 +903,10 @@ def _main(argv=None):
     # main() is tricky to get right, so be careful where you put things.
     #
     # Things in this first part of `main()` should *not* require any
-    # configuration. This doesn't include much -- setting up th parser,
+    # configuration. This doesn't include much -- setting up the parser,
     # restoring some key environment variables, very simple CLI options, etc.
     # ------------------------------------------------------------------------
+    warnings.showwarning = showwarning
 
     # Create a parser with a simple positional argument first. We'll
     # lazily load the subcommand(s) we need later. This allows us to
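The replacement function now matches the full signature the `warnings` module passes to its `showwarning` hook (message, category, filename, lineno, file, line), which is what lets it dispatch on the warning class and include the source location. A runnable standalone sketch of the same hook, with a stand-in warning class:

```python
import warnings

class APIWarning(UserWarning):
    """Stand-in for spack.error.SpackAPIWarning."""

def showwarning(message, category, filename, lineno, file=None, line=None):
    # Location-aware formatting only for our own warning class.
    if category is APIWarning:
        print(f"==> Warning: {filename}:{lineno}: {message}")
    else:
        print(f"==> Warning: {message}")

warnings.showwarning = showwarning
warnings.warn("deprecated call", APIWarning)
```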
@@ -767,6 +767,9 @@ def __init__(self, spec):
         self.win_rpath = fsys.WindowsSimulatedRPath(self)
         super().__init__()
 
+    def __getitem__(self, key: str) -> "PackageBase":
+        return self.spec[key].package
+
     @classmethod
     def dependency_names(cls):
         return _subkeys(cls.dependencies)
@@ -1816,12 +1819,6 @@ def _has_make_target(self, target):
         Returns:
             bool: True if 'target' is found, else False
         """
-        # Prevent altering LC_ALL for 'make' outside this function
-        make = copy.deepcopy(self.module.make)
-
-        # Use English locale for missing target message comparison
-        make.add_default_env("LC_ALL", "C")
-
         # Check if we have a Makefile
         for makefile in ["GNUmakefile", "Makefile", "makefile"]:
             if os.path.exists(makefile):
@@ -1830,6 +1827,12 @@ def _has_make_target(self, target):
             tty.debug("No Makefile found in the build directory")
             return False
 
+        # Prevent altering LC_ALL for 'make' outside this function
+        make = copy.deepcopy(self.module.make)
+
+        # Use English locale for missing target message comparison
+        make.add_default_env("LC_ALL", "C")
+
         # Check if 'target' is a valid target.
         #
         # `make -n target` performs a "dry run". It prints the commands that
@@ -54,144 +54,11 @@ def _patchelf() -> Optional[executable.Executable]:
|
||||
return spack.bootstrap.ensure_patchelf_in_path_or_raise()
|
||||
|
||||
|
||||
def _elf_rpaths_for(path):
|
||||
"""Return the RPATHs for an executable or a library.
|
||||
|
||||
Args:
|
||||
path (str): full path to the executable or library
|
||||
|
||||
Return:
|
||||
RPATHs as a list of strings. Returns an empty array
|
||||
on ELF parsing errors, or when the ELF file simply
|
||||
has no rpaths.
|
||||
"""
|
||||
return elf.get_rpaths(path) or []
|
||||
|
||||
|
||||
def _make_relative(reference_file, path_root, paths):
|
||||
"""Return a list where any path in ``paths`` that starts with
|
||||
``path_root`` is made relative to the directory in which the
|
||||
reference file is stored.
|
||||
|
||||
After a path is made relative it is prefixed with the ``$ORIGIN``
|
||||
string.
|
||||
|
||||
Args:
|
||||
reference_file (str): file from which the reference directory
|
||||
is computed
|
||||
path_root (str): root of the relative paths
|
||||
paths: (list) paths to be examined
|
||||
|
||||
Returns:
|
||||
List of relative paths
|
||||
"""
|
||||
start_directory = os.path.dirname(reference_file)
|
||||
pattern = re.compile(path_root)
|
||||
relative_paths = []
|
||||
|
||||
for path in paths:
|
||||
if pattern.match(path):
|
||||
rel = os.path.relpath(path, start=start_directory)
|
||||
path = os.path.join("$ORIGIN", rel)
|
||||
|
||||
relative_paths.append(path)
|
||||
|
||||
return relative_paths
|
||||
|
||||
|
||||
def _normalize_relative_paths(start_path, relative_paths):
|
||||
"""Normalize the relative paths with respect to the original path name
|
||||
of the file (``start_path``).
|
||||
|
||||
The paths that are passed to this function existed or were relevant
|
||||
on another filesystem, so os.path.abspath cannot be used.
|
||||
|
||||
A relative path may contain the signifier $ORIGIN. Assuming that
|
||||
``start_path`` is absolute, this implies that the relative path
|
||||
(relative to start_path) should be replaced with an absolute path.
|
||||
|
||||
Args:
|
||||
start_path (str): path from which the starting directory
|
||||
is extracted
|
||||
relative_paths (str): list of relative paths as obtained by a
|
||||
call to :ref:`_make_relative`
|
||||
|
||||
Returns:
|
||||
List of normalized paths
|
||||
"""
|
||||
normalized_paths = []
|
||||
pattern = re.compile(re.escape("$ORIGIN"))
|
||||
start_directory = os.path.dirname(start_path)
|
||||
|
||||
for path in relative_paths:
|
||||
if path.startswith("$ORIGIN"):
|
||||
sub = pattern.sub(start_directory, path)
|
||||
path = os.path.normpath(sub)
|
||||
normalized_paths.append(path)
|
||||
|
||||
return normalized_paths
|
||||
|
||||
|
||||
def _decode_macho_data(bytestring):
|
||||
return bytestring.rstrip(b"\x00").decode("ascii")
|
||||
|
||||
|
||||
def macho_make_paths_relative(path_name, old_layout_root, rpaths, deps, idpath):
|
||||
"""
|
||||
Return a dictionary mapping the original rpaths to the relativized rpaths.
|
||||
This dictionary is used to replace paths in mach-o binaries.
|
||||
Replace old_dir with relative path from dirname of path name
|
||||
in rpaths and deps; idpath is replaced with @rpath/libname.
|
||||
"""
|
||||
paths_to_paths = dict()
|
||||
if idpath:
|
||||
paths_to_paths[idpath] = os.path.join("@rpath", "%s" % os.path.basename(idpath))
|
||||
for rpath in rpaths:
|
||||
if re.match(old_layout_root, rpath):
|
||||
rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
|
||||
paths_to_paths[rpath] = os.path.join("@loader_path", "%s" % rel)
|
||||
else:
|
||||
paths_to_paths[rpath] = rpath
|
||||
for dep in deps:
|
||||
if re.match(old_layout_root, dep):
|
||||
rel = os.path.relpath(dep, start=os.path.dirname(path_name))
|
||||
paths_to_paths[dep] = os.path.join("@loader_path", "%s" % rel)
|
||||
else:
|
||||
paths_to_paths[dep] = dep
|
||||
return paths_to_paths
|
||||
|
||||
|
||||
def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath):
|
||||
"""
|
||||
Return a dictionary mapping the relativized rpaths to the original rpaths.
|
||||
This dictionary is used to replace paths in mach-o binaries.
|
||||
Replace '@loader_path' with the dirname of the origname path name
|
||||
in rpaths and deps; idpath is replaced with the original path name
|
||||
"""
|
||||
rel_to_orig = dict()
|
||||
if idpath:
|
||||
rel_to_orig[idpath] = orig_path_name
|
||||
|
||||
for rpath in rpaths:
|
||||
if re.match("@loader_path", rpath):
|
||||
norm = os.path.normpath(
|
||||
re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), rpath)
|
||||
)
|
||||
rel_to_orig[rpath] = norm
|
||||
else:
|
||||
rel_to_orig[rpath] = rpath
|
||||
for dep in deps:
|
||||
if re.match("@loader_path", dep):
|
||||
norm = os.path.normpath(
|
||||
re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), dep)
|
||||
)
|
||||
rel_to_orig[dep] = norm
|
||||
else:
|
||||
rel_to_orig[dep] = dep
|
||||
return rel_to_orig
|
||||
|
||||
|
||||
def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefix):
def macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
    """
    Inputs
    original rpaths from mach-o binaries
@@ -207,13 +74,12 @@ def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefi
    # Sort from longest path to shortest, to ensure we try /foo/bar/baz before /foo/bar
    prefix_iteration_order = sorted(prefix_to_prefix, key=len, reverse=True)
    for orig_rpath in orig_rpaths:
        if orig_rpath.startswith(old_layout_root):
            for old_prefix in prefix_iteration_order:
                new_prefix = prefix_to_prefix[old_prefix]
                if orig_rpath.startswith(old_prefix):
                    new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
                    paths_to_paths[orig_rpath] = new_rpath
                    break
        for old_prefix in prefix_iteration_order:
            new_prefix = prefix_to_prefix[old_prefix]
            if orig_rpath.startswith(old_prefix):
                new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
                paths_to_paths[orig_rpath] = new_rpath
                break
        else:
            paths_to_paths[orig_rpath] = orig_rpath

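
# Illustration (hypothetical values, not part of the diff): sorting the
# prefix_to_prefix keys from longest to shortest makes nested prefixes win.
# With {"/foo/bar": "/new", "/foo/bar/baz": "/other"}, an rpath
# /foo/bar/baz/lib is rewritten to /other/lib rather than /new/baz/lib,
# because "/foo/bar/baz" is tried before "/foo/bar".
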
@@ -348,9 +214,7 @@ def _set_elf_rpaths_and_interpreter(
    return None


def relocate_macho_binaries(
    path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
):
def relocate_macho_binaries(path_names, prefix_to_prefix):
    """
    Use the macholib python package to get the rpaths, dependent libraries
    and library identity for libraries from the MachO object. Modify them
@@ -363,77 +227,15 @@ def relocate_macho_binaries(
        # Corner case where macho object file ended up in the path name list
        if path_name.endswith(".o"):
            continue
        if rel:
            # get the relativized paths
            rpaths, deps, idpath = macholib_get_paths(path_name)
            # get the file path name in the original prefix
            orig_path_name = re.sub(re.escape(new_prefix), old_prefix, path_name)
            # get the mapping of the relativized paths to the original
            # normalized paths
            rel_to_orig = macho_make_paths_normal(orig_path_name, rpaths, deps, idpath)
            # replace the relativized paths with normalized paths
            modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
            # get the normalized paths in the mach-o binary
            rpaths, deps, idpath = macholib_get_paths(path_name)
            # get the mapping of paths in old prefix to path in new prefix
            paths_to_paths = macho_find_paths(
                rpaths, deps, idpath, old_layout_root, prefix_to_prefix
            )
            # replace the old paths with new paths
            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
            # get the new normalized path in the mach-o binary
            rpaths, deps, idpath = macholib_get_paths(path_name)
            # get the mapping of paths to relative paths in the new prefix
            paths_to_paths = macho_make_paths_relative(
                path_name, new_layout_root, rpaths, deps, idpath
            )
            # replace the new paths with relativized paths in the new prefix
            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
        else:
            # get the paths in the old prefix
            rpaths, deps, idpath = macholib_get_paths(path_name)
            # get the mapping of paths in the old prefix to the new prefix
            paths_to_paths = macho_find_paths(
                rpaths, deps, idpath, old_layout_root, prefix_to_prefix
            )
            # replace the old paths with new paths
            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
        # get the paths in the old prefix
        rpaths, deps, idpath = macholib_get_paths(path_name)
        # get the mapping of paths in the old prefix to the new prefix
        paths_to_paths = macho_find_paths(rpaths, deps, idpath, prefix_to_prefix)
        # replace the old paths with new paths
        modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)


def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):
    """Return an updated list of RPATHs where each entry in the original list
    starting with the old root is relocated to another place according to the
    mapping passed as argument.

    Args:
        orig_rpaths (list): list of the original RPATHs
        orig_root (str): original root to be substituted
        new_prefixes (dict): dictionary that maps the original prefixes to
            where they should be relocated

    Returns:
        List of paths
    """
    new_rpaths = []
    for orig_rpath in orig_rpaths:
        # If the original RPATH doesn't start with the target root
        # append it verbatim and proceed
        if not orig_rpath.startswith(orig_root):
            new_rpaths.append(orig_rpath)
            continue

        # Otherwise inspect the mapping and transform + append any prefix
        # that starts with a registered key
        # avoiding duplicates
        for old_prefix, new_prefix in new_prefixes.items():
            if orig_rpath.startswith(old_prefix):
                new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
                if new_rpath not in new_rpaths:
                    new_rpaths.append(new_rpath)
    return new_rpaths

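
# Illustration (hypothetical values, not part of the diff):
#
#     _transform_rpaths(["/old/pkg/lib", "/usr/lib"], "/old", {"/old/pkg": "/new/pkg"})
#     # -> ["/new/pkg/lib", "/usr/lib"]
#
# RPATHs outside orig_root are kept verbatim; the others are rewritten
# through the prefix mapping, skipping duplicates.
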
def new_relocate_elf_binaries(binaries, prefix_to_prefix):
def relocate_elf_binaries(binaries, prefix_to_prefix):
    """Take a list of binaries, and an ordered dictionary of
    prefix to prefix mapping, and update the rpaths accordingly."""

@@ -452,98 +254,6 @@ def new_relocate_elf_binaries(binaries, prefix_to_prefix):
        _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)

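
# Usage sketch (hypothetical paths, not part of the diff): after this change
# the ELF relocation API only needs the binaries and the prefix mapping, e.g.
#
#     relocate_elf_binaries(
#         ["/new/opt/pkg/bin/tool"], {"/old/opt/pkg": "/new/opt/pkg"}
#     )
#
# The old signature below, with its rel/orig_prefix/new_prefix bookkeeping,
# is what this diff removes.
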
def relocate_elf_binaries(
    binaries, orig_root, new_root, new_prefixes, rel, orig_prefix, new_prefix
):
    """Relocate the binaries passed as arguments by changing their RPATHs.

    Use patchelf to get the original RPATHs and then replace them with
    rpaths in the new directory layout.

    New RPATHs are determined from a dictionary mapping the prefixes in the
    old directory layout to the prefixes in the new directory layout if the
    rpath was in the old layout root, i.e. system paths are not replaced.

    Args:
        binaries (list): list of binaries that might need relocation, located
            in the new prefix
        orig_root (str): original root to be substituted
        new_root (str): new root to be used, only relevant for relative RPATHs
        new_prefixes (dict): dictionary that maps the original prefixes to
            where they should be relocated
        rel (bool): True if the RPATHs are relative, False if they are absolute
        orig_prefix (str): prefix where the executable was originally located
        new_prefix (str): prefix where we want to relocate the executable
    """
    for new_binary in binaries:
        orig_rpaths = _elf_rpaths_for(new_binary)
        # TODO: Can we deduce `rel` from the original RPATHs?
        if rel:
            # Get the file path in the original prefix
            orig_binary = re.sub(re.escape(new_prefix), orig_prefix, new_binary)

            # Get the normalized RPATHs in the old prefix using the file path
            # in the orig prefix
            orig_norm_rpaths = _normalize_relative_paths(orig_binary, orig_rpaths)
            # Get the normalized RPATHs in the new prefix
            new_norm_rpaths = _transform_rpaths(orig_norm_rpaths, orig_root, new_prefixes)
            # Get the relative RPATHs in the new prefix
            new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
            # check to see if relative rpaths are changed before rewriting
            if sorted(new_rpaths) != sorted(orig_rpaths):
                _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
        else:
            new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)


def make_link_relative(new_links, orig_links):
    """Compute the relative target from the original link and
    make the new link relative.

    Args:
        new_links (list): new links to be made relative
        orig_links (list): original links
    """
    for new_link, orig_link in zip(new_links, orig_links):
        target = readlink(orig_link)
        relative_target = os.path.relpath(target, os.path.dirname(orig_link))
        os.unlink(new_link)
        symlink(relative_target, new_link)

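
# Illustration (hypothetical paths, not part of the diff): for an original
# link /opt/pkg/bin/link pointing at /opt/pkg/lib/target, the computed
# relative target is "../lib/target", which replaces the absolute target in
# the new link.
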
def make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root):
    """
    Replace old RPATHs with paths relative to old_dir in binary files
    """
    if not sys.platform == "darwin":
        return

    for cur_path, orig_path in zip(cur_path_names, orig_path_names):
        (rpaths, deps, idpath) = macholib_get_paths(cur_path)
        paths_to_paths = macho_make_paths_relative(
            orig_path, old_layout_root, rpaths, deps, idpath
        )
        modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths)


def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
    """Replace the original RPATHs in the new binaries making them
    relative to the original layout root.

    Args:
        new_binaries (list): new binaries whose RPATHs are to be made relative
        orig_binaries (list): original binaries
        orig_layout_root (str): path to be used as a base for making
            RPATHs relative
    """
    for new_binary, orig_binary in zip(new_binaries, orig_binaries):
        orig_rpaths = _elf_rpaths_for(new_binary)
        if orig_rpaths:
            new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)


def warn_if_link_cant_be_relocated(link, target):
    if not os.path.isabs(target):
        return

@@ -48,7 +48,7 @@ def rewire_node(spec, explicit):
    # spec
    prefix_to_prefix = {spec.build_spec.prefix: spec.prefix}
    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
    for s in bindist.deps_to_relocate(spec):
    for s in bindist.specs_to_relocate(spec):
        analog = s
        if id(s) not in build_spec_ids:
            analogs = [
@@ -77,25 +77,9 @@ def rewire_node(spec, explicit):
    ]
    if bins_to_relocate:
        if "macho" in platform.binary_formats:
            relocate.relocate_macho_binaries(
                bins_to_relocate,
                str(spack.store.STORE.layout.root),
                str(spack.store.STORE.layout.root),
                prefix_to_prefix,
                False,
                spec.build_spec.prefix,
                spec.prefix,
            )
            relocate.relocate_macho_binaries(bins_to_relocate, prefix_to_prefix)
        if "elf" in platform.binary_formats:
            relocate.relocate_elf_binaries(
                bins_to_relocate,
                str(spack.store.STORE.layout.root),
                str(spack.store.STORE.layout.root),
                prefix_to_prefix,
                False,
                spec.build_spec.prefix,
                spec.prefix,
            )
            relocate.relocate_elf_binaries(bins_to_relocate, prefix_to_prefix)
        relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
    shutil.rmtree(tempdir)
    install_manifest = os.path.join(

@@ -106,10 +106,17 @@
            {
                "names": ["install_missing_compilers"],
                "message": "The config:install_missing_compilers option has been deprecated in "
                "Spack v0.23, and is currently ignored. It will be removed from config after "
                "Spack v0.23, and is currently ignored. It will be removed from config in "
                "Spack v1.0.",
                "error": False,
            },
            {
                "names": ["install_path_scheme"],
                "message": "The config:install_path_scheme option was deprecated in Spack v0.16 "
                "in favor of config:install_tree:projections:all. It will be removed in Spack "
                "v1.0.",
                "error": False,
            },
        ],
    }
}

@@ -58,7 +58,21 @@
import re
import socket
import warnings
from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
from typing import (
    Any,
    Callable,
    Dict,
    Iterable,
    List,
    Match,
    Optional,
    Set,
    Tuple,
    Union,
    overload,
)

from typing_extensions import Literal

import archspec.cpu

@@ -83,7 +97,7 @@
import spack.solver
import spack.spec_parser
import spack.store
import spack.traverse as traverse
import spack.traverse
import spack.util.executable
import spack.util.hash
import spack.util.module_cmd as md
@@ -1339,16 +1353,16 @@ def tree(
    depth: bool = False,
    hashes: bool = False,
    hashlen: Optional[int] = None,
    cover: str = "nodes",
    cover: spack.traverse.CoverType = "nodes",
    indent: int = 0,
    format: str = DEFAULT_FORMAT,
    deptypes: Union[Tuple[str, ...], str] = "all",
    deptypes: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
    show_types: bool = False,
    depth_first: bool = False,
    recurse_dependencies: bool = True,
    status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
    prefix: Optional[Callable[["Spec"], str]] = None,
    key=id,
    key: Callable[["Spec"], Any] = id,
) -> str:
    """Prints out specs and their dependencies, tree-formatted with indentation.

@@ -1380,11 +1394,16 @@ def tree(
    # reduce deptypes over all in-edges when covering nodes
    if show_types and cover == "nodes":
        deptype_lookup: Dict[str, dt.DepFlag] = collections.defaultdict(dt.DepFlag)
        for edge in traverse.traverse_edges(specs, cover="edges", deptype=deptypes, root=False):
        for edge in spack.traverse.traverse_edges(
            specs, cover="edges", deptype=deptypes, root=False
        ):
            deptype_lookup[edge.spec.dag_hash()] |= edge.depflag

    for d, dep_spec in traverse.traverse_tree(
        sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
    # SupportsRichComparisonT issue with List[Spec]
    sorted_specs: List["Spec"] = sorted(specs)  # type: ignore[type-var]

    for d, dep_spec in spack.traverse.traverse_tree(
        sorted_specs, cover=cover, deptype=deptypes, depth_first=depth_first, key=key
    ):
        node = dep_spec.spec

@@ -1927,13 +1946,111 @@ def installed_upstream(self):
        upstream, _ = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
        return upstream

    def traverse(self, **kwargs):
        """Shorthand for :meth:`~spack.traverse.traverse_nodes`"""
        return traverse.traverse_nodes([self], **kwargs)
    @overload
    def traverse(
        self,
        *,
        root: bool = ...,
        order: spack.traverse.OrderType = ...,
        cover: spack.traverse.CoverType = ...,
        direction: spack.traverse.DirectionType = ...,
        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
        depth: Literal[False] = False,
        key: Callable[["Spec"], Any] = ...,
        visited: Optional[Set[Any]] = ...,
    ) -> Iterable["Spec"]: ...

    def traverse_edges(self, **kwargs):
    @overload
    def traverse(
        self,
        *,
        root: bool = ...,
        order: spack.traverse.OrderType = ...,
        cover: spack.traverse.CoverType = ...,
        direction: spack.traverse.DirectionType = ...,
        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
        depth: Literal[True],
        key: Callable[["Spec"], Any] = ...,
        visited: Optional[Set[Any]] = ...,
    ) -> Iterable[Tuple[int, "Spec"]]: ...

    def traverse(
        self,
        *,
        root: bool = True,
        order: spack.traverse.OrderType = "pre",
        cover: spack.traverse.CoverType = "nodes",
        direction: spack.traverse.DirectionType = "children",
        deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
        depth: bool = False,
        key: Callable[["Spec"], Any] = id,
        visited: Optional[Set[Any]] = None,
    ) -> Iterable[Union["Spec", Tuple[int, "Spec"]]]:
        """Shorthand for :meth:`~spack.traverse.traverse_nodes`"""
        return spack.traverse.traverse_nodes(
            [self],
            root=root,
            order=order,
            cover=cover,
            direction=direction,
            deptype=deptype,
            depth=depth,
            key=key,
            visited=visited,
        )

    @overload
    def traverse_edges(
        self,
        *,
        root: bool = ...,
        order: spack.traverse.OrderType = ...,
        cover: spack.traverse.CoverType = ...,
        direction: spack.traverse.DirectionType = ...,
        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
        depth: Literal[False] = False,
        key: Callable[["Spec"], Any] = ...,
        visited: Optional[Set[Any]] = ...,
    ) -> Iterable[DependencySpec]: ...

    @overload
    def traverse_edges(
        self,
        *,
        root: bool = ...,
        order: spack.traverse.OrderType = ...,
        cover: spack.traverse.CoverType = ...,
        direction: spack.traverse.DirectionType = ...,
        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
        depth: Literal[True],
        key: Callable[["Spec"], Any] = ...,
        visited: Optional[Set[Any]] = ...,
    ) -> Iterable[Tuple[int, DependencySpec]]: ...

    def traverse_edges(
        self,
        *,
        root: bool = True,
        order: spack.traverse.OrderType = "pre",
        cover: spack.traverse.CoverType = "nodes",
        direction: spack.traverse.DirectionType = "children",
        deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
        depth: bool = False,
        key: Callable[["Spec"], Any] = id,
        visited: Optional[Set[Any]] = None,
    ) -> Iterable[Union[DependencySpec, Tuple[int, DependencySpec]]]:
        """Shorthand for :meth:`~spack.traverse.traverse_edges`"""
        return traverse.traverse_edges([self], **kwargs)
        return spack.traverse.traverse_edges(
            [self],
            root=root,
            order=order,
            cover=cover,
            direction=direction,
            deptype=deptype,
            depth=depth,
            key=key,
            visited=visited,
        )

    @property
    def short_spec(self):
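
# Aside (not part of the diff): the @overload pairs above use typing.Literal
# so a checker can narrow the return type on the value of ``depth``. A
# minimal self-contained sketch of the pattern:
#
#     from typing import Iterable, Literal, Tuple, Union, overload
#
#     @overload
#     def walk(depth: Literal[False] = False) -> Iterable[str]: ...
#     @overload
#     def walk(depth: Literal[True]) -> Iterable[Tuple[int, str]]: ...
#     def walk(depth: bool = False) -> Iterable[Union[str, Tuple[int, str]]]:
#         return [(0, "root")] if depth else ["root"]
#
# Calls with depth=True type-check as yielding (depth, item) tuples, while
# the default yields bare items.
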
@@ -2944,7 +3061,7 @@ def _finalize_concretization(self):
        for spec in self.traverse():
            spec._cached_hash(ht.dag_hash)

    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "Spec":
        """This is a non-destructive version of concretize().

        First clones, then returns a concrete version of this package
@@ -4105,10 +4222,10 @@ def tree(
    depth: bool = False,
    hashes: bool = False,
    hashlen: Optional[int] = None,
    cover: str = "nodes",
    cover: spack.traverse.CoverType = "nodes",
    indent: int = 0,
    format: str = DEFAULT_FORMAT,
    deptypes: Union[Tuple[str, ...], str] = "all",
    deptypes: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
    show_types: bool = False,
    depth_first: bool = False,
    recurse_dependencies: bool = True,

@@ -36,14 +36,13 @@
import spack.oci.image
import spack.paths
import spack.spec
import spack.stage
import spack.store
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.binary_distribution import CannotListKeys, GenerateIndexError
from spack.directory_layout import DirectoryLayout
from spack.installer import PackageInstaller
from spack.paths import test_path
from spack.spec import Spec

@@ -136,35 +135,28 @@ def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
@pytest.fixture(scope="function")
def install_dir_default_layout(tmpdir):
    """Hooks a fake install directory with a default layout"""
    scheme = os.path.join(
        "${architecture}", "${compiler.name}-${compiler.version}", "${name}-${version}-${hash}"
    )
    real_store, real_layout = spack.store.STORE, spack.store.STORE.layout
    opt_dir = tmpdir.join("opt")
    spack.store.STORE = spack.store.Store(str(opt_dir))
    spack.store.STORE.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
    original_store, spack.store.STORE = spack.store.STORE, spack.store.Store(str(opt_dir))
    try:
        yield spack.store
    finally:
        spack.store.STORE = real_store
        spack.store.STORE.layout = real_layout
        spack.store.STORE = original_store


@pytest.fixture(scope="function")
def install_dir_non_default_layout(tmpdir):
    """Hooks a fake install directory with a non-default layout"""
    scheme = os.path.join(
        "${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
    )
    real_store, real_layout = spack.store.STORE, spack.store.STORE.layout
    opt_dir = tmpdir.join("opt")
    spack.store.STORE = spack.store.Store(str(opt_dir))
    spack.store.STORE.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
    original_store, spack.store.STORE = spack.store.STORE, spack.store.Store(
        str(opt_dir),
        projections={
            "all": "{name}/{version}/{architecture}-{compiler.name}-{compiler.version}-{hash}"
        },
    )
    try:
        yield spack.store
    finally:
        spack.store.STORE = real_store
        spack.store.STORE.layout = real_layout
        spack.store.STORE = original_store


@pytest.fixture(scope="function")
@@ -500,74 +492,40 @@ def mock_list_url(url, recursive=False):
    assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err


@pytest.mark.usefixtures("mock_fetch", "install_mockery")
def test_update_sbang(tmpdir, temporary_mirror):
    """Test the creation and installation of buildcaches with default rpaths
    into the non-default directory layout scheme, triggering an update of the
    sbang.
    """
    spec_str = "old-sbang"
    # Concretize a package with some old-fashioned sbang lines.
    old_spec = Spec(spec_str).concretized()
    old_spec_hash_str = "/{0}".format(old_spec.dag_hash())
def test_update_sbang(tmp_path, temporary_mirror, mock_fetch, install_mockery):
    """Test relocation of the sbang shebang line in a package script"""
    s = Spec("old-sbang").concretized()
    PackageInstaller([s.package]).install()
    old_prefix, old_sbang_shebang = s.prefix, sbang.sbang_shebang_line()
    old_contents = f"""\
{old_sbang_shebang}
#!/usr/bin/env python3

    # Need a fake mirror with *function* scope.
    mirror_dir = temporary_mirror

    # Assume all commands will concretize old_spec the same way.
    install_cmd("--no-cache", old_spec.name)
{s.prefix.bin}
"""
    with open(os.path.join(s.prefix.bin, "script.sh"), encoding="utf-8") as f:
        assert f.read() == old_contents

    # Create a buildcache with the installed spec.
    buildcache_cmd("push", "-u", mirror_dir, old_spec_hash_str)

    # Need to force an update of the buildcache index
    buildcache_cmd("update-index", mirror_dir)

    # Uninstall the original package.
    uninstall_cmd("-y", old_spec_hash_str)
    buildcache_cmd("push", "--update-index", "--unsigned", temporary_mirror, f"/{s.dag_hash()}")

    # Switch the store to the new install tree locations
    newtree_dir = tmpdir.join("newtree")
    with spack.store.use_store(str(newtree_dir)):
        new_spec = Spec("old-sbang").concretized()
        assert new_spec.dag_hash() == old_spec.dag_hash()
    with spack.store.use_store(str(tmp_path)):
        s._prefix = None  # clear the cached old prefix
        new_prefix, new_sbang_shebang = s.prefix, sbang.sbang_shebang_line()
        assert old_prefix != new_prefix
        assert old_sbang_shebang != new_sbang_shebang
        PackageInstaller([s.package], cache_only=True, unsigned=True).install()

        # Install package from buildcache
        buildcache_cmd("install", "-u", "-f", new_spec.name)
        # Check that the sbang line refers to the new install tree
        new_contents = f"""\
{sbang.sbang_shebang_line()}
#!/usr/bin/env python3

        # Continue blowing away caches
        bindist.clear_spec_cache()
        spack.stage.purge()

        # test that the sbang was updated by the move
        sbang_style_1_expected = """{0}
#!/usr/bin/env python

{1}
""".format(
            sbang.sbang_shebang_line(), new_spec.prefix.bin
        )
        sbang_style_2_expected = """{0}
#!/usr/bin/env python

{1}
""".format(
            sbang.sbang_shebang_line(), new_spec.prefix.bin
        )

        installed_script_style_1_path = new_spec.prefix.bin.join("sbang-style-1.sh")
        assert (
            sbang_style_1_expected
            == open(str(installed_script_style_1_path), encoding="utf-8").read()
        )

        installed_script_style_2_path = new_spec.prefix.bin.join("sbang-style-2.sh")
        assert (
            sbang_style_2_expected
            == open(str(installed_script_style_2_path), encoding="utf-8").read()
        )

        uninstall_cmd("-y", "/%s" % new_spec.dag_hash())
{s.prefix.bin}
"""
        with open(os.path.join(s.prefix.bin, "script.sh"), encoding="utf-8") as f:
            assert f.read() == new_contents


@pytest.mark.skipif(

@@ -20,7 +20,7 @@
import spack.paths
import spack.platforms
import spack.platforms.test
from spack.build_environment import ChildError, setup_package
from spack.build_environment import ChildError, MakeExecutable, setup_package
from spack.installer import PackageInstaller
from spack.spec import Spec
from spack.util.executable import which
@@ -29,10 +29,12 @@

@pytest.fixture()
def concretize_and_setup(default_mock_concretization):
def concretize_and_setup(default_mock_concretization, monkeypatch):
    def _func(spec_str):
        s = default_mock_concretization(spec_str)
        setup_package(s.package, False)
        monkeypatch.setattr(s.package.module, "make", MakeExecutable("make", jobs=1))
        monkeypatch.setattr(s.package.module, "ninja", MakeExecutable("ninja", jobs=1))
        return s

    return _func

@@ -635,11 +635,6 @@ def ensure_debug(monkeypatch):
    tty.set_debug(current_debug_level)


@pytest.fixture(autouse=sys.platform == "win32", scope="session")
def platform_config():
    spack.config.add_default_platform_scope(spack.platforms.real_host().name)


@pytest.fixture
def default_config():
    """Isolates the default configuration from the user configs.

@@ -29,31 +29,31 @@ def make_executable(tmp_path, working_env):


def test_make_normal():
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)
    assert make(output=str).strip() == "-j8"
    assert make("install", output=str).strip() == "-j8 install"


def test_make_explicit():
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)
    assert make(parallel=True, output=str).strip() == "-j8"
    assert make("install", parallel=True, output=str).strip() == "-j8 install"


def test_make_one_job():
    make = MakeExecutable("make", 1)
    make = MakeExecutable("make", jobs=1)
    assert make(output=str).strip() == "-j1"
    assert make("install", output=str).strip() == "-j1 install"


def test_make_parallel_false():
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)
    assert make(parallel=False, output=str).strip() == "-j1"
    assert make("install", parallel=False, output=str).strip() == "-j1 install"


def test_make_parallel_disabled(monkeypatch):
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)

    monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true")
    assert make(output=str).strip() == "-j1"
@@ -74,7 +74,7 @@ def test_make_parallel_disabled(monkeypatch):


def test_make_parallel_precedence(monkeypatch):
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)

    # These should work
    monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true")
@@ -96,21 +96,21 @@ def test_make_parallel_precedence(monkeypatch):


def test_make_jobs_env():
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)
    dump_env = {}
    assert make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip() == "-j8"
    assert dump_env["MAKE_PARALLELISM"] == "8"


def test_make_jobserver(monkeypatch):
    make = MakeExecutable("make", 8)
    make = MakeExecutable("make", jobs=8)
    monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y")
    assert make(output=str).strip() == ""
    assert make(parallel=False, output=str).strip() == "-j1"


def test_make_jobserver_not_supported(monkeypatch):
    make = MakeExecutable("make", 8, supports_jobserver=False)
    make = MakeExecutable("make", jobs=8, supports_jobserver=False)
    monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y")
    # Currently falls back on the default job count; maybe it should force -j1?
    assert make(output=str).strip() == "-j8"
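
# Aside (not part of the diff): the systematic "8" -> "jobs=8" change in
# these tests suggests MakeExecutable now takes ``jobs`` as a keyword-only
# argument, i.e. a signature along the lines of
#
#     def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True): ...
#
# so positional call sites like MakeExecutable("make", 8) stop working.
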
@@ -285,3 +285,16 @@ def compilers(compiler, arch_spec):
    error = capfd.readouterr()[1]
    assert "Skipping tests for package" in error
    assert "test requires missing compiler" in error


def test_package_subscript(default_mock_concretization):
    """Tests that we can use the subscript notation on packages, and that it returns a package"""
    root = default_mock_concretization("mpileaks")
    root_pkg = root.package

    # Subscript of a virtual
    assert isinstance(root_pkg["mpi"], spack.package_base.PackageBase)

    # Subscript on concrete
    for d in root.traverse():
        assert isinstance(root_pkg[d.name], spack.package_base.PackageBase)

@@ -31,13 +31,7 @@
from spack.fetch_strategy import URLFetchStrategy
from spack.installer import PackageInstaller
from spack.paths import mock_gpg_keys_path
from spack.relocate import (
    macho_find_paths,
    macho_make_paths_normal,
    macho_make_paths_relative,
    relocate_links,
    relocate_text,
)
from spack.relocate import macho_find_paths, relocate_links, relocate_text
from spack.spec import Spec

pytestmark = pytest.mark.not_on_windows("does not run on windows")
@@ -301,7 +295,6 @@ def test_replace_paths(tmpdir):
            os.path.join(oldlibdir_local, libfile_loco),
        ],
        os.path.join(oldlibdir_cc, libfile_c),
        old_spack_dir,
        prefix2prefix,
    )
    assert out_dict == {
@@ -325,7 +318,6 @@ def test_replace_paths(tmpdir):
            os.path.join(oldlibdir_local, libfile_loco),
        ],
        None,
        old_spack_dir,
        prefix2prefix,
    )
    assert out_dict == {
@@ -349,7 +341,6 @@ def test_replace_paths(tmpdir):
            f"@rpath/{libfile_loco}",
        ],
        None,
        old_spack_dir,
        prefix2prefix,
    )

@@ -369,7 +360,6 @@ def test_replace_paths(tmpdir):
        [oldlibdir_a, oldlibdir_b, oldlibdir_d, oldlibdir_local],
        [f"@rpath/{libfile_a}", f"@rpath/{libfile_b}", f"@rpath/{libfile_loco}"],
        None,
        old_spack_dir,
        prefix2prefix,
    )
    assert out_dict == {
@@ -383,91 +373,6 @@ def test_replace_paths(tmpdir):
    }


def test_macho_make_paths():
    out = macho_make_paths_relative(
        "/Users/Shared/spack/pkgC/lib/libC.dylib",
        "/Users/Shared/spack",
        ("/Users/Shared/spack/pkgA/lib", "/Users/Shared/spack/pkgB/lib", "/usr/local/lib"),
        (
            "/Users/Shared/spack/pkgA/libA.dylib",
            "/Users/Shared/spack/pkgB/libB.dylib",
            "/usr/local/lib/libloco.dylib",
        ),
        "/Users/Shared/spack/pkgC/lib/libC.dylib",
    )
    assert out == {
        "/Users/Shared/spack/pkgA/lib": "@loader_path/../../pkgA/lib",
        "/Users/Shared/spack/pkgB/lib": "@loader_path/../../pkgB/lib",
        "/usr/local/lib": "/usr/local/lib",
        "/Users/Shared/spack/pkgA/libA.dylib": "@loader_path/../../pkgA/libA.dylib",
        "/Users/Shared/spack/pkgB/libB.dylib": "@loader_path/../../pkgB/libB.dylib",
        "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
        "/Users/Shared/spack/pkgC/lib/libC.dylib": "@rpath/libC.dylib",
    }

    out = macho_make_paths_normal(
        "/Users/Shared/spack/pkgC/lib/libC.dylib",
        ("@loader_path/../../pkgA/lib", "@loader_path/../../pkgB/lib", "/usr/local/lib"),
        (
            "@loader_path/../../pkgA/libA.dylib",
            "@loader_path/../../pkgB/libB.dylib",
            "/usr/local/lib/libloco.dylib",
        ),
        "@rpath/libC.dylib",
    )

    assert out == {
        "@rpath/libC.dylib": "/Users/Shared/spack/pkgC/lib/libC.dylib",
        "@loader_path/../../pkgA/lib": "/Users/Shared/spack/pkgA/lib",
        "@loader_path/../../pkgB/lib": "/Users/Shared/spack/pkgB/lib",
        "/usr/local/lib": "/usr/local/lib",
        "@loader_path/../../pkgA/libA.dylib": "/Users/Shared/spack/pkgA/libA.dylib",
        "@loader_path/../../pkgB/libB.dylib": "/Users/Shared/spack/pkgB/libB.dylib",
        "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
    }

    out = macho_make_paths_relative(
        "/Users/Shared/spack/pkgC/bin/exeC",
        "/Users/Shared/spack",
        ("/Users/Shared/spack/pkgA/lib", "/Users/Shared/spack/pkgB/lib", "/usr/local/lib"),
        (
            "/Users/Shared/spack/pkgA/libA.dylib",
            "/Users/Shared/spack/pkgB/libB.dylib",
            "/usr/local/lib/libloco.dylib",
        ),
        None,
    )

    assert out == {
        "/Users/Shared/spack/pkgA/lib": "@loader_path/../../pkgA/lib",
        "/Users/Shared/spack/pkgB/lib": "@loader_path/../../pkgB/lib",
        "/usr/local/lib": "/usr/local/lib",
        "/Users/Shared/spack/pkgA/libA.dylib": "@loader_path/../../pkgA/libA.dylib",
        "/Users/Shared/spack/pkgB/libB.dylib": "@loader_path/../../pkgB/libB.dylib",
        "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
    }

    out = macho_make_paths_normal(
        "/Users/Shared/spack/pkgC/bin/exeC",
        ("@loader_path/../../pkgA/lib", "@loader_path/../../pkgB/lib", "/usr/local/lib"),
        (
            "@loader_path/../../pkgA/libA.dylib",
            "@loader_path/../../pkgB/libB.dylib",
            "/usr/local/lib/libloco.dylib",
        ),
        None,
    )

    assert out == {
        "@loader_path/../../pkgA/lib": "/Users/Shared/spack/pkgA/lib",
        "@loader_path/../../pkgB/lib": "/Users/Shared/spack/pkgB/lib",
        "/usr/local/lib": "/usr/local/lib",
        "@loader_path/../../pkgA/libA.dylib": "/Users/Shared/spack/pkgA/libA.dylib",
        "@loader_path/../../pkgB/libB.dylib": "/Users/Shared/spack/pkgB/libB.dylib",
        "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
    }


@pytest.fixture()
def mock_download(monkeypatch):
    """Mock a failing download strategy."""
@@ -561,10 +466,6 @@ def test_macho_relocation_with_changing_projection(relocation_dict):
    """
    original_rpath = "/foo/bar/baz/abcdef"
    result = macho_find_paths(
        [original_rpath],
        deps=[],
        idpath=None,
        old_layout_root="/foo",
        prefix_to_prefix=relocation_dict,
        [original_rpath], deps=[], idpath=None, prefix_to_prefix=relocation_dict
    )
    assert result[original_rpath] == "/a/b/c/abcdef"

@@ -1,8 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import re
import shutil

@@ -114,49 +112,6 @@ def _copy_somewhere(orig_binary):
    return _copy_somewhere


@pytest.mark.parametrize(
    "start_path,path_root,paths,expected",
    [
        (
            "/usr/bin/test",
            "/usr",
            ["/usr/lib", "/usr/lib64", "/opt/local/lib"],
            [
                os.path.join("$ORIGIN", "..", "lib"),
                os.path.join("$ORIGIN", "..", "lib64"),
                "/opt/local/lib",
            ],
        )
    ],
)
def test_make_relative_paths(start_path, path_root, paths, expected):
    relatives = spack.relocate._make_relative(start_path, path_root, paths)
    assert relatives == expected


@pytest.mark.parametrize(
    "start_path,relative_paths,expected",
    [
        # $ORIGIN will be replaced with os.path.dirname('usr/bin/test')
        # and then normalized
        (
            "/usr/bin/test",
            ["$ORIGIN/../lib", "$ORIGIN/../lib64", "/opt/local/lib"],
            [
                os.sep + os.path.join("usr", "lib"),
                os.sep + os.path.join("usr", "lib64"),
                "/opt/local/lib",
            ],
        ),
        # Relative path without $ORIGIN
        ("/usr/bin/test", ["../local/lib"], ["../local/lib"]),
    ],
)
def test_normalize_relative_paths(start_path, relative_paths, expected):
    normalized = spack.relocate._normalize_relative_paths(start_path, relative_paths)
    assert normalized == expected


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
@@ -182,61 +137,13 @@ def test_relocate_elf_binaries_absolute_paths(binary_with_rpaths, copy_binary, p
    new_binary = copy_binary(orig_binary)

    spack.relocate.relocate_elf_binaries(
        binaries=[str(new_binary)],
        orig_root=str(orig_binary.dirpath()),
        new_root=None,  # Not needed when relocating absolute paths
        new_prefixes={str(orig_binary.dirpath()): "/foo"},
        rel=False,
        # Not needed when relocating absolute paths
        orig_prefix=None,
        new_prefix=None,
        binaries=[str(new_binary)], prefix_to_prefix={str(orig_binary.dirpath()): "/foo"}
    )

    # Some compilers add rpaths so ensure changes included in final result
    assert "/foo/lib:/usr/lib64" in rpaths_for(new_binary)


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_relocate_elf_binaries_relative_paths(binary_with_rpaths, copy_binary):
    # Create an executable, set some RPATHs, copy it to another location
    orig_binary = binary_with_rpaths(rpaths=["lib", "lib64", "/opt/local/lib"])
    new_binary = copy_binary(orig_binary)

    spack.relocate.relocate_elf_binaries(
        binaries=[str(new_binary)],
        orig_root=str(orig_binary.dirpath()),
        new_root=str(new_binary.dirpath()),
        new_prefixes={str(orig_binary.dirpath()): "/foo"},
        rel=True,
        orig_prefix=str(orig_binary.dirpath()),
        new_prefix=str(new_binary.dirpath()),
    )

    # Some compilers add rpaths so ensure changes included in final result
    assert "/foo/lib:/foo/lib64:/opt/local/lib" in rpaths_for(new_binary)


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpdir):
    orig_binary = binary_with_rpaths(
        rpaths=[
            str(prefix_tmpdir.mkdir("lib")),
            str(prefix_tmpdir.mkdir("lib64")),
            "/opt/local/lib",
        ]
    )
    new_binary = copy_binary(orig_binary)

    spack.relocate.make_elf_binaries_relative(
        [str(new_binary)], [str(orig_binary)], str(orig_binary.dirpath())
    )

    # Some compilers add rpaths so ensure changes included in final result
    assert "$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib" in rpaths_for(new_binary)


@pytest.mark.requires_executables("patchelf", "gcc")
@skip_unless_linux
def test_relocate_text_bin_with_message(binary_with_rpaths, copy_binary, prefix_tmpdir):

@@ -3,7 +3,21 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from collections import defaultdict
from typing import Any, Callable, List, NamedTuple, Set, Union
from typing import (
    Any,
    Callable,
    Iterable,
    List,
    NamedTuple,
    Optional,
    Sequence,
    Set,
    Tuple,
    Union,
    overload,
)

from typing_extensions import Literal

import spack.deptypes as dt
import spack.spec
@@ -424,49 +438,95 @@ def traverse_topo_edges_generator(edges, visitor, key=id, root=True, all_edges=F

# High-level API: traverse_edges, traverse_nodes, traverse_tree.

OrderType = Literal["pre", "post", "breadth", "topo"]
CoverType = Literal["nodes", "edges", "paths"]
DirectionType = Literal["children", "parents"]

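
# Aside (not part of the diff): with these Literal aliases, a call such as
# traverse_edges(specs, order="bfs") is rejected by a type checker, since
# "bfs" is not one of the OrderType values; "breadth" is.
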
@overload
def traverse_edges(
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = ...,
    order: OrderType = ...,
    cover: CoverType = ...,
    direction: DirectionType = ...,
    deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
    depth: Literal[False] = False,
    key: Callable[["spack.spec.Spec"], Any] = ...,
    visited: Optional[Set[Any]] = ...,
) -> Iterable["spack.spec.DependencySpec"]: ...


@overload
def traverse_edges(
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = ...,
    order: OrderType = ...,
    cover: CoverType = ...,
    direction: DirectionType = ...,
    deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
    depth: Literal[True],
    key: Callable[["spack.spec.Spec"], Any] = ...,
    visited: Optional[Set[Any]] = ...,
) -> Iterable[Tuple[int, "spack.spec.DependencySpec"]]: ...


@overload
def traverse_edges(
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = ...,
    order: OrderType = ...,
    cover: CoverType = ...,
    direction: DirectionType = ...,
    deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
    depth: bool,
    key: Callable[["spack.spec.Spec"], Any] = ...,
    visited: Optional[Set[Any]] = ...,
) -> Iterable[Union["spack.spec.DependencySpec", Tuple[int, "spack.spec.DependencySpec"]]]: ...


def traverse_edges(
    specs,
    root=True,
    order="pre",
    cover="nodes",
    direction="children",
    specs: Sequence["spack.spec.Spec"],
    root: bool = True,
    order: OrderType = "pre",
    cover: CoverType = "nodes",
    direction: DirectionType = "children",
    deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
    depth=False,
    key=id,
    visited=None,
):
    depth: bool = False,
    key: Callable[["spack.spec.Spec"], Any] = id,
    visited: Optional[Set[Any]] = None,
) -> Iterable[Union["spack.spec.DependencySpec", Tuple[int, "spack.spec.DependencySpec"]]]:
    """
    Generator that yields edges from the DAG, starting from a list of root specs.
    Iterable of edges from the DAG, starting from a list of root specs.

    Arguments:

        specs (list): List of root specs (considered to be depth 0)
        root (bool): Yield the root nodes themselves
        order (str): What order of traversal to use in the DAG. For depth-first
            search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
            For topological order use ``topo``
        cover (str): Determines how extensively to cover the dag. Possible values:
        specs: List of root specs (considered to be depth 0)
        root: Yield the root nodes themselves
        order: What order of traversal to use in the DAG. For depth-first search this can be
            ``pre`` or ``post``. For BFS this should be ``breadth``. For topological order use
            ``topo``
        cover: Determines how extensively to cover the dag. Possible values:
            ``nodes`` -- Visit each unique node in the dag only once.
            ``edges`` -- If a node has been visited once but is reached along a
            new path, it's accepted, but not recursively followed. This traverses
            each 'edge' in the DAG once.
            ``paths`` -- Explore every unique path reachable from the root.
            This descends into visited subtrees and will accept nodes multiple
            times if they're reachable by multiple paths.
        direction (str): ``children`` or ``parents``. If ``children``, does a traversal
            of this spec's children. If ``parents``, traverses upwards in the DAG
            towards the root.
            ``edges`` -- If a node has been visited once but is reached along a new path, it's
            accepted, but not recursively followed. This traverses each 'edge' in the DAG once.
            ``paths`` -- Explore every unique path reachable from the root. This descends into
            visited subtrees and will accept nodes multiple times if they're reachable by multiple
            paths.
        direction: ``children`` or ``parents``. If ``children``, does a traversal of this spec's
            children. If ``parents``, traverses upwards in the DAG towards the root.
        deptype: allowed dependency types
        depth (bool): When ``False``, yield just edges. When ``True`` yield
            the tuple (depth, edge), where depth corresponds to the depth
            at which edge.spec was discovered.
        depth: When ``False``, yield just edges. When ``True`` yield the tuple (depth, edge), where
            depth corresponds to the depth at which edge.spec was discovered.
        key: function that takes a spec and outputs a key for uniqueness test.
        visited (set or None): a set of nodes not to follow
        visited: a set of nodes not to follow

    Returns:
        A generator that yields ``DependencySpec`` if depth is ``False``
        or a tuple of ``(depth, DependencySpec)`` if depth is ``True``.
        An iterable of ``DependencySpec`` if depth is ``False`` or a tuple of
        ``(depth, DependencySpec)`` if depth is ``True``.
    """
    # validate input
    if order == "topo":
@@ -484,7 +544,7 @@ def traverse_edges(
    root_edges = with_artificial_edges(specs)

    # Depth-first
    if order in ("pre", "post"):
    if order == "pre" or order == "post":
        return traverse_depth_first_edges_generator(
            root_edges, visitor, order == "post", root, depth
        )
@@ -496,79 +556,135 @@ def traverse_edges(
    )

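
# Usage sketch (hypothetical spec variable, not part of the diff): the
# Literal-typed overloads make call sites like this type-check precisely:
#
#     for depth, edge in traverse_edges([spec], depth=True, cover="edges"):
#         print("  " * depth + edge.spec.name)
#
# which iterates Tuple[int, DependencySpec], while omitting depth iterates
# plain DependencySpec values.
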
@overload
def traverse_nodes(
    specs,
    root=True,
    order="pre",
    cover="nodes",
    direction="children",
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = ...,
    order: OrderType = ...,
    cover: CoverType = ...,
    direction: DirectionType = ...,
    deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
    depth: Literal[False] = False,
    key: Callable[["spack.spec.Spec"], Any] = ...,
    visited: Optional[Set[Any]] = ...,
) -> Iterable["spack.spec.Spec"]: ...


@overload
def traverse_nodes(
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = ...,
    order: OrderType = ...,
    cover: CoverType = ...,
    direction: DirectionType = ...,
    deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
    depth: Literal[True],
    key: Callable[["spack.spec.Spec"], Any] = ...,
    visited: Optional[Set[Any]] = ...,
) -> Iterable[Tuple[int, "spack.spec.Spec"]]: ...


@overload
def traverse_nodes(
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = ...,
    order: OrderType = ...,
    cover: CoverType = ...,
    direction: DirectionType = ...,
    deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
    depth: bool,
    key: Callable[["spack.spec.Spec"], Any] = ...,
    visited: Optional[Set[Any]] = ...,
) -> Iterable[Union["spack.spec.Spec", Tuple[int, "spack.spec.Spec"]]]: ...


def traverse_nodes(
    specs: Sequence["spack.spec.Spec"],
    *,
    root: bool = True,
    order: OrderType = "pre",
    cover: CoverType = "nodes",
    direction: DirectionType = "children",
    deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
    depth=False,
    key=id,
    visited=None,
):
    depth: bool = False,
    key: Callable[["spack.spec.Spec"], Any] = id,
    visited: Optional[Set[Any]] = None,
) -> Iterable[Union["spack.spec.Spec", Tuple[int, "spack.spec.Spec"]]]:
    """
    Generator that yields specs from the DAG, starting from a list of root specs.
    Iterable of specs from the DAG, starting from a list of root specs.

    Arguments:
        specs (list): List of root specs (considered to be depth 0)
        root (bool): Yield the root nodes themselves
        order (str): What order of traversal to use in the DAG. For depth-first
            search this can be ``pre`` or ``post``. For BFS this should be ``breadth``.
        cover (str): Determines how extensively to cover the dag. Possible values:
        specs: List of root specs (considered to be depth 0)
        root: Yield the root nodes themselves
        order: What order of traversal to use in the DAG. For depth-first search this can be
            ``pre`` or ``post``. For BFS this should be ``breadth``.
        cover: Determines how extensively to cover the dag. Possible values:
            ``nodes`` -- Visit each unique node in the dag only once.
            ``edges`` -- If a node has been visited once but is reached along a
            new path, it's accepted, but not recursively followed. This traverses
            each 'edge' in the DAG once.
            ``paths`` -- Explore every unique path reachable from the root.
            This descends into visited subtrees and will accept nodes multiple
            times if they're reachable by multiple paths.
        direction (str): ``children`` or ``parents``. If ``children``, does a traversal
            of this spec's children. If ``parents``, traverses upwards in the DAG
            towards the root.
            ``edges`` -- If a node has been visited once but is reached along a new path, it's
            accepted, but not recursively followed. This traverses each 'edge' in the DAG once.
            ``paths`` -- Explore every unique path reachable from the root. This descends into
            visited subtrees and will accept nodes multiple times if they're reachable by multiple
            paths.
        direction: ``children`` or ``parents``. If ``children``, does a traversal of this spec's
            children. If ``parents``, traverses upwards in the DAG towards the root.
        deptype: allowed dependency types
        depth (bool): When ``False``, yield just edges. When ``True`` yield
            the tuple ``(depth, edge)``, where depth corresponds to the depth
            at which ``edge.spec`` was discovered.
        depth: When ``False``, yield just edges. When ``True`` yield the tuple ``(depth, edge)``,
            where depth corresponds to the depth at which ``edge.spec`` was discovered.
        key: function that takes a spec and outputs a key for uniqueness test.
        visited (set or None): a set of nodes not to follow
        visited: a set of nodes not to follow

    Yields:
        By default :class:`~spack.spec.Spec`, or a tuple ``(depth, Spec)`` if depth is
        set to ``True``.
    """
    for item in traverse_edges(specs, root, order, cover, direction, deptype, depth, key, visited):
        yield (item[0], item[1].spec) if depth else item.spec
    for item in traverse_edges(
        specs,
        root=root,
        order=order,
        cover=cover,
        direction=direction,
        deptype=deptype,
        depth=depth,
        key=key,
        visited=visited,
    ):
        yield (item[0], item[1].spec) if depth else item.spec  # type: ignore


def traverse_tree(
    specs, cover="nodes", deptype: Union[dt.DepFlag, dt.DepTypes] = "all", key=id, depth_first=True
):
    specs: Sequence["spack.spec.Spec"],
    cover: CoverType = "nodes",
    deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
    key: Callable[["spack.spec.Spec"], Any] = id,
    depth_first: bool = True,
) -> Iterable[Tuple[int, "spack.spec.DependencySpec"]]:
    """
    Generator that yields ``(depth, DependencySpec)`` tuples in the depth-first
    pre-order, so that a tree can be printed from it.

    Arguments:

        specs (list): List of root specs (considered to be depth 0)
        cover (str): Determines how extensively to cover the dag. Possible values:
        specs: List of root specs (considered to be depth 0)
        cover: Determines how extensively to cover the dag. Possible values:
            ``nodes`` -- Visit each unique node in the dag only once.
            ``edges`` -- If a node has been visited once but is reached along a
            new path, it's accepted, but not recursively followed. This traverses
            each 'edge' in the DAG once.
            ``paths`` -- Explore every unique path reachable from the root.
            This descends into visited subtrees and will accept nodes multiple
            times if they're reachable by multiple paths.
            new path, it's accepted, but not recursively followed. This traverses each 'edge' in
            the DAG once.
            ``paths`` -- Explore every unique path reachable from the root. This descends into
            visited subtrees and will accept nodes multiple times if they're reachable by multiple
            paths.
        deptype: allowed dependency types
        key: function that takes a spec and outputs a key for uniqueness test.
        depth_first (bool): Explore the tree in depth-first or breadth-first order.
            When setting ``depth_first=True`` and ``cover=nodes``, each spec only
            occurs once at the shallowest level, which is useful when rendering
            the tree in a terminal.
        depth_first: Explore the tree in depth-first or breadth-first order. When setting
            ``depth_first=True`` and ``cover=nodes``, each spec only occurs once at the shallowest
            level, which is useful when rendering the tree in a terminal.

    Returns:
        A generator that yields ``(depth, DependencySpec)`` tuples in such an order
        that a tree can be printed.
        A generator that yields ``(depth, DependencySpec)`` tuples in such an order that a tree can
        be printed.
    """
    # BFS only makes sense when going over edges and nodes, for paths the tree is
    # identical to DFS, which is much more efficient in that case.

@@ -7,11 +7,12 @@
import subprocess
import sys
from pathlib import Path, PurePath
from typing import Callable, Dict, Optional, Sequence, TextIO, Type, Union, overload

import llnl.util.tty as tty

import spack.error
import spack.util.environment
from spack.util.environment import EnvironmentModifications

__all__ = ["Executable", "which", "which_string", "ProcessError"]

@@ -19,33 +20,29 @@
class Executable:
    """Class representing a program that can be run on the command line."""

    def __init__(self, name):
    def __init__(self, name: str) -> None:
        file_path = str(Path(name))
        if sys.platform != "win32" and name.startswith("."):
            # pathlib strips the ./ from relative paths so it must be added back
            file_path = os.path.join(".", file_path)

        self.exe = [file_path]

        self.default_env = {}

        self.default_envmod = spack.util.environment.EnvironmentModifications()
        self.returncode = None
        self.default_env: Dict[str, str] = {}
        self.default_envmod = EnvironmentModifications()
        self.returncode = 0
        self.ignore_quotes = False

        if not self.exe:
            raise ProcessError("Cannot construct executable for '%s'" % name)

    def add_default_arg(self, *args):
    def add_default_arg(self, *args: str) -> None:
        """Add default argument(s) to the command."""
        self.exe.extend(args)

    def with_default_args(self, *args):
    def with_default_args(self, *args: str) -> "Executable":
        """Same as add_default_arg, but returns a copy of the executable."""
        new = self.copy()
        new.add_default_arg(*args)
        return new

    def copy(self):
    def copy(self) -> "Executable":
        """Return a copy of this Executable."""
        new = Executable(self.exe[0])
        new.exe[:] = self.exe
@@ -53,7 +50,7 @@ def copy(self):
        new.default_envmod.extend(self.default_envmod)
        return new

    def add_default_env(self, key, value):
    def add_default_env(self, key: str, value: str) -> None:
        """Set an environment variable when the command is run.

        Parameters:
@@ -62,68 +59,109 @@ def add_default_env(self, key, value):
        """
        self.default_env[key] = value

    def add_default_envmod(self, envmod):
    def add_default_envmod(self, envmod: EnvironmentModifications) -> None:
        """Set an EnvironmentModifications to use when the command is run."""
        self.default_envmod.extend(envmod)

    @property
    def command(self):
        """The command-line string.

        Returns:
            str: The executable and default arguments
        """
    def command(self) -> str:
        """Returns the entire command-line string"""
        return " ".join(self.exe)

    @property
    def name(self):
        """The executable name.

        Returns:
            str: The basename of the executable
        """
    def name(self) -> str:
        """Returns the executable name"""
        return PurePath(self.path).name

    @property
    def path(self):
        """The path to the executable.

        Returns:
            str: The path to the executable
        """
    def path(self) -> str:
        """Returns the executable path"""
        return str(PurePath(self.exe[0]))

def __call__(self, *args, **kwargs):
|
||||
"""Run this executable in a subprocess.
|
||||
@overload
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
fail_on_error: bool = ...,
|
||||
ignore_errors: Union[int, Sequence[int]] = ...,
|
||||
ignore_quotes: Optional[bool] = ...,
|
||||
timeout: Optional[int] = ...,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
input: Optional[TextIO] = ...,
|
||||
output: Union[Optional[TextIO], str] = ...,
|
||||
error: Union[Optional[TextIO], str] = ...,
|
||||
_dump_env: Optional[Dict[str, str]] = ...,
|
||||
) -> None: ...
|
||||
|
||||
@overload
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
fail_on_error: bool = ...,
|
||||
ignore_errors: Union[int, Sequence[int]] = ...,
|
||||
ignore_quotes: Optional[bool] = ...,
|
||||
timeout: Optional[int] = ...,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
input: Optional[TextIO] = ...,
|
||||
output: Union[Type[str], Callable],
|
||||
error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
|
||||
_dump_env: Optional[Dict[str, str]] = ...,
|
||||
) -> str: ...
|
||||
|
||||
@overload
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
fail_on_error: bool = ...,
|
||||
ignore_errors: Union[int, Sequence[int]] = ...,
|
||||
ignore_quotes: Optional[bool] = ...,
|
||||
timeout: Optional[int] = ...,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
|
||||
input: Optional[TextIO] = ...,
|
||||
output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
|
||||
error: Union[Type[str], Callable],
|
||||
_dump_env: Optional[Dict[str, str]] = ...,
|
||||
) -> str: ...
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
*args: str,
|
||||
fail_on_error: bool = True,
|
||||
ignore_errors: Union[int, Sequence[int]] = (),
|
||||
ignore_quotes: Optional[bool] = None,
|
||||
timeout: Optional[int] = None,
|
||||
env: Optional[Union[Dict[str, str], EnvironmentModifications]] = None,
|
||||
extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = None,
|
||||
input: Optional[TextIO] = None,
|
||||
output: Union[Optional[TextIO], str, Type[str], Callable] = None,
|
||||
error: Union[Optional[TextIO], str, Type[str], Callable] = None,
|
||||
_dump_env: Optional[Dict[str, str]] = None,
|
||||
) -> Optional[str]:
|
||||
"""Runs this executable in a subprocess.
|
||||
|
||||
Parameters:
|
||||
*args (str): Command-line arguments to the executable to run
|
||||
|
||||
Keyword Arguments:
|
||||
_dump_env (dict): Dict to be set to the environment actually
|
||||
used (envisaged for testing purposes only)
|
||||
env (dict or EnvironmentModifications): The environment with which
|
||||
to run the executable
|
||||
extra_env (dict or EnvironmentModifications): Extra items to add to
|
||||
the environment (neither requires nor precludes env)
|
||||
fail_on_error (bool): Raise an exception if the subprocess returns
|
||||
an error. Default is True. The return code is available as
|
||||
``exe.returncode``
|
||||
ignore_errors (int or list): A list of error codes to ignore.
|
||||
If these codes are returned, this process will not raise
|
||||
an exception even if ``fail_on_error`` is set to ``True``
|
||||
ignore_quotes (bool): If False, warn users that quotes are not needed
|
||||
as Spack does not use a shell. Defaults to False.
|
||||
timeout (int or float): The number of seconds to wait before killing
|
||||
the child process
|
||||
input: Where to read stdin from
|
||||
output: Where to send stdout
|
||||
error: Where to send stderr
|
||||
*args: command-line arguments to the executable to run
|
||||
fail_on_error: if True, raises an exception if the subprocess returns an error
|
||||
The return code is available as ``self.returncode``
|
||||
ignore_errors: a sequence of error codes to ignore. If these codes are returned, this
|
||||
process will not raise an exception, even if ``fail_on_error`` is set to ``True``
|
||||
ignore_quotes: if False, warn users that quotes are not needed, as Spack does not
|
||||
use a shell. If None, use ``self.ignore_quotes``.
|
||||
timeout: the number of seconds to wait before killing the child process
|
||||
env: the environment with which to run the executable
|
||||
extra_env: extra items to add to the environment (neither requires nor precludes env)
|
||||
input: where to read stdin from
|
||||
output: where to send stdout
|
||||
error: where to send stderr
|
||||
_dump_env: dict to be set to the environment actually used (envisaged for
|
||||
testing purposes only)
|
||||
|
||||
Accepted values for input, output, and error:
|
||||
|
||||
* python streams, e.g. open Python file objects, or ``os.devnull``
|
||||
* filenames, which will be automatically opened for writing
|
||||
* ``str``, as in the Python string type. If you set these to ``str``,
|
||||
output and error will be written to pipes and returned as a string.
|
||||
If both ``output`` and ``error`` are set to ``str``, then one string
|
||||
@@ -133,8 +171,11 @@ def __call__(self, *args, **kwargs):
|
||||
Behaves the same as ``str``, except that value is also written to
|
||||
``stdout`` or ``stderr``.
|
||||
|
||||
By default, the subprocess inherits the parent's file descriptors.
|
||||
For output and error it's also accepted:
|
||||
|
||||
* filenames, which will be automatically opened for writing
|
||||
|
||||
By default, the subprocess inherits the parent's file descriptors.
|
||||
"""
|
||||
|
||||
def process_cmd_output(out, err):
|
||||
@@ -159,44 +200,34 @@ def process_cmd_output(out, err):
|
||||
sys.stderr.write(errstr)
|
||||
return result
|
||||
|
||||
# Environment
|
||||
env_arg = kwargs.get("env", None)
|
||||
|
||||
# Setup default environment
|
||||
env = os.environ.copy() if env_arg is None else {}
|
||||
self.default_envmod.apply_modifications(env)
|
||||
env.update(self.default_env)
|
||||
current_environment = os.environ.copy() if env is None else {}
|
||||
self.default_envmod.apply_modifications(current_environment)
|
||||
current_environment.update(self.default_env)
|
||||
|
||||
# Apply env argument
|
||||
if isinstance(env_arg, spack.util.environment.EnvironmentModifications):
|
||||
env_arg.apply_modifications(env)
|
||||
elif env_arg:
|
||||
env.update(env_arg)
|
||||
if isinstance(env, EnvironmentModifications):
|
||||
env.apply_modifications(current_environment)
|
||||
elif env:
|
||||
current_environment.update(env)
|
||||
|
||||
# Apply extra env
|
||||
extra_env = kwargs.get("extra_env", {})
|
||||
if isinstance(extra_env, spack.util.environment.EnvironmentModifications):
|
||||
extra_env.apply_modifications(env)
|
||||
else:
|
||||
env.update(extra_env)
|
||||
if isinstance(extra_env, EnvironmentModifications):
|
||||
extra_env.apply_modifications(current_environment)
|
||||
elif extra_env is not None:
|
||||
current_environment.update(extra_env)
|
||||
|
||||
if "_dump_env" in kwargs:
|
||||
kwargs["_dump_env"].clear()
|
||||
kwargs["_dump_env"].update(env)
|
||||
if _dump_env is not None:
|
||||
_dump_env.clear()
|
||||
_dump_env.update(current_environment)
|
||||
|
||||
fail_on_error = kwargs.pop("fail_on_error", True)
|
||||
ignore_errors = kwargs.pop("ignore_errors", ())
|
||||
ignore_quotes = kwargs.pop("ignore_quotes", self.ignore_quotes)
|
||||
timeout = kwargs.pop("timeout", None)
|
||||
if ignore_quotes is None:
|
||||
ignore_quotes = self.ignore_quotes
|
||||
|
||||
# If they just want to ignore one error code, make it a tuple.
|
||||
if isinstance(ignore_errors, int):
|
||||
ignore_errors = (ignore_errors,)
|
||||
|
||||
input = kwargs.pop("input", None)
|
||||
output = kwargs.pop("output", None)
|
||||
error = kwargs.pop("error", None)
|
||||
|
||||
if input is str:
|
||||
raise ValueError("Cannot use `str` as input stream.")
|
||||
|
||||
@@ -230,9 +261,15 @@ def streamify(arg, mode):
|
||||
cmd_line_string = " ".join(escaped_cmd)
|
||||
tty.debug(cmd_line_string)
|
||||
|
||||
result = None
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
cmd, stdin=istream, stderr=estream, stdout=ostream, env=env, close_fds=False
|
||||
cmd,
|
||||
stdin=istream,
|
||||
stderr=estream,
|
||||
stdout=ostream,
|
||||
env=current_environment,
|
||||
close_fds=False,
|
||||
)
|
||||
out, err = proc.communicate(timeout=timeout)
|
||||
|
||||
@@ -248,9 +285,6 @@ def streamify(arg, mode):
|
||||
long_msg += "\n" + result
|
||||
|
||||
raise ProcessError("Command exited with status %d:" % proc.returncode, long_msg)
|
||||
|
||||
return result
|
||||
|
||||
except OSError as e:
|
||||
message = "Command: " + cmd_line_string
|
||||
if " " in self.exe[0]:
|
||||
@@ -286,6 +320,8 @@ def streamify(arg, mode):
|
||||
if close_istream:
|
||||
istream.close()
|
||||
|
||||
return result
|
||||
|
||||
def __eq__(self, other):
|
||||
return hasattr(other, "exe") and self.exe == other.exe
|
||||
|
||||
|
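As a minimal usage sketch of the typed ``__call__`` API above (the command and arguments are illustrative; it assumes ``git`` is on PATH):

    # Hypothetical usage: capture stdout and stderr as a string instead of
    # inheriting the parent's file descriptors.
    from spack.util.executable import which

    git = which("git", required=True)
    sha = git("rev-parse", "HEAD", output=str, error=str)
    print(sha.strip(), git.returncode)  # returncode now defaults to 0 per __init__
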
@@ -14,10 +14,10 @@ default:
  image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }

# CI Platform-Arch
.cray_rhel_zen4:
.cray_rhel_x86_64_v3:
  variables:
    SPACK_TARGET_PLATFORM: "cray-rhel"
    SPACK_TARGET_ARCH: "zen4"
    SPACK_TARGET_ARCH: "x86_64_v3"

.cray_sles_zen4:
  variables:
@@ -884,7 +884,7 @@ aws-pcluster-build-neoverse_v1:
    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

.generate-cray-rhel:
  tags: [ "cray-rhel-zen4", "public" ]
  tags: [ "cray-rhel-x86_64_v3", "public" ]
  extends: [ ".generate-cray" ]

.generate-cray-sles:
@@ -896,7 +896,7 @@ aws-pcluster-build-neoverse_v1:
# E4S - Cray RHEL
#######################################
.e4s-cray-rhel:
  extends: [ ".cray_rhel_zen4" ]
  extends: [ ".cray_rhel_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: e4s-cray-rhel

@@ -904,7 +904,6 @@ e4s-cray-rhel-generate:
  extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ]

e4s-cray-rhel-build:
  allow_failure: true # libsci_cray.so broken, misses DT_NEEDED for libdl.so
  extends: [ ".build", ".e4s-cray-rhel" ]
  trigger:
    include:
@@ -923,10 +922,10 @@ e4s-cray-rhel-build:
  variables:
    SPACK_CI_STACK_NAME: e4s-cray-sles

e4s-cray-sles-generate:
.e4s-cray-sles-generate:
  extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]

e4s-cray-sles-build:
.e4s-cray-sles-build:
  allow_failure: true # libsci_cray.so broken, misses DT_NEEDED for libdl.so
  extends: [ ".build", ".e4s-cray-sles" ]
  trigger:

@@ -1,31 +1,27 @@
compilers:
- compiler:
    spec: cce@15.0.1
    spec: cce@=18.0.0
    paths:
      cc: cc
      cxx: CC
      f77: ftn
      fc: ftn
      cc: /opt/cray/pe/cce/18.0.0/bin/craycc
      cxx: /opt/cray/pe/cce/18.0.0/bin/crayCC
      f77: /opt/cray/pe/cce/18.0.0/bin/crayftn
      fc: /opt/cray/pe/cce/18.0.0/bin/crayftn
    flags: {}
    operating_system: rhel8
    target: any
    modules:
    - PrgEnv-cray/8.3.3
    - cce/15.0.1
    environment:
      set:
        MACHTYPE: x86_64
    target: x86_64
    modules: []
    environment: {}
    extra_rpaths: []
- compiler:
    spec: gcc@11.2.0
    spec: gcc@=8.5.0
    paths:
      cc: gcc
      cxx: g++
      f77: gfortran
      fc: gfortran
      cc: /usr/bin/gcc
      cxx: /usr/bin/g++
      f77: /usr/bin/gfortran
      fc: /usr/bin/gfortran
    flags: {}
    operating_system: rhel8
    target: any
    modules:
    - PrgEnv-gnu
    - gcc/11.2.0
    environment: {}
    target: x86_64
    modules: []
    environment: {}
    extra_rpaths: []

@@ -1,16 +1,15 @@
packages:
  # EXTERNALS
  cray-mpich:
    buildable: false
    externals:
    - spec: cray-mpich@8.1.25 %cce@15.0.1
      prefix: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0
    - spec: cray-mpich@8.1.30 %cce
      prefix: /opt/cray/pe/mpich/8.1.30/ofi/cray/18.0
      modules:
      - cray-mpich/8.1.25
      - cray-mpich/8.1.30
  cray-libsci:
    buildable: false
    externals:
    - spec: cray-libsci@23.02.1.1 %cce@15.0.1
      prefix: /opt/cray/pe/libsci/23.02.1.1/CRAY/9.0/x86_64/
    - spec: cray-libsci@24.07.0 %cce
      prefix: /opt/cray/pe/libsci/24.07.0/CRAY/18.0/x86_64/
      modules:
      - cray-libsci/23.02.1.1
      - cray-libsci/24.07.0

@@ -0,0 +1,4 @@
ci:
  pipeline-gen:
  - build-job:
      tags: ["cray-rhel-x86_64_v3"]

@@ -1,4 +0,0 @@
ci:
  pipeline-gen:
  - build-job:
      tags: ["cray-rhel-zen4"]
@@ -13,6 +13,7 @@ spack:
  - openjpeg  # CMakePackage
  - r-rcpp  # RPackage
  - ruby-rake  # RubyPackage
  - perl-data-dumper  # PerlPackage
  - arch:
    - '%gcc'

@@ -14,8 +14,7 @@ spack:

  packages:
    all:
      prefer:
      - "%cce"
      require: "%cce@18.0.0 target=x86_64_v3"
      compiler: [cce]
      providers:
        blas: [cray-libsci]
@@ -23,17 +22,15 @@ spack:
        mpi: [cray-mpich]
        tbb: [intel-tbb]
        scalapack: [netlib-scalapack]
      target: [zen4]
      variants: +mpi

    ncurses:
      require: +termlib ldflags=-Wl,--undefined-version
    tbb:
      require: "intel-tbb"
    binutils:
      variants: +ld +gold +headers +libiberty ~nls
    boost:
      variants: +python +filesystem +iostreams +system
    cuda:
      version: [11.7.0]
    elfutils:
      variants: ~nls
      require: "%gcc"
@@ -43,18 +40,14 @@ spack:
      variants: +fortran +hl +shared
    libfabric:
      variants: fabrics=sockets,tcp,udp,rxm
    libunwind:
      variants: +pic +xz
    mgard:
      require:
      - "@2023-01-10:"
    mpich:
      variants: ~wrapperrpath
    ncurses:
      variants: +termlib
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
      require: "@5.11 ~qt ^[virtuals=gl] osmesa"
      require: "~qt ^[virtuals=gl] osmesa"
    trilinos:
      require:
      - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack
@@ -65,12 +58,6 @@ spack:
      - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko]
      - one_of: [+superlu-dist, ~superlu-dist]
      - one_of: [+shylu, ~shylu]
    xz:
      variants: +pic
    mesa:
      version: [21.3.8]
    unzip:
      require: "%gcc"

  specs:
  # CPU
@@ -78,62 +65,43 @@ spack:
  - aml
  - arborx
  - argobots
  - bolt
  - butterflypack
  - boost +python +filesystem +iostreams +system
  - cabana
  - caliper
  - chai
  - charliecloud
  - conduit
  # - cp2k +mpi # libxsmm: ftn-78 ftn: ERROR in command linel; The -f option has an invalid argument, "tree-vectorize".
  - datatransferkit
  - flecsi
  - flit
  - flux-core
  - fortrilinos
  - ginkgo
  - globalarrays
  - gmp
  - gotcha
  - h5bench
  - hdf5-vol-async
  - hdf5-vol-cache
  - hdf5-vol-cache cflags=-Wno-error=incompatible-function-pointer-types
  - hdf5-vol-log
  - heffte +fftw
  - hpx max_cpu_count=512 networking=mpi
  - hypre
  - kokkos +openmp
  - kokkos-kernels +openmp
  - lammps
  - legion
  - libnrm
  #- libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # mgard:
  - libquo
  - libunwind
  - mercury
  - metall
  - mfem
  # - mgard +serial +openmp +timing +unstructured ~cuda # mgard
  - mpark-variant
  - mpifileutils ~xattr
  - mpifileutils ~xattr cflags=-Wno-error=implicit-function-declaration
  - nccmp
  - nco
  - netlib-scalapack
  - omega-h
  - openmpi
  - netlib-scalapack cflags=-Wno-error=implicit-function-declaration
  - openpmd-api ^adios2~mgard
  - papi
  - papyrus
  - pdt
  - petsc
  - plumed
  - precice
  - pumi
  - py-h5py +mpi
  - py-h5py ~mpi
  - py-libensemble +mpi +nlopt
  - py-petsc4py
  - qthreads scheduler=distrib
  - raja
  - slate ~cuda
@@ -146,8 +114,7 @@ spack:
  - swig@4.0.2-fortran
  - sz3
  - tasmanian
  - tau +mpi +python
  - trilinos@13.0.1 +belos +ifpack2 +stokhos
  - trilinos +belos +ifpack2 +stokhos
  - turbine
  - umap
  - umpire
@@ -157,27 +124,47 @@ spack:
  # - alquimia # pflotran: petsc-3.19.4-c6pmpdtpzarytxo434zf76jqdkhdyn37/lib/petsc/conf/rules:169: material_aux.o] Error 1: fortran errors
  # - amrex # disabled temporarily pending resolution of unreproducible CI failure
  # - axom # axom: CMake Error at axom/sidre/cmake_install.cmake:154 (file): file INSTALL cannot find "/tmp/gitlab-runner-2/spack-stage/spack-stage-axom-0.8.1-jvol6riu34vuyqvrd5ft2gyhrxdqvf63/spack-build-jvol6ri/lib/fortran/axom_spio.mod": No such file or directory.
  # - bolt # ld.lld: error: CMakeFiles/bolt-omp.dir/kmp_gsupport.cpp.o: symbol GOMP_atomic_end@@GOMP_1.0 has undefined version GOMP_1.0
  # - bricks # bricks: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
  # - butterflypack ^netlib-scalapack cflags=-Wno-error=implicit-function-declaration # ftn-2116 ftn: INTERNAL "driver" was terminated due to receipt of signal 01: Hangup.
  # - caliper # papi: papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
  # - charliecloud # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - cp2k +mpi # libxsmm: ftn-78 ftn: ERROR in command linel; The -f option has an invalid argument, "tree-vectorize".
  # - dealii # llvm@14.0.6: ?; intel-tbb@2020.3: clang-15: error: unknown argument: '-flifetime-dse=1'; assimp@5.2.5: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
  # - dyninst # requires %gcc
  # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 # llvm@14.0.6: ?;
  # - exaworks # rust: ld.lld: error: relocation R_X86_64_32 cannot be used against local symbol; recompile with -fPIC'; defined in /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o, referenced by /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o:(__no_mmap_for_malloc)
  # - flux-core # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - fortrilinos # trilinos-14.0.0: packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp:67:8: error: no type named 'uint32_t' in namespace 'std'
  # - gasnet # configure error: User requested --enable-ofi but I don't know how to build ofi programs for your system
  # - gptune # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
  # - hpctoolkit # dyninst requires %gcc
  # - hpx max_cpu_count=512 networking=mpi # libxcrypt-4.4.35
  # - lammps # lammps-20240829.1: Reversed (or previously applied) patch detected! Assume -R? [n]
  # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # mgard:
  # - mgard +serial +openmp +timing +unstructured ~cuda # mgard
  # - nrm # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
  # - nvhpc # requires %gcc
  # - omega-h # trilinos-13.4.1: packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp:112:48: error: unknown type name 'uint32_t'
  # - openmpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - papi # papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
  # - parsec ~cuda # parsec: parsec/fortran/CMakeFiles/parsec_fortran.dir/parsecf.F90.o: ftn-2103 ftn: WARNING in command line. The -W extra option is not supported or invalid and will be ignored.
  # - phist # fortran_bindings/CMakeFiles/phist_fort.dir/phist_testing.F90.o: ftn-78 ftn: ERROR in command line. The -f option has an invalid argument, "no-math-errno".
  # - plasma # %cce conflict
  # - plumed # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-h5py +mpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-h5py ~mpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-jupyterhub # rust: ld.lld: error: relocation R_X86_64_32 cannot be used against local symbol; recompile with -fPIC'; defined in /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o, referenced by /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o:(__no_mmap_for_malloc)
  # - py-libensemble +mpi +nlopt # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-petsc4py # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - quantum-espresso # quantum-espresso: CMake Error at cmake/FindSCALAPACK.cmake:503 (message): A required library with SCALAPACK API not found. Please specify library
  # - scr # scr: make[2]: *** [examples/CMakeFiles/test_ckpt_F.dir/build.make:112: examples/test_ckpt_F] Error 1: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0/lib/libmpi_cray.so: undefined reference to `PMI_Barrier'
  # - strumpack ~slate # strumpack: [test/CMakeFiles/test_HSS_seq.dir/build.make:117: test/test_HSS_seq] Error 1: ld.lld: error: undefined reference due to --no-allow-shlib-undefined: mpi_abort_
  # - tau +mpi +python # libelf: configure: error: installation or configuration problem: C compiler cannot create executables.; papi: papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
  # - upcxx # upcxx: configure error: User requested --enable-ofi but I don't know how to build ofi programs for your system
  # - variorum # variorum: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/lib64/libpals.so.0: undefined reference to `json_array_append_new@@libjansson.so.4'
  # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [<builtin>: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2
  # - warpx +python # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
  # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [<builtin>: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2

  cdash:
    build-group: E4S Cray

@@ -31,8 +31,10 @@ spack:
  specs:
  # CPU
  - acts +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +tgeo cxxstd=20
  - celeritas +geant4 +hepmc3 +openmp +root +shared +vecgeom cxxstd=20
  - dd4hep +ddalign +ddcad +ddcond +dddetectors +dddigi +ddeve +ddg4 +ddrec +edm4hep +hepmc3 +lcio +utilityapps +xercesc
  - delphes +pythia8
  - dpmjet
  - edm4hep
  - fastjet
  - fjcontrib
@@ -45,11 +47,25 @@ spack:
  - lhapdf +python
  - madgraph5amc
  - opendatadetector
  - pandoramonitoring
  - pandorapfa
  - pandorasdk
  - podio +rntuple +sio
  - py-awkward
  - py-boost-histogram
  - py-hepunits
  - py-hist
  - py-histbook
  - py-histoprint
  - py-iminuit
  - py-mplhep
  - py-particle
  - py-uhi
  - py-uproot +lz4 +xrootd +zstd
  - py-vector
  - pythia8 +evtgen +fastjet +hdf5 +hepmc +hepmc3 +lhapdf ~madgraph5amc +python +rivet ~root # pythia8 and root circularly depend
  - rivet hepmc=3
  - root +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl ~postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +unuran +vc +vdt +veccore +webgui +x +xml +xrootd
  - root +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +unuran +vc +vdt +veccore +webgui +x +xml +xrootd
  - sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo
  - thepeg ~rivet
  - vecgeom +gdml +geant4 +root

@@ -16,3 +16,8 @@ class Gmake(Package):

    def do_stage(self):
        mkdirp(self.stage.source_path)

    def setup_dependent_package(self, module, dspec):
        module.make = MakeExecutable(
            "make", jobs=determine_number_of_jobs(parallel=dspec.package.parallel)
        )

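A minimal sketch of what the new ``setup_dependent_package`` hook above provides to dependents (the package below is hypothetical; ``make`` is injected into its module namespace by gmake):

    # Hypothetical dependent package: `make` resolves to the MakeExecutable
    # that gmake's setup_dependent_package created, with the job count derived
    # from this package's `parallel` attribute.
    class Demo(Package):
        depends_on("gmake", type="build")

        def install(self, spec, prefix):
            make("install", f"PREFIX={prefix}")
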
@@ -1,13 +1,14 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.paths
import spack.store
import os

from spack.hooks.sbang import sbang_shebang_line
from spack.package import *


class OldSbang(Package):
    """Toy package for testing the old sbang replacement problem"""
    """Package for testing sbang relocation"""

    homepage = "https://www.example.com"
    url = "https://www.example.com/old-sbang.tar.gz"
@@ -16,23 +17,11 @@ class OldSbang(Package):

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        contents = f"""\
{sbang_shebang_line()}
#!/usr/bin/env python3

        sbang_style_1 = """#!/bin/bash {0}/bin/sbang
#!/usr/bin/env python

{1}
""".format(
            spack.paths.prefix, prefix.bin
        )
        sbang_style_2 = """#!/bin/sh {0}/bin/sbang
#!/usr/bin/env python

{1}
""".format(
            spack.store.STORE.unpadded_root, prefix.bin
        )
        with open("%s/sbang-style-1.sh" % self.prefix.bin, "w", encoding="utf-8") as f:
            f.write(sbang_style_1)

        with open("%s/sbang-style-2.sh" % self.prefix.bin, "w", encoding="utf-8") as f:
            f.write(sbang_style_2)
{prefix.bin}
"""
        with open(os.path.join(self.prefix.bin, "script.sh"), "w", encoding="utf-8") as f:
            f.write(contents)

@@ -93,7 +93,7 @@ class AbseilCpp(CMakePackage):
    depends_on("cmake@3.5:", when="@20190312:", type="build")
    depends_on("cmake@3.1:", type="build")

    depends_on("googletest", type="build", when="@20220623:")
    depends_on("googletest~absl", type="test", when="@20220623:")

    def cmake_args(self):
        run_tests = self.run_tests and self.spec.satisfies("@20220623:")

@@ -16,6 +16,7 @@ class ActsAlgebraPlugins(CMakePackage):

    license("MPL-2.0", checked_by="stephenswat")

    version("0.26.2", sha256="0170f22e1a75493b86464f27991117bc2c5a9d52554c75786e321d4c591990e7")
    version("0.26.1", sha256="8eb1e9e28ec2839d149b6a6bddd0f983b0cdf71c286c0aeb67ede31727c5b7d3")
    version("0.26.0", sha256="301702e3d0a3d12e46ae6d949f3027ddebd0b1167cbb3004d9a4a5697d3adc7f")
    version("0.25.0", sha256="bb0cba6e37558689d780a6de8f749abb3b96f8cd9e0c8851474eb4532e1e98b8")

@@ -40,6 +40,7 @@ class Acts(CMakePackage, CudaPackage):
    # Supported Acts versions
    version("main", branch="main")
    version("master", branch="main", deprecated=True)  # For compatibility
    version("38.2.0", commit="9cb8f4494656553fd9b85955938b79b2fac4c9b0", submodules=True)
    version("38.1.0", commit="8a20c88808f10bf4fcdfd7c6e077f23614c3ab90", submodules=True)
    version("38.0.0", commit="0a6b5155e29e3b755bf351b8a76067fff9b4214b", submodules=True)
    version("37.4.0", commit="4ae9a44f54c854599d1d753222ec36e0b5b4e9c7", submodules=True)
@@ -423,6 +424,10 @@ class Acts(CMakePackage, CudaPackage):
    for _scalar in _scalar_values:
        depends_on(f"detray scalar={_scalar}", when=f"scalar={_scalar}")

    # ACTS enables certain options anyway based on other options
    conflicts("~svg", when="+traccc")
    conflicts("~json", when="+traccc")

    # ACTS has been using C++17 for a while, which precludes use of old GCC
    conflicts("%gcc@:7", when="@0.23:")
    # When using C++20, disable gcc 9 and lower.

@@ -0,0 +1,4 @@
#!/bin/sh
cd ${0%/*} || exit 1 # Run from this directory

applications/Allwmake $targetType $*

@@ -0,0 +1,9 @@
#!/bin/sh
cd ${0%/*} || exit 1 # Run from this directory

# Parse arguments for library compilation
. $WM_PROJECT_DIR/wmake/scripts/AllwmakeParseArguments

wmake $targetType solvers/additiveFoam/functionObjects/ExaCA
wmake $targetType solvers/additiveFoam/movingHeatSource
wmake $targetType solvers/additiveFoam
@@ -2,10 +2,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import inspect
import os

import llnl.util.tty as tty

import spack.pkg.builtin.openfoam as openfoam
from spack.package import *
from spack.pkg.builtin.openfoam import add_extra_files
from spack.version import Version


class Additivefoam(Package):
@@ -29,14 +33,36 @@ class Additivefoam(Package):
    depends_on("openfoam-org@10")

    common = ["spack-derived-Allwmake"]
    assets = ["applications/Allwmake", "Allwmake"]
    assets = [join_path("applications", "Allwmake"), "Allwmake"]

    build_script = "./spack-derived-Allwmake"

    phases = ["configure", "build", "install"]

    def add_extra_files(self, common, local_prefix, local):
        """Copy additional common and local files into the stage.source_path
        from the openfoam/common and the package/assets directories,
        respectively. Modified from `spack.pkg.builtin.openfoam.add_extra_files()`.
        """
        outdir = self.stage.source_path
        indir = join_path(os.path.dirname(inspect.getfile(openfoam)), "common")
        for f in common:
            tty.info("Added file {0}".format(f))
            openfoam.install(join_path(indir, f), join_path(outdir, f))

        indir = join_path(self.package_dir, "assets", local_prefix)
        for f in local:
            tty.info("Added file {0}".format(f))
            openfoam.install(join_path(indir, f), join_path(outdir, f))

    def patch(self):
        add_extra_files(self, self.common, self.assets)
        spec = self.spec
        asset_dir = ""
        if Version("main") in spec.versions:
            asset_dir = "assets_main"
        elif Version("1.0.0") in spec.versions:
            asset_dir = "assets_1.0.0"
        self.add_extra_files(self.common, asset_dir, self.assets)

    def configure(self, spec, prefix):
        pass

@@ -12,8 +12,8 @@ class Alpgen(CMakePackage, MakefilePackage):
    in hadronic collisions.
    """

    homepage = "http://mlm.home.cern.ch/mlm/alpgen/"
    url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz"
    homepage = "https://alpgen.web.cern.ch/"
    url = "https://alpgen.web.cern.ch/V2.1/v214.tgz"

    tags = ["hep"]

@@ -25,6 +25,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
    license("BSD-3-Clause")

    version("develop", branch="development")
    version("25.01", sha256="29eb35cf67d66b0fd0654282454c210abfadf27fcff8478b256e3196f237c74f")
    version("24.12", sha256="ca4b41ac73fabb9cf3600b530c9823eb3625f337d9b7b9699c1089e81c67fc67")
    version("24.11", sha256="31cc37b39f15e02252875815f6066046fc56a479bf459362b9889b0d6a202df6")
    version("24.10", sha256="a2d15e417bd7c41963749338e884d939c80c5f2fcae3279fe3f1b463e3e4208a")
@@ -151,6 +152,8 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):

    # Build dependencies
    depends_on("mpi", when="+mpi")
    with when("+linear_solvers"):
        depends_on("rocsparse", when="@25.01: +rocm")
    with when("+fft"):
        depends_on("rocfft", when="+rocm")
        depends_on("fftw@3", when="~cuda ~rocm ~sycl")

@@ -16,6 +16,7 @@ class Armadillo(CMakePackage):

    license("Apache-2.0")

    version("14.2.2", sha256="3054c8e63db3abdf1a5c8f9fdb7e6b4ad833f9bcfb58324c0ff86de0784c70e0")
    version("14.0.3", sha256="ebd6215eeb01ee412fed078c8a9f7f87d4e1f6187ebcdc1bc09f46095a4f4003")
    version("14.0.2", sha256="248e2535fc092add6cb7dea94fc86ae1c463bda39e46fd82d2a7165c1c197dff")
    version("12.8.4", sha256="558fe526b990a1663678eff3af6ec93f79ee128c81a4c8aef27ad328fae61138")
@@ -33,14 +34,14 @@ class Armadillo(CMakePackage):
    depends_on("c", type="build")
    depends_on("cxx", type="build")

    variant("hdf5", default=False, description="Include HDF5 support")
    variant("hdf5", default=False, description="Include HDF5 support", when="@:10")

    depends_on("cmake@2.8.12:", type="build")
    depends_on("cmake@3.5:", type="build", when="@14:")
    depends_on("arpack-ng")  # old arpack causes undefined symbols
    depends_on("blas")
    depends_on("lapack")
    depends_on("superlu@5.2:")
    depends_on("superlu@5.2:5")  # only superlu@5 is supported
    depends_on("hdf5", when="+hdf5")

    # Adds an `#undef linux` to prevent preprocessor expansion of include

@@ -37,7 +37,7 @@ class BigdftChess(AutotoolsPackage, CudaPackage):
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")
    depends_on("pkg-config", type="build")
    depends_on("pkgconfig", type="build")

    depends_on("python@3.0:", type=("build", "run"))

@@ -36,7 +36,7 @@ class BigdftCore(AutotoolsPackage, CudaPackage):
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")
    depends_on("pkg-config", type="build")
    depends_on("pkgconfig", type="build")

    depends_on("python@3.0:", type=("build", "run"))

@@ -35,7 +35,7 @@ class BigdftPsolver(AutotoolsPackage, CudaPackage):
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")
    depends_on("pkg-config", type="build")
    depends_on("pkgconfig", type="build")

    depends_on("python@3.0:", type=("build", "run"))

@@ -28,7 +28,7 @@ class BigdftSpred(AutotoolsPackage):
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")
    depends_on("pkg-config", type="build")
    depends_on("pkgconfig", type="build")

    variant("mpi", default=True, description="Enable MPI support")
    variant("openmp", default=True, description="Enable OpenMP support")

@@ -29,6 +29,7 @@ class Boost(Package):
    license("BSL-1.0")

    version("develop", branch="develop", submodules=True)
    version("1.87.0", sha256="af57be25cb4c4f4b413ed692fe378affb4352ea50fbe294a11ef548f4d527d89")
    version("1.86.0", sha256="1bed88e40401b2cb7a1f76d4bab499e352fa4d0c5f31c0dbae64e24d34d7513b")
    version("1.85.0", sha256="7009fe1faa1697476bdc7027703a2badb84e849b7b0baad5086b087b971f8617")
    version("1.84.0", sha256="cc4b893acf645c9d4b698e9a0f08ca8846aa5d6c68275c14c3e7949c24109454")
@@ -287,6 +288,9 @@ def libs(self):
    # boost-python in 1.72.0 broken with cxxstd=98
    conflicts("cxxstd=98", when="+mpi+python @1.72.0")

    # boost-mpi depends on boost-python since 1.87.0
    conflicts("~python", when="+mpi @1.87.0:")

    # Container's Extended Allocators were not added until 1.56.0
    conflicts("+container", when="@:1.55")

@@ -440,6 +444,14 @@ def libs(self):
        when="@1.82.0 platform=windows",
    )

    # https://github.com/boostorg/context/pull/280
    patch(
        "https://github.com/boostorg/context/commit/d11cbccc87da5d6d41c04f3949e18d49c43e62fc.patch?full_index=1",
        sha256="e2d37f9e35e8e238977de9af32604a8e1c2648d153df1d568935a20216b5c67f",
        when="@1.87.0",
        working_dir="libs/context",
    )

    def patch(self):
        # Disable SSSE3 and AVX2 when using the NVIDIA compiler
        if self.spec.satisfies("%nvhpc"):

@@ -25,7 +25,7 @@ class Bwa(Package):
    )

    depends_on("c", type="build")  # generated

    depends_on("gmake", type="build")
    depends_on("zlib-api")
    depends_on("sse2neon", when="target=aarch64:")

@@ -15,6 +15,7 @@ class Cepgen(CMakePackage):

    license("GPL-3.0-or-later")

    version("1.2.5", sha256="5016c5a9b505035f849f47bdf35ecfb8c98d45dd1e086fae64f264a30adb120d")
    version("1.1.0", sha256="2a4eaed161f007269516cbfb6e90421e657ab1922d4509de0165f08dde91bf3d")
    version(
        "1.0.2patch1", sha256="333bba0cb1965a98dec127e00c150eab1a515cd348a90f7b1d66d5cd8d206d21"

@@ -20,7 +20,7 @@ class CgsiGsoap(CMakePackage):

    depends_on("c", type="build")

    depends_on("pkg-config", type="build")
    depends_on("pkgconfig", type="build")
    depends_on("gsoap")
    depends_on("voms")
    depends_on("globus-common")

@@ -97,7 +97,8 @@ class Chai(CachedCMakePackage, CudaPackage, ROCmPackage):
    )
    version("1.0", tag="v1.0", commit="501a098ad879dc8deb4a74fcfe8c08c283a10627", submodules=True)

    depends_on("cxx", type="build")  # generated
    depends_on("c", type="build")
    depends_on("cxx", type="build")

    # Patching Umpire for dual BLT targets import changed MPI target name in Umpire link interface
    # We propagate the patch here.

@@ -108,7 +108,9 @@ def pgo_train(self):
        # Run spack solve --fresh hdf5 with instrumented clingo.
        python_runtime_env = EnvironmentModifications()
        python_runtime_env.extend(
            spack.user_environment.environment_modifications_for_specs(self.spec)
            spack.user_environment.environment_modifications_for_specs(
                self.spec, set_package_py_globals=False
            )
        )
        python_runtime_env.unset("SPACK_ENV")
        python_runtime_env.unset("SPACK_PYTHON")

@@ -30,6 +30,8 @@ class Cmake(Package):
    license("BSD-3-Clause")

    version("master", branch="master")
    version("3.31.4", sha256="a6130bfe75f5ba5c73e672e34359f7c0a1931521957e8393a5c2922c8b0f7f25")
    version("3.31.3", sha256="fac45bc6d410b49b3113ab866074888d6c9e9dc81a141874446eb239ac38cb87")
    version("3.31.2", sha256="42abb3f48f37dbd739cdfeb19d3712db0c5935ed5c2aef6c340f9ae9114238a2")
    version("3.31.1", sha256="c4fc2a9bd0cd5f899ccb2fb81ec422e175090bc0de5d90e906dd453b53065719")
    version("3.31.0", sha256="300b71db6d69dcc1ab7c5aae61cbc1aa2778a3e00cbd918bc720203e311468c3")

@@ -790,7 +790,7 @@ def edit(self, pkg, spec, prefix):
                "# include Plumed.inc as recommended by"
                "PLUMED to include libraries and flags"
            )
            mkf.write("include {0}\n".format(spec["plumed"].package.plumed_inc))
            mkf.write("include {0}\n".format(self.pkg["plumed"].plumed_inc))

            mkf.write("\n# COMPILER, LINKER, TOOLS\n\n")
            mkf.write(

@@ -30,13 +30,10 @@ class Dbus(AutotoolsPackage, MesonPackage):
    )

    # Note: odd minor versions are unstable, keep last stable version preferred
    version("1.16.0", sha256="9f8ca5eb51cbe09951aec8624b86c292990ae2428b41b856e2bed17ec65c8849")
    version("1.15.12", sha256="0589c9c707dd593e31f0709caefa5828e69c668c887a7c0d2e5ba445a86bae4d")
    version("1.15.10", sha256="f700f2f1d0473f11e52f3f3e179f577f31b85419f9ae1972af8c3db0bcfde178")
    version(
        "1.14.10",
        sha256="ba1f21d2bd9d339da2d4aa8780c09df32fea87998b73da24f49ab9df1e36a50f",
        preferred=True,
    )
    version("1.14.10", sha256="ba1f21d2bd9d339da2d4aa8780c09df32fea87998b73da24f49ab9df1e36a50f")
    version("1.13.6", sha256="b533693232d36d608a09f70c15440c1816319bac3055433300d88019166c1ae4")
    version("1.12.8", sha256="e2dc99e7338303393b6663a98320aba6a63421bcdaaf571c8022f815e5896eb3")
    version("1.11.2", sha256="5abc4c57686fa82669ad0039830788f9b03fdc4fff487f0ccf6c9d56ba2645c9")

@@ -24,6 +24,9 @@ class Dcap(AutotoolsPackage):
    depends_on("libtool", type="build")
    depends_on("m4", type="build")

    depends_on("openssl")
    depends_on("zlib-api")

    variant("plugins", default=True, description="Build plugins")

    def patch(self):

@@ -19,6 +19,7 @@ class Detray(CMakePackage):

    license("MPL-2.0", checked_by="stephenswat")

    version("0.87.0", sha256="2d4a76432dd6ddbfc00b88b5d482072e471fefc264b60748bb1f9a123963576e")
    version("0.86.0", sha256="98350c94e8a2395b8712b7102fd449536857e8158b38a96cc913c79b70301170")
    version("0.85.0", sha256="a0121a27fd08243d4a6aab060e8ab379ad5129e96775b45f6a683835767fa8e7")
    version("0.84.0", sha256="b1d133a97dc90b1513f8c1ef235ceaa542d80243028a41f59a79300c7d71eb25")
@@ -77,6 +78,7 @@ class Detray(CMakePackage):
    depends_on("acts-algebra-plugins +vc", when="+vc")
    depends_on("acts-algebra-plugins +eigen", when="+eigen")
    depends_on("acts-algebra-plugins +smatrix", when="+smatrix")
    depends_on("acts-algebra-plugins@0.26.0:", when="@0.87:")

    # Detray imposes requirements on the C++ standard values used by Algebra
    # Plugins.

@@ -9,7 +9,7 @@ class Dftd4(MesonPackage):
    """Generally Applicable Atomic-Charge Dependent London Dispersion Correction"""

    homepage = "https://www.chemie.uni-bonn.de/pctc/mulliken-center/software/dftd4"
    url = "https://github.com/dftd4/dftd4/releases/download/v3.5.0/dftd4-3.5.0-source.tar.xz"
    url = "https://github.com/dftd4/dftd4/releases/download/v0.0.0/dftd4-0.0.0.tar.xz"
    git = "https://github.com/dftd4/dftd4.git"

    maintainers("awvwgk")
@@ -17,6 +17,8 @@ class Dftd4(MesonPackage):
    license("LGPL-3.0-only")

    version("main", branch="main")
    version("3.7.0", sha256="4e8749df6852bf863d5d1831780a2d30e9ac4afcfebbbfe5f6a6a73d06d6c6ee")
    version("3.6.0", sha256="56b3b4650853a34347d3d56c93d7596ecbe2208c4a14dbd027959fd4a009679d")
    version("3.5.0", sha256="d2bab992b5ef999fd13fec8eb1da9e9e8d94b8727a2e624d176086197a00a46f")
    version("3.4.0", sha256="24fcb225cdd5c292ac26f7d3204ee3c4024174adb5272eeda9ae7bc57113ec8d")
    version("3.3.0", sha256="408720b8545532d5240dd743c05d57b140af983192dad6d965b0d79393d0a9ef")
@@ -54,3 +56,8 @@ def meson_args(self):
            "-Dopenmp={0}".format(str("+openmp" in self.spec).lower()),
            "-Dpython={0}".format(str("+python" in self.spec).lower()),
        ]

    def url_for_version(self, version):
        if version <= Version("3.6.0"):
            return f"https://github.com/dftd4/dftd4/releases/download/v{version}/dftd4-{version}-source.tar.xz"
        return super().url_for_version(version)

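The new ``url_for_version`` above keeps the old ``-source.tar.xz`` layout for releases up to 3.6.0 and defers to the class-level ``url`` template otherwise. Illustratively (URLs derived from the code above, not independently verified):

    # version 3.5.0 (old layout):
    #   https://github.com/dftd4/dftd4/releases/download/v3.5.0/dftd4-3.5.0-source.tar.xz
    # version 3.7.0 (falls through to the `url` template):
    #   https://github.com/dftd4/dftd4/releases/download/v3.7.0/dftd4-3.7.0.tar.xz
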
@@ -2,7 +2,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


@@ -17,21 +16,43 @@ class Dpmjet(MakefilePackage):

    maintainers("wdconinc")

    license("BSD-3-Clause")
    license("BSD-3-Clause AND Pythia6", checked_by="wdconinc")

    version("19.3.7", sha256="4ab22fa9925031a11cf3b82fff73226011da2cf6b2662f10523bc9850f85b8a5")
    version("19.3.6", sha256="9453f1428eb41d59f0c951a48763b8f386eece39c4a73bdb07e759b2c5fdd4f5")
    version("19.3.5", sha256="5a546ca20f86abaecda1828eb5b577aee8a532dffb2c5e7244667d5f25777909")
    version("19.3.4", sha256="646f520aa67ef6355c45cde155a5dd55f7c9d661314358a7668f6ff472f5d5f9")
    version("19.3.3", sha256="4f449a36b48ff551beb4303d66bac18bebc52dbcac907f84ab7716c914ad6d8a")
    version("19.2.0", sha256="0f5c1af4419e1a8fa4b46cc24ae1da98abe5c119064275e1848538fe033f02cc")
    version("19.1.3", sha256="f2f7f9eee0fcd1e2770382fa6e3491418607e33de2272e04b6d75ebc97640474")

    depends_on("python@3:")
    depends_on("fortran", type="build")

    depends_on("python@3:", when="@:19.3.5")

    build_targets = ["exe"]

    def edit(self, spec, prefix):
        makefile = FileFilter("Makefile")
        makefile.filter(r"install: \$\(pylib\)", "install:")
        # The spack prefix paths needed to point to the data files are too long
        # and need to be wrapped at the maximum column for f77 source files.
        columns = 72
        datadir = str(join_path(prefix, "share/dpmjet/dpmdata", ""))
        datini = FileFilter("src/phojet/PHO_DATINI.f")
        continuation = "\n &"
        old = "^ DATDir = 'dpmdata/'"
        new = f" DATDir = '{datadir}'"
        new_wrapped = [new[i : i + columns] for i in range(0, len(new), columns)]
        datini.filter(old, continuation.join(new_wrapped))
        datini.filter("LENDir = 8", f"LENDir = {len(datadir)}")

        # The python components were extracted in later versions
        if spec.satisfies("@:19.3.5"):
            makefile = FileFilter("Makefile")
            makefile.filter(r"install: \$\(pylib\)", "install:")

    def install(self, spec, prefix):
        install_tree("bin", prefix.bin)
        install_tree("lib", prefix.lib)
        install_tree("include", prefix.include)
        install_tree("dpmdata", prefix.share.dpmjet.dpmdata)
        install_tree("examples", prefix.share.dpmjet.examples)

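The ``edit`` step above rewrites a Fortran ``DATDir`` assignment whose replacement exceeds the fixed-form 72-column limit, splitting it with continuation lines. A standalone sketch of that wrapping logic (the path is illustrative, and the exact continuation spacing in the package may differ from what survives extraction here):

    # Wrap a too-long line every 72 characters, joining the pieces with a
    # newline plus a Fortran continuation marker (ampersand in column 6).
    columns = 72
    datadir = "/spack/opt/spack/linux-rhel8-x86_64/dpmjet-19.3.7-abc123/share/dpmjet/dpmdata/"
    line = f"      DATDir = '{datadir}'"
    continuation = "\n     &"
    pieces = [line[i : i + columns] for i in range(0, len(line), columns)]
    print(continuation.join(pieces))
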
@@ -76,6 +76,7 @@ class Edm4hep(CMakePackage):
    depends_on("nlohmann-json@3.10.5:", when="@:0.99.1")
    depends_on("podio@1:", when="@0.99:")
    depends_on("podio@0.15:", when="@:0.10.5")
    depends_on("podio@:1.1", when="@:0.99.0")
    for _std in _cxxstd_values:
        for _v in _std:
            depends_on(f"podio cxxstd={_v.value}", when=f"cxxstd={_v.value}")
@@ -90,6 +91,8 @@ class Edm4hep(CMakePackage):
    # Corresponding changes in EDM4hep landed with https://github.com/key4hep/EDM4hep/pull/314
    extends("python", when="@0.10.6:")

    conflicts("%clang@:16", when="@0.99.1:", msg="Incomplete consteval support in clang")

    def cmake_args(self):
        args = [
            self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),

@@ -20,15 +20,17 @@ class Ensmallen(CMakePackage):

    license("BSD-3-Clause")

    version("2.22.1", sha256="daf53fe96783043ca33151a3851d054a826fab8d9a173e6bcbbedd4a7eabf5b1")
    version("2.21.1", sha256="820eee4d8aa32662ff6a7d883a1bcaf4e9bf9ca0a3171d94c5398fe745008750")
    version("2.19.1", sha256="f36ad7f08b0688d2a8152e1c73dd437c56ed7a5af5facf65db6ffd977b275b2e")

    depends_on("cxx", type="build")  # generated
    depends_on("cxx", type="build")

    variant("openmp", default=True, description="Use OpenMP for parallelization")

    depends_on("cmake@3.3.2:")
    depends_on("armadillo@9.800.0:")
    depends_on("armadillo@10.8.2:", when="@2.22:")

    def cmake_args(self):
        args = [self.define_from_variant("USE_OPENMP", "openmp")]

@@ -62,6 +62,7 @@ class EnvironmentModules(Package):

    variant("X", default=True, description="Build with X functionality")

    depends_on("gmake", type="build")
    depends_on("util-linux", type=("build", "run"), when="@5.5:")
    depends_on("less", type=("build", "run"), when="@4.1:")
    with when("@main"):

@@ -31,6 +31,7 @@ class Esmf(MakefilePackage, PythonExtension):
    # Develop is a special name for spack and is always considered the newest version
    version("develop", branch="develop")
    # generate chksum with 'spack checksum esmf@x.y.z'
    version("8.8.0", sha256="f89327428aeef6ad34660b5b78f30d1c55ec67efb8f7df1991fdaa6b1eb3a27c")
    version("8.7.0", sha256="d7ab266e2af8c8b230721d4df59e61aa03c612a95cc39c07a2d5695746f21f56")
    version("8.6.1", sha256="dc270dcba1c0b317f5c9c6a32ab334cb79468dda283d1e395d98ed2a22866364")
    version("8.6.0", sha256="ed057eaddb158a3cce2afc0712b49353b7038b45b29aee86180f381457c0ebe7")

@@ -16,24 +16,20 @@ class Evtgen(CMakePackage):

    maintainers("vvolkl")

    version("02.02.03", sha256="b642700b703190e3304edb98ff464622db5d03c1cfc5d275ba4a628227d7d6d0")
    version("02.02.02", sha256="e543d1213cd5003124139d0dc7eee9247b0b9d44154ff8a88bac52ba91c5dfc9")
    version("02.02.01", sha256="1fcae56c6b27b89c4a2f4b224d27980607442185f5570e961f6334a3543c6e77")
    version("02.02.00", sha256="0c626e51cb17e799ad0ffd0beea5cb94d7ac8a5f8777b746aa1944dd26071ecf")
    version("02.00.00", sha256="02372308e1261b8369d10538a3aa65fe60728ab343fcb64b224dac7313deb719")
    # switched to cmake in 02.00.00
    version(
        "01.07.00",
        sha256="2648f1e2be5f11568d589d2079f22f589c283a2960390bbdb8d9d7f71bc9c014",
        deprecated=True,
    )

    depends_on("cxx", type="build")  # generated

    variant("pythia8", default=True, description="Build with pythia8")
    variant("tauola", default=False, description="Build with tauola")
    variant("photos", default=False, description="Build with photos")
    variant("sherpa", default=False, description="build with sherpa")
    variant("hepmc3", default=False, description="Link with hepmc3 (instead of hepmc)")

    patch("g2c.patch", when="@01.07.00")
    patch("evtgen-2.0.0.patch", when="@02.00.00 ^pythia8@8.304:")

    depends_on("hepmc", when="~hepmc3")
@@ -44,6 +40,8 @@ class Evtgen(CMakePackage):
    depends_on("photos~hepmc3", when="+photos~hepmc3")
    depends_on("tauola+hepmc3", when="+tauola+hepmc3")
    depends_on("photos+hepmc3", when="+photos+hepmc3")
    depends_on("sherpa@2:", when="@02.02.01: +sherpa")
    depends_on("sherpa@:2", when="@:02 +sherpa")

    conflicts(
        "^pythia8+evtgen",
@@ -53,7 +51,6 @@ class Evtgen(CMakePackage):
        "that cannot be resolved at the moment! "
        "Use evtgen+pythia8^pythia8~evtgen.",
    )
    conflicts("+hepmc3", when="@:01", msg="hepmc3 support was added in 02.00.00")

    @property
    def root_cmakelists_dir(self):
@@ -71,6 +68,7 @@ def cmake_args(self):
        args.append(self.define_from_variant("EVTGEN_PYTHIA", "pythia8"))
        args.append(self.define_from_variant("EVTGEN_TAUOLA", "tauola"))
        args.append(self.define_from_variant("EVTGEN_PHOTOS", "photos"))
        args.append(self.define_from_variant("EVTGEN_SHERPA", "sherpa"))
        args.append(self.define_from_variant("EVTGEN_HEPMC3", "hepmc3"))

        return args
@@ -85,50 +83,5 @@ def patch(self):

        filter_file("-shared", "-dynamiclib -undefined dynamic_lookup", "make.inc")

    # Taken from AutotoolsPackage
    def configure(self, spec, prefix):
        """Runs configure with the arguments specified in
        :py:meth:`~.AutotoolsPackage.configure_args`
        and an appropriately set prefix.
        """
        options = getattr(self, "configure_flag_args", [])
        options += ["--prefix={0}".format(prefix)]
        options += self.configure_args()

        with working_dir(self.build_directory, create=True):
            configure(*options)

    @when("@:01")
    def configure_args(self):
        args = []

        args.append("--hepmcdir=%s" % self.spec["hepmc"].prefix)
        if self.spec.satisfies("+pythia8"):
            args.append("--pythiadir=%s" % self.spec["pythia8"].prefix)
        if self.spec.satisfies("+photos"):
            args.append("--photosdir=%s" % self.spec["photos"].prefix)
        if self.spec.satisfies("+tauola"):
            args.append("--tauoladir=%s" % self.spec["tauola"].prefix)

        return args

    @when("@:01")
    def cmake(self, spec, prefix):
        pass

    @when("@:01")
    def build(self, spec, prefix):
        self.configure(spec, prefix)
        # avoid parallel compilation errors
        # due to libext_shared depending on lib_shared
        with working_dir(self.build_directory):
            make("lib_shared")
            make("all")

    @when("@:01")
    def install(self, spec, prefix):
        with working_dir(self.build_directory):
            make("install")

    def setup_run_environment(self, env):
        env.set("EVTGEN", self.prefix.share)

@@ -64,6 +64,7 @@ class Faodel(CMakePackage):
|
||||
"+program_options+exception+locale+system+chrono+log+serialization"
|
||||
"+atomic+container+regex+thread+date_time"
|
||||
)
|
||||
depends_on("boost@:1.86", when="@:1.2108.1")
|
||||
depends_on("cmake@3.8.0:", type="build")
|
||||
depends_on("hdf5+mpi", when="+hdf5+mpi")
|
||||
depends_on("hdf5~mpi", when="+hdf5~mpi")
|
||||
|
@@ -28,6 +28,7 @@ class Fltk(Package):
|
||||
|
||||
depends_on("c", type="build") # generated
|
||||
depends_on("cxx", type="build") # generated
|
||||
depends_on("gmake", type="build")
|
||||
|
||||
depends_on("libx11")
|
||||
|
||||
|
@@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage):
    license("LGPL-3.0-only")

    version("master", branch="master")
    version("0.67.0", sha256="9406e776cbeff971881143fd1b94c42ec912e5b226401d2d3d91d766dd81de8c")
    version("0.66.0", sha256="0a25cfb1ebc033c249614eb2350c6fb57b00cdf3c584d0759c787f595c360daa")
    version("0.65.0", sha256="a60bc7ed13b8e6d09e99176123a474aad2d9792fff6eb6fd4da2a00e1d2865ab")
    version("0.64.0", sha256="0334d6191915f1b89b70cdbf14f24200f8899da31090df5f502020533b304bb3")
@@ -96,6 +97,7 @@ class FluxCore(AutotoolsPackage):
    depends_on("py-pyyaml@3.10:", type=("build", "run"))
    depends_on("py-jsonschema@2.3:", type=("build", "run"), when="@:0.58.0")
    depends_on("py-ply", type=("build", "run"), when="@0.46.1:")
    depends_on("py-setuptools", type="build", when="@0.67.0:")
    depends_on("jansson@2.10:")
    depends_on("pkgconfig")
    depends_on("lz4")

@@ -72,11 +72,12 @@ class Gasnet(Package, CudaPackage, ROCmPackage):
        deprecated=True,
        sha256="117f5fdb16e53d0fa8a47a1e28cccab1d8020ed4f6e50163d985dc90226aaa2c",
    )
    # Do NOT add older versions here.
    # GASNet-EX releases over 2 years old are not supported.

    depends_on("c", type="build")  # generated
    depends_on("cxx", type="build")  # generated
    # Do NOT add older versions here.
    # GASNet-EX releases over 2 years old are not supported.
    depends_on("gmake", type="build")

    # The optional network backends:
    variant(

@@ -30,6 +30,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension):
    license("MIT")
    maintainers("adamjstewart")

    version("3.10.1", sha256="9211eac72b53f5f85d23cf6d83ee20245c6d818733405024e71f2af41e5c5f91")
    version("3.10.0", sha256="af821a3bcf68cf085724c21c9b53605fd451d83af3c8854d8bf194638eb734a8")
    version("3.9.3", sha256="34a037852ffe6d2163f1b8948a1aa7019ff767148aea55876c1339b22ad751f1")
    version("3.9.2", sha256="bfbcc9f087f012c36151c20c79f8eac9529e1e5298fbded79cd5a1365f0b113a")

@@ -56,8 +56,6 @@ class Geant4Data(BundlePackage):
            "g4incl@1.2",
            "g4ensdfstate@3.0",
            "g4channeling@1.0",
            "g4nudexlib@1.0",
            "g4urrpt@1.1",
        ],
        "11.2.2:11.2": [
            "g4ndl@4.7.1",
@@ -195,6 +193,23 @@ class Geant4Data(BundlePackage):
        for _d in _dsets:
            depends_on(_d, type=("build", "run"), when=_vers)

    _datasets_tendl = {
        "11.0:11.3": "g4tendl@1.4",
        "10.4:10.7": "g4tendl@1.3.2",
        "10.3:10.3": "g4tendl@1.3",
    }

    variant("tendl", default=True, when="@10.3:", description="Enable G4TENDL")
    with when("+tendl"):
        for _vers, _d in _datasets_tendl.items():
            depends_on(_d, type=("build", "run"), when="@" + _vers)
    variant("nudexlib", default=True, when="@11.3.0:11.3", description="Enable G4NUDEXLIB")
    with when("+nudexlib"):
        depends_on("g4nudexlib@1.0", type=("build", "run"))
    variant("urrpt", default=True, when="@11.3.0:11.3", description="Enable G4URRPT")
    with when("+urrpt"):
        depends_on("g4urrpt@1.1", type=("build", "run"))

    @property
    def datadir(self):
        spec = self.spec
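The geant4-data hunk above moves two hard-wired dataset entries out of the version table and re-adds them as opt-out variants driven by a small dict. A minimal sketch of that table-driven pattern follows; the `DemoData` bundle and the `g4foo` dataset names and ranges are hypothetical.

```python
# Sketch of the table-driven variant/dependency pattern used above
# (hypothetical dataset names and version ranges).
from spack.package import *


class DemoData(BundlePackage):
    """Bundle that pulls in optional datasets keyed by package version."""

    homepage = "https://example.org/demo-data"

    version("11.3.0")

    _datasets_foo = {
        "11.0:11.3": "g4foo@1.4",
        "10.4:10.7": "g4foo@1.3.2",
    }

    # One variant guards the whole family; each dict entry becomes a
    # dependency that is active only within its version range.
    variant("foo", default=True, when="@10.4:", description="Enable G4FOO")
    with when("+foo"):
        for _vers, _d in _datasets_foo.items():
            depends_on(_d, type=("build", "run"), when="@" + _vers)
```
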
@@ -0,0 +1,26 @@
diff --git a/source/g3tog4/include/G3EleTable.hh b/source/g3tog4/include/G3EleTable.hh
index 0ab9c4fd566..18c6f73fde6 100644
--- a/source/g3tog4/include/G3EleTable.hh
+++ b/source/g3tog4/include/G3EleTable.hh
@@ -56,7 +56,7 @@ public: // with description
 private:

   void LoadUp();
-  G4int parse(G4double& Z, char* name, char* sym, G4double& A);
+  G4int parse(G4double& Z, char (&name)[20], char (&sym)[3], G4double& A);

 private:

diff --git a/source/g3tog4/src/G3EleTable.cc b/source/g3tog4/src/G3EleTable.cc
index cecc494b201..a2f3af3d6a2 100644
--- a/source/g3tog4/src/G3EleTable.cc
+++ b/source/g3tog4/src/G3EleTable.cc
@@ -64,7 +64,7 @@ G3EleTable::GetEle(G4double Z){
 }

 G4int
-G3EleTable::parse(G4double& Z, char* name, char* sym, G4double& A){
+G3EleTable::parse(G4double& Z, char (&name)[20], char (&sym)[3], G4double& A){
   G4int rc = 0;
   if (Z>0 && Z <=_MaxEle){
     G4int z = (G4int) Z-1;
@@ -20,6 +20,7 @@ class Geant4(CMakePackage):
    executables = ["^geant4-config$"]

    maintainers("drbenmorgan", "sethrj")

    version("11.3.0", sha256="d9d71daff8890a7b5e0e33ea9a65fe6308ad6713000b43ba6705af77078e7ead")
    version("11.2.2", sha256="3a8d98c63fc52578f6ebf166d7dffaec36256a186d57f2520c39790367700c8d")
    version("11.2.1", sha256="76c9093b01128ee2b45a6f4020a1bcb64d2a8141386dea4674b5ae28bcd23293")
@@ -203,29 +204,30 @@ def std_when(values):
    depends_on("qt@5.9:", when="@11.2:")
    conflicts("@:11.1 ^[virtuals=qmake] qt-base", msg="Qt6 not supported before 11.2")

    # CMAKE PROBLEMS #
    # As released, 10.0.4 has inconsistently capitalised filenames
    # in the cmake files; this patch also enables cxxstd 14
    patch("geant4-10.0.4.patch", when="@10.0.4")
    # Fix member field typo in g4tools wroot
    # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2640
    patch("columns-11.patch", when="@11:11.2.2")
    patch("columns-10.patch", when="@10.4:10")
    # As released, 10.03.03 has issues with respect to using external
    # CLHEP.
    patch("CLHEP-10.03.03.patch", level=1, when="@10.3")
    # Build failure on clang 15, ubuntu 22: see Geant4 problem report #2444
    # fixed by ascii-V10-07-03
    patch("geant4-10.6.patch", level=1, when="@10.0:10.6")
    # These patches can be applied independent of the cxxstd value?
    patch("cxx17.patch", when="@10.3 cxxstd=17")
    patch("cxx17_geant4_10_0.patch", level=1, when="@10.4.0 cxxstd=17")
    patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17")
    patch("geant4-10.6.patch", when="@10.0:10.6")
    # Enable "17" cxxstd option in CMake (2 different filenames)
    patch("geant4-10.3-cxx17-cmake.patch", when="@10.3 cxxstd=17")
    patch("geant4-10.4-cxx17-cmake.patch", when="@10.4:10.4.2 cxxstd=17")
    # Fix exported cmake: https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556
    patch("package-cache.patch", when="@10.7.0:11.1.2^cmake@3.17:")

    # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556
    patch("package-cache.patch", level=1, when="@10.7.0:11.1.2^cmake@3.17:")

    # Issue with Twisted tubes, see https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2619
    patch("twisted-tubes.patch", level=1, when="@11.2.0:11.2.2")
    # BUILD ERRORS #
    # Fix C++17: add -D_LIBCPP_ENABLE_CXX17_REMOVED_FEATURES C++ flag
    patch("geant4-10.4.3-cxx17-removed-features.patch", when="@10.4.3 cxxstd=17")
    # Fix C++20: build error due to removed-in-C++20 `ostream::operator>>(char*)`
    # (different, simpler approach than upstream Geant4 changes)
    patch("geant4-10.7-cxx20-g3tog4.patch", when="@:10.7 cxxstd=20")
    # Fix member field typo in g4tools wroot: https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2640
    patch("columns-10.patch", when="@10.4:10")
    patch("columns-11.patch", when="@11:11.2.2")
    # Fix navigation errors with twisted tubes: https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2619
    patch("twisted-tubes.patch", when="@11.2.0:11.2.2")

    # NVHPC: "thread-local declaration follows non-thread-local declaration"
    conflicts("%nvhpc", when="+threads")
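The geant4 hunk above is mostly a reorganization: the same `patch(...)` directives are regrouped under topical comments, each keeping its `when=` guard, so the reshuffle is behavior-preserving. As a reminder of the semantics, here is a small sketch; the `DemoSim` class and all patch file names in it are hypothetical.

```python
# Sketch of constraint-guarded patches (hypothetical package and files).
from spack.package import *


class DemoSim(CMakePackage):
    homepage = "https://example.org/demo-sim"
    url = "https://example.org/demo-sim-11.0.tar.gz"

    version("11.0", sha256="c" * 64)  # placeholder checksum

    variant("cxxstd", default="17", values=("14", "17"), multi=False,
            description="C++ standard")

    # Applied only when the spec matches both the version range and the
    # variant value; unrelated specs never see the patch.
    patch("fix-cxx17.patch", when="@10.3 cxxstd=17")
    # level=1 strips one leading path component, like `patch -p1`.
    patch("fix-build.patch", level=1, when="@10.0:10.6")
    # Constraints can also reach into the dependency DAG with ^.
    patch("package-cache-demo.patch", when="@10.7.0:11.1.2 ^cmake@3.17:")
```
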
@@ -58,6 +58,20 @@ def build_args(self):
        args.extend(["-trimpath", "./cmd/gh"])
        return args

    @property
    def check_args(self):
        args = super().check_args
        skip_tests = (
            "TestHasNoActiveToken|TestTokenStoredIn.*|"
            "TestSwitchUser.*|TestSwitchClears.*|"
            "TestTokenWorksRightAfterMigration|"
            "Test_loginRun.*|Test_logoutRun.*|Test_refreshRun.*|"
            "Test_setupGitRun.*|Test_CheckAuth|TestSwitchRun.*|"
            "Test_statusRun.*|TestTokenRun.*"
        )
        args.extend([f"-skip={skip_tests}", "./..."])
        return args

    @run_after("install")
    def install_completions(self):
        gh = Executable(self.prefix.bin.gh)
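The gh hunk adds a `check_args` property so that the package's test phase skips tests that need credentials or network access; `go test -skip` takes a regular expression over test names. A minimal sketch of the same override, on a hypothetical `DemoCli` package (the skip pattern and URLs are placeholders):

```python
# Sketch: extending a GoPackage's test arguments (hypothetical package).
from spack.package import *


class DemoCli(GoPackage):
    homepage = "https://example.org/demo-cli"
    url = "https://example.org/demo-cli-1.0.tar.gz"

    version("1.0", sha256="d" * 64)  # placeholder checksum

    @property
    def check_args(self):
        # Start from the defaults, then skip tests that need network
        # access; the pattern is matched against Go test names.
        args = super().check_args
        args.extend(["-skip=TestNeedsNetwork.*", "./..."])
        return args
```
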
@@ -153,7 +153,7 @@ def common_args(self):
            "CC={0}".format(env["CC"]),
            "PREFIX={0}".format(self.spec.prefix.bin),
            "MFEM_DIR={0}".format(self.spec["mfem"].prefix),
            "CONFIG_MK={0}".format(self.spec["mfem"].package.config_mk),
            "CONFIG_MK={0}".format(self.pkg["mfem"].config_mk),
        ]

        # https://github.com/spack/spack/issues/42839
@@ -96,5 +96,6 @@ def install(self, spec, prefix):

    def setup_dependent_package(self, module, dspec):
        module.make = MakeExecutable(
            self.spec.prefix.bin.make, determine_number_of_jobs(parallel=dspec.package.parallel)
            self.spec.prefix.bin.make,
            jobs=determine_number_of_jobs(parallel=dspec.package.parallel),
        )
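The gmake hunk switches the job count to an explicit `jobs=` keyword argument. The surrounding method is the standard hook through which a build-tool package injects a ready-to-run `make` callable into every dependent's package module; sketched below with the same calls as in the hunk. Only the `DemoMake` class name and metadata are hypothetical, and the import locations vary across Spack versions.

```python
# Sketch of the dependency-injection hook shown above (hypothetical
# package name; import paths assume a recent Spack).
from spack.build_environment import MakeExecutable
from spack.package import *
from spack.util.cpus import determine_number_of_jobs


class DemoMake(Package):
    homepage = "https://example.org/demo-make"
    url = "https://example.org/demo-make-4.4.tar.gz"

    version("4.4", sha256="e" * 64)  # placeholder checksum

    def setup_dependent_package(self, module, dspec):
        # Give each dependent's package module a callable `make`, with
        # parallelism capped by the dependent's own `parallel` setting.
        module.make = MakeExecutable(
            self.spec.prefix.bin.make,
            jobs=determine_number_of_jobs(parallel=dspec.package.parallel),
        )
```
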
@@ -17,6 +17,12 @@ class Gnupg(AutotoolsPackage):

    license("GPL-3.0-or-later")

    version("2.5.3", sha256="23128b136aed4e5121e793d1b6c60ee50c8007a9d926c1313e524d05386b54ac")
    version("2.5.2", sha256="7f404ccc6a58493fedc15faef59f3ae914831cff866a23f0bf9d66cfdd0fea29")
    version("2.5.1", sha256="8a34bb318499867962c939e156666ada93ed81f01926590ac68f3ff79178375e")
    version("2.5.0", sha256="2222c827d4e7087f15e7f72739d004abc1d05c6c5f0a5a12b24c6a6cc5d173fb")
    version("2.4.7", sha256="7b24706e4da7e0e3b06ca068231027401f238102c41c909631349dcc3b85eb46")
    version("2.4.6", sha256="95acfafda7004924a6f5c901677f15ac1bda2754511d973bb4523e8dd840e17a")
    version("2.4.5", sha256="f68f7d75d06cb1635c336d34d844af97436c3f64ea14bcb7c869782f96f44277")
    version("2.4.4", sha256="67ebe016ca90fa7688ce67a387ebd82c6261e95897db7b23df24ff335be85bc6")
    version("2.4.3", sha256="a271ae6d732f6f4d80c258ad9ee88dd9c94c8fdc33c3e45328c4d7c126bd219d")
@@ -41,15 +47,23 @@ class Gnupg(AutotoolsPackage):
    depends_on("libgpg-error@1.24:", when="@2:")
    depends_on("libgpg-error@1.41:", when="@2.3:")
    depends_on("libgpg-error@1.46:", when="@2.4:")
    # https://github.com/gpg/gnupg/commit/d78131490edd7f7db142702b8144bc30e65dbd8d
    depends_on("libgpg-error@1.50:", when="@2.5:")
    # https://github.com/gpg/gnupg/commit/c3bab200d97460028d842d76484b4c08fb947fef
    depends_on("libgpg-error@1.51:", when="@2.5.2:")

    depends_on("libgcrypt@1.7.0:", when="@2:")
    depends_on("libgcrypt@1.9.1:", when="@2.3:")
    # https://github.com/gpg/gnupg/commit/f305e703d51079a17bcfc15d54f4c5f591dcff56
    depends_on("libgcrypt@1.11:", when="@2.5:")

    depends_on("libksba@1.3.4:", when="@2:")
    depends_on("libksba@1.6.3:", when="@2.4:")

    depends_on("libassuan@2.5:", when="@2.2.15:")
    depends_on("libassuan@:2", when="@:2.4.3")
    depends_on("libassuan@2.5:", when="@2.2.15:")
    # https://github.com/gpg/gnupg/commit/0d20b79ab79819f6177737a61e886d4820e475e2
    depends_on("libassuan@3:", when="@2.5.0:")

    depends_on("pinentry", type="run", when="@2:")
    depends_on("iconv", when="@2:")
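The gnupg hunk tightens minimum dependency versions alongside the new 2.5.x releases. The pattern relies on `depends_on` constraints being cumulative: every directive whose `when=` matches the spec contributes, and the concretizer must satisfy all of them at once, so the tightest matching lower bound wins. A minimal sketch with hypothetical names:

```python
# Sketch of cumulative version-gated constraints (hypothetical package
# and dependency names).
from spack.package import *


class DemoCrypto(AutotoolsPackage):
    homepage = "https://example.org/demo-crypto"
    url = "https://example.org/demo-crypto-2.5.2.tar.gz"

    version("2.5.2", sha256="f" * 64)  # placeholder checksum
    version("2.4.0", sha256="0" * 64)  # placeholder checksum

    # For demo-crypto@2.5.2 all three constraints apply, so the
    # effective requirement is libdemo@1.51:.
    depends_on("libdemo@1.24:", when="@2:")
    depends_on("libdemo@1.46:", when="@2.4:")
    depends_on("libdemo@1.51:", when="@2.5.2:")
```
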
@@ -17,11 +17,13 @@ class Gnutls(AutotoolsPackage):

    homepage = "https://www.gnutls.org"
    url = "https://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.19.tar.xz"
    list_depth = 2

    maintainers("alecbcs")

    license("LGPL-2.1-or-later")

    version("3.8.8", sha256="ac4f020e583880b51380ed226e59033244bc536cad2623f2e26f5afa2939d8fb")
    version("3.8.4", sha256="2bea4e154794f3f00180fa2a5c51fe8b005ac7a31cd58bd44cdfa7f36ebc3a9b")
    version("3.8.3", sha256="f74fc5954b27d4ec6dfbb11dea987888b5b124289a3703afcada0ee520f4173e")
    version("3.7.8", sha256="c58ad39af0670efe6a8aee5e3a8b2331a1200418b64b7c51977fb396d4617114")

@@ -22,4 +22,4 @@ class Gocryptfs(GoPackage):
    depends_on("c", type="build")  # generated

    depends_on("openssl")
    depends_on("pkg-config", type="build")
    depends_on("pkgconfig", type="build")

@@ -15,6 +15,8 @@ class Googletest(CMakePackage):
    maintainers("sethrj")

    version("main", branch="main")
    version("1.15.2", sha256="7b42b4d6ed48810c5362c265a17faebe90dc2373c885e5216439d37927f02926")
    version("1.15.0", sha256="7315acb6bf10e99f332c8a43f00d5fbb1ee6ca48c52f6b936991b216c586aaad")
    version("1.14.0", sha256="8ad598c73ad796e0d8280b082cebd82a630d73e73cd3c70057938a6501bba5d7")
    version("1.13.0", sha256="ad7fdba11ea011c1d925b3289cf4af2c66a352e18d4c7264392fead75e919363")
    version("1.12.1", sha256="81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2")
@@ -29,14 +31,18 @@ class Googletest(CMakePackage):
    depends_on("c", type="build")
    depends_on("cxx", type="build")

    variant("absl", default=False, when="@1.12.1:", description="Build with abseil and RE2")
    depends_on("abseil-cpp", when="+absl")
    depends_on("re2", when="+absl")

    variant("gmock", default=True, when="@1.8:", description="Build with gmock")
    variant("pthreads", default=True, description="Build multithreaded version with pthreads")
    variant("shared", default=True, description="Build shared libraries (DLLs)")

    variant(
        "cxxstd",
        default="11",
        values=("98", "11", "14", "17"),
        default="14",
        values=("98", "11", "14", "17", "20"),
        multi=False,
        description="Use the specified C++ standard when building",
    )
@@ -48,12 +54,13 @@ def cmake_args(self):
        args = [
            self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
            self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
            self.define_from_variant("BUILD_GMOCK", "gmock"),
            self.define_from_variant("GTEST_HAS_ABSL", "absl"),
            self.define("gtest_disable_pthreads", spec.satisfies("~pthreads")),
        ]
        args.append(self.define("gtest_disable_pthreads", not spec.satisfies("+pthreads")))
        if spec.satisfies("@1.8:"):
            # New style (contains both Google Mock and Google Test)

        if spec.satisfies("@:1.8.0"):
            args.append(self.define("BUILD_GTEST", True))
            args.append(self.define_from_variant("BUILD_GMOCK", "gmock"))

        return args
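The googletest hunk consolidates scattered `args.append(...)` calls into one declarative list: `define_from_variant` maps a variant straight onto a CMake cache definition, while `define` covers values that need computing first. A minimal sketch of the idiom, with a hypothetical `DemoLib` package:

```python
# Sketch of the define/define_from_variant idiom (hypothetical package).
from spack.package import *


class DemoLib(CMakePackage):
    homepage = "https://example.org/demo-lib"
    url = "https://example.org/demo-lib-1.0.tar.gz"

    version("1.0", sha256="1" * 64)  # placeholder checksum

    variant("shared", default=True, description="Build shared libraries")
    variant("pthreads", default=True, description="Use pthreads")

    def cmake_args(self):
        spec = self.spec
        return [
            # +shared -> -DBUILD_SHARED_LIBS:BOOL=ON, ~shared -> OFF
            self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
            # define() is for values derived from the spec by hand
            self.define("demo_disable_pthreads", spec.satisfies("~pthreads")),
        ]
```
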
@@ -585,7 +585,7 @@ def patch(self):
            )

        if self.spec.satisfies("+plumed"):
            self.spec["plumed"].package.apply_patch(self)
            self["plumed"].apply_patch(self)

        if self.spec.satisfies("%nvhpc"):
            # Disable obsolete workaround
@@ -45,7 +45,7 @@ class Gxsview(QMakePackage):
    depends_on("vtk@9:+qt+opengl2", when="@2024.03.15:")
    conflicts("%gcc@:7.2.0", msg="Requires C++17 compiler support")  # need C++17 standard
    conflicts("qt@6:", msg="Qt 6 support is not yet achieved")
    conflicts("qt-base@6:", msg="Qt 6 support is not yet achieved")  # required for clingo
    conflicts("^qt-base@6:", msg="Qt 6 support is not yet achieved")  # required for clingo

    patch("vtk9.patch", when="^vtk@9:")
    # gcc11 compilation rule for std::numeric_limits,
@@ -75,13 +75,13 @@ def qmake_args(self):
            ]
        )
        # Below to avoid undefined reference to `std::filesystem::__cxx11::path::_M_split_cmpts()'
        if self.spec.satisfies("%gcc@8.0:8.9") or self.spec.satisfies("%fj"):
        if self.spec.satisfies("%gcc@:8.9") or self.spec.satisfies("%fj"):
            if self.spec.satisfies("^vtk@9:"):
                fic = "vtk9.pri"
            else:
                fic = "vtk8.pri"
            with open(fic, "a") as fh:
                fh.write("-lstdc++fs\n")
                fh.write("\nLIBS += -lstdc++fs\n")
        return args

    def install(self, spec, prefix):