Compare commits: fix-ams...hs/rocm-op
135 commits
SHA1:

eff56cd845, 9747978c7f, f043455ccc, fb9d6427e6, 76e83e10c1, af89bdf632,
46f5b192ef, 18cd922aab, 5518ad9611, 57a1807443, 3909308d5c, 54210270c8,
1a71bb046e, dbd6857d32, 025bc24996, 01e16b58a3, f71e202f24, f7edd10c17,
153c0805dd, 5d8517ef69, f23cae6a86, e6e67f8e0a, e6bef4ca9b, e3e0bef0de,
42486d93ec, 6d608a9664, 04313afc63, f839d2ba56, 2b1a8b1913, 8907003648,
8afdba4bf7, 57cabbfb10, c71efb9040, c5dd2d43d2, 34338ef757, c0bdc37226,
8bad9fb804, 2df7cc0087, 40d40ccc52, afe7d6c39e, 113733d9fb, a8e2da5bb8,
97750189b6, bcd40835a0, 2c3f2c5733, 302d74394b, cf94dc7823, 4411ee3382,
f790ce0f72, 64d53037db, 4aef50739b, a6e966f6f2, 1f428c4188, 731e48b1bd,
74ff9ad821, 16a4eff689, d0b0d8db50, 54f591cce5, 8677bb4d43, b66b80a96a,
10e21f399c, 56892f6140, 7eddc4b1f8, 3c7392bbcc, bb0517f4d9, c8994ee50f,
4b2f5638f2, 31312a379f, b0d5f272b0, 1c93fef160, 8bb5f4faf4, f76ab5f72f,
49c831edc3, c943c8c1d2, e0e6f29584, 72bc3bb803, dba8fe2b96, 4487598d60,
495537cf56, 22c3b4099f, 13978d11a0, a22114b20b, c10624390f, fb3d9de80b,
fbb688af07, d34b709425, cb0b188cf6, 9a2b0aca66, 89a8ab3233, 5d87166c07,
15c989b3fe, b7f556e4b4, 36f32ceda3, 01d77ed915, 0049f8332d, 39c10c3116,
71d1901831, 41e0863b86, a75d83f65c, f2f13964fb, 9b032018d6, 7d470c05be,
664fe9e9e6, 2745a519e2, 4348ee1c75, 8e39fb1e54, 09458312a3, 5fd0693df4,
f58684429d, 409611a479, dd98cfb839, 5c91667dab, 9efd6f3f11, a8f5289801,
ac635aa777, 45dcddf9c3, f1660722e7, 04b44d841c, 7f30502297, 61b1586c51,
8579efcadf, 1c3e2b5425, 011ef0aaaf, 9642f3f49a, a6c9b55fad, 608ed967e1,
742eaa32b7, 763b35a2e0, 12280f864c, 253ba05732, 195b869e1c, 393961ffd6,
392a58e9be, 0e8e97a811, 43a0cbe7a2
.github/workflows/ci.yaml (vendored): 4 changes
@@ -81,6 +81,10 @@ jobs:
    with:
      with_coverage: ${{ needs.changes.outputs.core }}

  import-check:
    needs: [ changes ]
    uses: ./.github/workflows/import-check.yaml

  all-prechecks:
    needs: [ prechecks ]
    if: ${{ always() }}
.github/workflows/coverage.yml (vendored): 1 change
@@ -33,3 +33,4 @@ jobs:
    with:
      verbose: true
      fail_ci_if_error: false
      token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/import-check.yaml (vendored, new file): 49 lines
@@ -0,0 +1,49 @@
name: import-check

on:
  workflow_call:

jobs:
  # Check we don't make the situation with circular imports worse
  import-check:
    runs-on: ubuntu-latest
    steps:
    - uses: julia-actions/setup-julia@v2
      with:
        version: '1.10'
    - uses: julia-actions/cache@v2

    # PR: use the base of the PR as the old commit
    - name: Checkout PR base commit
      if: github.event_name == 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        ref: ${{ github.event.pull_request.base.sha }}
        path: old
    # not a PR: use the previous commit as the old commit
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        fetch-depth: 2
        path: old
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      run: git -C old reset --hard HEAD^

    - name: Checkout new commit
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        path: new
    - name: Install circular import checker
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        repository: haampie/circular-import-fighter
        ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
        path: circular-import-fighter
    - name: Install dependencies
      working-directory: circular-import-fighter
      run: make -j dependencies
    - name: Circular import check
      working-directory: circular-import-fighter
      run: make -j compare "SPACK_ROOT=../old ../new"
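The single `make -j compare` step folds in the before/after bookkeeping that the old workflow did by hand (visible in the script removed from `valid-style.yml` below). A minimal Python sketch of the pass/fail rule it enforces, with hypothetical names:

```python
def import_check(edges_before: int, edges_after: int) -> int:
    """Fail (exit code 1) when the number of problematic import edges grew."""
    if edges_after > edges_before:
        print(f"Import check failed: {edges_after} imports need to be deleted, "
              f"previously this was {edges_before}")
        return 1
    print(f"Import check passed: {edges_after} <= {edges_before}")
    return 0

assert import_check(10, 12) == 1  # regression: more circular-import edges
assert import_check(10, 9) == 0   # improvement or no change passes
```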
.github/workflows/valid-style.yml (vendored): 60 changes
@@ -86,66 +86,6 @@ jobs:
          spack -d bootstrap now --dev
          spack -d style -t black
          spack unit-test -V
  # Check we don't make the situation with circular imports worse
  import-check:
    runs-on: ubuntu-latest
    steps:
    - uses: julia-actions/setup-julia@v2
      with:
        version: '1.10'
    - uses: julia-actions/cache@v2

    # PR: use the base of the PR as the old commit
    - name: Checkout PR base commit
      if: github.event_name == 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        ref: ${{ github.event.pull_request.base.sha }}
        path: old
    # not a PR: use the previous commit as the old commit
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        fetch-depth: 2
        path: old
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      run: git -C old reset --hard HEAD^

    - name: Checkout new commit
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        path: new
    - name: Install circular import checker
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        repository: haampie/circular-import-fighter
        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
        path: circular-import-fighter
    - name: Install dependencies
      working-directory: circular-import-fighter
      run: make -j dependencies
    - name: Problematic imports before
      working-directory: circular-import-fighter
      run: make SPACK_ROOT=../old SUFFIX=.old
    - name: Problematic imports after
      working-directory: circular-import-fighter
      run: make SPACK_ROOT=../new SUFFIX=.new
    - name: Compare import cycles
      working-directory: circular-import-fighter
      run: |
        edges_before="$(head -n1 solution.old)"
        edges_after="$(head -n1 solution.new)"
        if [ "$edges_after" -gt "$edges_before" ]; then
          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
          printf 'previously this was %s\033[0m\n' "$edges_before"
          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
          exit 1
        else
          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
        fi

  # Further style checks from pylint
  pylint:
@@ -63,3 +63,7 @@ concretizer:
  # Setting this to false yields unreproducible results, so we advise to use that value only
  # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
  error_on_timeout: true

  # Static analysis may reduce the concretization time by generating smaller ASP problems, in
  # cases where there are requirements that prevent part of the search space from being explored.
  static_analysis: false
@@ -1,5 +1,5 @@
config:
  locks: false
  build_stage::
    - '$spack/.staging'
    - '$user_cache_path/stage'
  stage_name: '{name}-{version}-{hash:7}'
@@ -344,26 +344,6 @@ def close(self):
        self.file.close()


@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.

    If `env` is empty (or None), this unsets all current environment
    variables.
    """
    env = env or {}
    old_env = os.environ.copy()
    try:
        os.environ.clear()
        for name, val in env.items():
            os.environ[name] = val
        yield
    finally:
        os.environ.clear()
        for name, val in old_env.items():
            os.environ[name] = val


def log_output(*args, **kwargs):
    """Context manager that logs its output to a file.
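The helper removed above has a standard-library analogue. A sketch of the same save/swap/restore behavior using `unittest.mock.patch.dict` (the `SPACK_ENV` key and value are just an example, not taken from this diff):

```python
import os
from unittest.mock import patch

before = dict(os.environ)
# Same effect as the removed replace_environment() helper: the process
# environment is swapped inside the block and restored on exit.
with patch.dict(os.environ, {"SPACK_ENV": "/tmp/env"}, clear=True):
    assert list(os.environ) == ["SPACK_ENV"]
assert dict(os.environ) == before  # original environment restored
```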
@@ -447,7 +427,6 @@ def __init__(
        self.echo = echo
        self.debug = debug
        self.buffer = buffer
        self.env = env  # the environment to use for _writer_daemon
        self.filter_fn = filter_fn

        self._active = False  # used to prevent re-entry
@@ -519,21 +498,20 @@ def __enter__(self):
            # just don't forward input if this fails
            pass

        with replace_environment(self.env):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_fd,
                    read_fd,
                    self.write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
                    self.filter_fn,
                ),
            )
            self.process.daemon = True  # must set before start()
            self.process.start()
        self.process = multiprocessing.Process(
            target=_writer_daemon,
            args=(
                input_fd,
                read_fd,
                self.write_fd,
                self.echo,
                self.log_file,
                child_pipe,
                self.filter_fn,
            ),
        )
        self.process.daemon = True  # must set before start()
        self.process.start()

    finally:
        if input_fd:
@@ -729,10 +707,7 @@ class winlog:
    Does not support the use of 'v' toggling as nixlog does.
    """

    def __init__(
        self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
    ):
        self.env = env
    def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
        self.debug = debug
        self.echo = echo
        self.logfile = file_like
@@ -789,11 +764,10 @@ def background_reader(reader, echo_writer, _kill):
            reader.close()

        self._active = True
        with replace_environment(self.env):
            self._thread = Thread(
                target=background_reader, args=(self.reader, self.echo_writer, self._kill)
            )
            self._thread.start()
        self._thread = Thread(
            target=background_reader, args=(self.reader, self.echo_writer, self._kill)
        )
        self._thread.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
@@ -10,7 +10,7 @@
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.24.0.dev0"
__version__ = "1.0.0.dev0"
spack_version = __version__
@@ -11,6 +11,7 @@
from typing import Any, List, Optional, Tuple

import llnl.util.filesystem as fs
from llnl.util import tty
from llnl.util.lang import stable_partition

import spack.builder
@@ -458,19 +459,23 @@ def cmake(
    ) -> None:
        """Runs ``cmake`` in the build directory"""

        # skip cmake phase if it is an incremental develop build
        # These are the files that will re-run CMake that are generated from a successful
        # configure step
        primary_generator = _extract_primary_generator(self.generator)
        if primary_generator == "Unix Makefiles":
            configure_artifact = "Makefile"
        elif primary_generator == "Ninja":
            configure_artifact = "ninja.build"
        if spec.is_develop:
            # skip cmake phase if it is an incremental develop build

        if spec.is_develop and os.path.isfile(
            os.path.join(self.build_directory, configure_artifact)
        ):
            return
            # Determine the files that will re-run CMake that are generated from a successful
            # configure step based on state
            primary_generator = _extract_primary_generator(self.generator)
            configure_artifact = "Makefile"
            if primary_generator == "Ninja":
                configure_artifact = "ninja.build"

            if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
                tty.msg(
                    "Incremental build criteria satisfied."
                    "Skipping CMake configure step. To force configuration run"
                    f" `spack clean {pkg.name}`"
                )
                return

        options = self.std_cmake_args
        options += self.cmake_args()
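The hunk gates reconfiguration on the presence of the file the generator writes on a successful configure. A small standalone sketch of that check (helper name hypothetical; note that Ninja's re-run manifest is conventionally `build.ninja`, while the hunk above spells it `ninja.build`):

```python
import os

# File a successful configure leaves behind, per primary generator.
_CONFIGURE_ARTIFACTS = {
    "Unix Makefiles": "Makefile",
    "Ninja": "build.ninja",  # the hunk above uses "ninja.build" instead
}

def can_skip_configure(build_directory: str, generator: str) -> bool:
    """Hypothetical helper: True if an earlier configure step already ran."""
    artifact = _CONFIGURE_ARTIFACTS.get(generator)
    return artifact is not None and os.path.isfile(
        os.path.join(build_directory, artifact)
    )
```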
@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
    """Auxiliary class which contains CUDA variant, dependencies and conflicts
    and is meant to unify and facilitate its usage.

    Maintainers: ax3l, Rombur, davidbeckingsale
    Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
    """

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,6 +47,12 @@ class CudaPackage(PackageBase):
        "89",
        "90",
        "90a",
        "100",
        "100a",
        "101",
        "101a",
        "120",
        "120a",
    )

    # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -99,39 +105,56 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    # CUDA version vs Architecture
    # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
    # Tesla support:
    depends_on("cuda@:6.0", when="cuda_arch=10")
    depends_on("cuda@:6.5", when="cuda_arch=11")
    depends_on("cuda@2.1:6.5", when="cuda_arch=12")
    depends_on("cuda@2.1:6.5", when="cuda_arch=13")

    # Fermi support:
    depends_on("cuda@3.0:8.0", when="cuda_arch=20")
    depends_on("cuda@3.2:8.0", when="cuda_arch=21")

    # Kepler support:
    depends_on("cuda@5.0:10.2", when="cuda_arch=30")
    depends_on("cuda@5.0:10.2", when="cuda_arch=32")
    depends_on("cuda@5.0:11.8", when="cuda_arch=35")
    depends_on("cuda@6.5:11.8", when="cuda_arch=37")

    # Maxwell support:
    depends_on("cuda@6.0:", when="cuda_arch=50")
    depends_on("cuda@6.5:", when="cuda_arch=52")
    depends_on("cuda@6.5:", when="cuda_arch=53")

    # Pascal support:
    depends_on("cuda@8.0:", when="cuda_arch=60")
    depends_on("cuda@8.0:", when="cuda_arch=61")
    depends_on("cuda@8.0:", when="cuda_arch=62")

    # Volta support:
    depends_on("cuda@9.0:", when="cuda_arch=70")
    # Turing support:
    depends_on("cuda@9.0:", when="cuda_arch=72")
    depends_on("cuda@10.0:", when="cuda_arch=75")

    # Ampere support:
    depends_on("cuda@11.0:", when="cuda_arch=80")
    depends_on("cuda@11.1:", when="cuda_arch=86")
    depends_on("cuda@11.4:", when="cuda_arch=87")
    # Ada support:
    depends_on("cuda@11.8:", when="cuda_arch=89")

    # Hopper support:
    depends_on("cuda@12.0:", when="cuda_arch=90")
    depends_on("cuda@12.0:", when="cuda_arch=90a")

    # Blackwell support:
    depends_on("cuda@12.8:", when="cuda_arch=100")
    depends_on("cuda@12.8:", when="cuda_arch=100a")
    depends_on("cuda@12.8:", when="cuda_arch=101")
    depends_on("cuda@12.8:", when="cuda_arch=101a")
    depends_on("cuda@12.8:", when="cuda_arch=120")
    depends_on("cuda@12.8:", when="cuda_arch=120a")
    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
    # (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -163,6 +186,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
    conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -171,6 +195,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
    conflicts("%clang@20:", when="+cuda ^cuda@:12.8")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
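As a compact view of what these `depends_on` constraints encode for recent architectures, here is a sketch mapping a compute capability to the first CUDA toolkit that can target it (values transcribed from the hunk above; helper name hypothetical):

```python
# Minimum CUDA toolkit per compute capability, from the depends_on() lines above.
MIN_CUDA = {
    "70": "9.0", "72": "9.0", "75": "10.0",    # Volta / Turing
    "80": "11.0", "86": "11.1", "87": "11.4",  # Ampere
    "89": "11.8",                              # Ada
    "90": "12.0", "90a": "12.0",               # Hopper
    "100": "12.8", "100a": "12.8",             # Blackwell
    "101": "12.8", "101a": "12.8",
    "120": "12.8", "120a": "12.8",
}

def min_cuda_for(arch: str) -> str:
    """Hypothetical helper: smallest toolkit version able to target `arch`."""
    return MIN_CUDA[arch]

assert min_cuda_for("89") == "11.8"
```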
@@ -14,7 +14,7 @@
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set
from urllib.request import HTTPHandler, Request, build_opener
from urllib.request import Request

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -62,6 +62,8 @@

PushResult = namedtuple("PushResult", "success url")

urlopen = web_util.urlopen  # alias for mocking in tests


def get_change_revisions():
    """If this is a git repo get the revisions to use when checking
@@ -627,29 +629,19 @@ def download_and_extract_artifacts(url, work_dir):
    if token:
        headers["PRIVATE-TOKEN"] = token

    opener = build_opener(HTTPHandler)

    request = Request(url, headers=headers)
    request.get_method = lambda: "GET"

    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
    response_code = response.getcode()

    if response_code != 200:
        msg = f"Error response code ({response_code}) in reproduce_ci_job"
        raise SpackError(msg)

    request = Request(url, headers=headers, method="GET")
    artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
    os.makedirs(work_dir, exist_ok=True)

    if not os.path.exists(work_dir):
        os.makedirs(work_dir)
    try:
        response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
        with open(artifacts_zip_path, "wb") as out_file:
            shutil.copyfileobj(response, out_file)
    except OSError as e:
        raise SpackError(f"Error fetching artifacts: {e}")

    with open(artifacts_zip_path, "wb") as out_file:
        shutil.copyfileobj(response, out_file)

    zip_file = zipfile.ZipFile(artifacts_zip_path)
    zip_file.extractall(work_dir)
    zip_file.close()
    with zipfile.ZipFile(artifacts_zip_path) as zip_file:
        zip_file.extractall(work_dir)

    os.remove(artifacts_zip_path)
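The rewrite drops the custom opener and the `request.get_method = lambda: "GET"` trick in favor of the `method=` keyword that `urllib.request.Request` has accepted since Python 3.3, and context-manages the zip file. A self-contained sketch of the same pattern (function name and timeout are assumptions):

```python
import shutil
import urllib.request
import zipfile

def fetch_and_unpack(url: str, zip_path: str, dest_dir: str) -> None:
    # method= replaces the old get_method lambda override
    request = urllib.request.Request(url, method="GET")
    with urllib.request.urlopen(request, timeout=60) as response, open(
        zip_path, "wb"
    ) as out_file:
        shutil.copyfileobj(response, out_file)
    # ZipFile is a context manager, so no explicit close() is needed
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(dest_dir)
```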
@@ -9,9 +9,9 @@

import spack.cmd
import spack.environment as ev
import spack.package_base
import spack.store
from spack.cmd.common import arguments
from spack.solver.input_analysis import create_graph_analyzer

description = "show dependencies of a package"
section = "basic"
@@ -68,15 +68,17 @@ def dependencies(parser, args):

    else:
        spec = specs[0]
        dependencies = spack.package_base.possible_dependencies(
        dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
            spec,
            transitive=args.transitive,
            expand_virtuals=args.expand_virtuals,
            depflag=args.deptype,
            allowed_deps=args.deptype,
        )
        if not args.expand_virtuals:
            dependencies.update(virtuals)

        if spec.name in dependencies:
            del dependencies[spec.name]
            dependencies.remove(spec.name)

        if dependencies:
            colify(sorted(dependencies))
@@ -125,7 +125,7 @@ def develop(parser, args):
    version = spec.versions.concrete_range_as_version
    if not version:
        # look up the maximum version so infinity versions are preferred for develop
        version = max(spec.package_class.versions.keys())
        version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])
@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
    package does not explicitly forbid redistributing source."""
    if self.private:
        return True
    elif x.package_class.redistribute_source(x):
    elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
        return True
    else:
        tty.debug(
@@ -6,7 +6,7 @@
import os
import re
import sys
from itertools import zip_longest
from itertools import islice, zip_longest
from typing import Dict, List, Optional

import llnl.util.tty as tty
@@ -423,7 +423,8 @@ def _run_import_check(
            continue

        for m in is_abs_import.finditer(contents):
            if contents.count(m.group(1)) == 1:
            # Find at most two occurrences: the first is the import itself, the second is its usage.
            if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
                to_remove.append(m.group(0))
                exit_code = 1
                print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -438,7 +439,7 @@ def _run_import_check(
        module = _module_part(root, m.group(0))
        if not module or module in to_add:
            continue
        if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
        if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
            continue
        to_add.add(module)
        exit_code = 1
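The `islice` pattern above stops scanning as soon as a second match is found, rather than counting every occurrence in the file. A small self-contained sketch of the idea (function name hypothetical):

```python
import re
from itertools import islice

def used_beyond_import(name: str, contents: str) -> bool:
    """True if `name` occurs at least twice as a whole token:
    once for the import statement itself, once for a real use."""
    matches = islice(re.finditer(rf"{re.escape(name)}(?!\w)", contents), 2)
    return len(list(matches)) > 1

source = "import spack.store\n\nspack.store.reindex()\n"
assert used_beyond_import("spack.store", source)
```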
@@ -252,7 +252,9 @@ def has_test_and_tags(pkg_class):
    hashes = env.all_hashes() if env else None

    specs = spack.store.STORE.db.query(hashes=hashes)
    specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
    specs = list(
        filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
    )

    spack.cmd.display_specs(specs, long=True)
@@ -57,7 +57,7 @@ def validate(configuration_file):
    # Set the default value of the concretization strategy to unify and
    # warn if the user explicitly set another value
    env_dict.setdefault("concretizer", {"unify": True})
    if not env_dict["concretizer"]["unify"] is True:
    if env_dict["concretizer"]["unify"] is not True:
        warnings.warn(
            '"concretizer:unify" is not set to "true", which means the '
            "generated image may contain different variants of the same "
@@ -42,10 +42,10 @@
import llnl.util.tty.color

import spack.deptypes as dt
import spack.repo
import spack.spec
import spack.tengine
import spack.traverse
from spack.solver.input_analysis import create_graph_analyzer


def find(seq, predicate):
@@ -537,10 +537,11 @@ def edge_entry(self, edge):

def _static_edges(specs, depflag):
    for spec in specs:
        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
        possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
        *_, edges = create_graph_analyzer().possible_dependencies(
            spec.name, expand_virtuals=True, allowed_deps=depflag
        )

        for parent_name, dependencies in possible.items():
        for parent_name, dependencies in edges.items():
            for dependency_name in dependencies:
                yield spack.spec.DependencySpec(
                    spack.spec.Spec(parent_name),
@@ -566,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):

    # copy test data into test stage data dir
    try:
        pkg_cls = test_spec.package_class
        pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname)
    except spack.repo.UnknownPackageError:
        tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
        return
@@ -623,7 +623,7 @@ def test_functions(
    vpkgs = virtuals(pkg)
    for vname in vpkgs:
        try:
            classes.append((Spec(vname)).package_class)
            classes.append(spack.repo.PATH.get_pkg_class(vname))
        except spack.repo.UnknownPackageError:
            tty.debug(f"{vname}: virtual does not appear to have a package file")

@@ -668,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool

    # grab test functions associated with the spec, which may be virtual
    try:
        tests = test_functions(spec.package_class)
        tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname))
    except spack.repo.UnknownPackageError:
        # Some virtuals don't have a package so we don't want to report
        # them as not having tests when that isn't appropriate.
@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
        # Include build dependencies if pkg is going to be built from sources, or
        # if build deps are explicitly requested.
        if include_build_deps or not (
            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
            cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
        ):
            depflag |= dt.BUILD
        if self.run_tests(pkg):
@@ -2436,11 +2436,7 @@ def _real_install(self) -> None:
        # DEBUGGING TIP - to debug this section, insert an IPython
        # embed here, and run the sections below without log capture
        log_contextmanager = log_output(
            log_file,
            self.echo,
            True,
            env=self.unmodified_env,
            filter_fn=self.filter_fn,
            log_file, self.echo, True, filter_fn=self.filter_fn
        )

        with log_contextmanager as logger:
@@ -64,7 +64,7 @@ def from_local_path(path: str):
    @staticmethod
    def from_url(url: str):
        """Create an anonymous mirror by URL. This method validates the URL."""
        if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
        if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
            raise ValueError(
                f'"{url}" is not a valid mirror URL. '
                f"Scheme must be one of {supported_url_schemes}."
@@ -383,6 +383,7 @@ def create_opener():
    """Create an opener that can handle OCI authentication."""
    opener = urllib.request.OpenerDirector()
    for handler in [
        urllib.request.ProxyHandler(),
        urllib.request.UnknownHandler(),
        urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
        spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# flake8: noqa: F401
# flake8: noqa: F401, E402
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""
@@ -22,7 +22,6 @@
import textwrap
import time
import traceback
import typing
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union

from typing_extensions import Literal
@@ -697,9 +696,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    #: Verbosity level, preserved across installs.
    _verbose = None

    #: index of patches by sha256 sum, built lazily
    _patches_by_hash = None

    #: Package homepage where users can find more information about the package
    homepage: Optional[str] = None

@@ -713,19 +709,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    #: Do not include @ here in order not to unnecessarily ping the users.
    maintainers: List[str] = []

    #: List of attributes to be excluded from a package's hash.
    metadata_attrs = [
        "homepage",
        "url",
        "urls",
        "list_url",
        "extendable",
        "parallel",
        "make_jobs",
        "maintainers",
        "tags",
    ]

    #: Set to ``True`` to indicate the stand-alone test requires a compiler.
    #: It is used to ensure a compiler and build dependencies like 'cmake'
    #: are available to build a custom test code.
@@ -825,104 +808,6 @@ def get_variant(self, name: str) -> spack.variant.Variant:
        except StopIteration:
            raise ValueError(f"No variant '{name}' on spec: {self.spec}")

    @classmethod
    def possible_dependencies(
        cls,
        transitive: bool = True,
        expand_virtuals: bool = True,
        depflag: dt.DepFlag = dt.ALL,
        visited: Optional[dict] = None,
        missing: Optional[dict] = None,
        virtuals: Optional[set] = None,
    ) -> Dict[str, Set[str]]:
        """Return dict of possible dependencies of this package.

        Args:
            transitive (bool or None): return all transitive dependencies if
                True, only direct dependencies if False (default True).
            expand_virtuals (bool or None): expand virtual dependencies into
                all possible implementations (default True)
            depflag: dependency types to consider
            visited (dict or None): dict of names of dependencies visited so
                far, mapped to their immediate dependencies' names.
            missing (dict or None): dict to populate with packages and their
                *missing* dependencies.
            virtuals (set): if provided, populate with virtuals seen so far.

        Returns:
            (dict): dictionary mapping dependency names to *their*
                immediate dependencies

        Each item in the returned dictionary maps a (potentially
        transitive) dependency of this package to its possible
        *immediate* dependencies. If ``expand_virtuals`` is ``False``,
        virtual package names will be inserted as keys mapped to empty
        sets of dependencies. Virtuals, if not expanded, are treated as
        though they have no immediate dependencies.

        Missing dependencies by default are ignored, but if a
        missing dict is provided, it will be populated with package names
        mapped to any dependencies they have that are in no
        repositories. This is only populated if transitive is True.

        Note: the returned dict *includes* the package itself.

        """
        visited = {} if visited is None else visited
        missing = {} if missing is None else missing

        visited.setdefault(cls.name, set())

        for name, conditions in cls.dependencies_by_name(when=True).items():
            # check whether this dependency could be of the type asked for
            depflag_union = 0
            for deplist in conditions.values():
                for dep in deplist:
                    depflag_union |= dep.depflag
            if not (depflag & depflag_union):
                continue

            # expand virtuals if enabled, otherwise just stop at virtuals
            if spack.repo.PATH.is_virtual(name):
                if virtuals is not None:
                    virtuals.add(name)
                if expand_virtuals:
                    providers = spack.repo.PATH.providers_for(name)
                    dep_names = [spec.name for spec in providers]
                else:
                    visited.setdefault(cls.name, set()).add(name)
                    visited.setdefault(name, set())
                    continue
            else:
                dep_names = [name]

            # add the dependency names to the visited dict
            visited.setdefault(cls.name, set()).update(set(dep_names))

            # recursively traverse dependencies
            for dep_name in dep_names:
                if dep_name in visited:
                    continue

                visited.setdefault(dep_name, set())

                # skip the rest if not transitive
                if not transitive:
                    continue

                try:
                    dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
                except spack.repo.UnknownPackageError:
                    # log unknown packages
                    missing.setdefault(cls.name, set()).add(dep_name)
                    continue

                dep_cls.possible_dependencies(
                    transitive, expand_virtuals, depflag, visited, missing, virtuals
                )

        return visited

    @classproperty
    def package_dir(cls):
        """Directory where the package.py file lives."""
@@ -2287,55 +2172,6 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags


def use_cray_compiler_names():
    """Compiler names for builds that rely on cray compiler names."""
    os.environ["CC"] = "cc"
    os.environ["CXX"] = "CC"
    os.environ["FC"] = "ftn"
    os.environ["F77"] = "ftn"


def possible_dependencies(
    *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
    transitive: bool = True,
    expand_virtuals: bool = True,
    depflag: dt.DepFlag = dt.ALL,
    missing: Optional[dict] = None,
    virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
    """Get the possible dependencies of a number of packages.

    See ``PackageBase.possible_dependencies`` for details.
    """
    packages = []
    for pos in pkg_or_spec:
        if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
            packages.append(pos)
            continue

        if not isinstance(pos, spack.spec.Spec):
            pos = spack.spec.Spec(pos)

        if spack.repo.PATH.is_virtual(pos.name):
            packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
            continue
        else:
            packages.append(pos.package_class)

    visited: Dict[str, Set[str]] = {}
    for pkg in packages:
        pkg.possible_dependencies(
            visited=visited,
            transitive=transitive,
            expand_virtuals=expand_virtuals,
            depflag=depflag,
            missing=missing,
            virtuals=virtuals,
        )

    return visited


def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
    """Return True iff the version is deprecated.
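Callers of the two removed `possible_dependencies` entry points now go through the solver's graph analyzer instead. A hedged sketch of the replacement call pattern, mirroring the `spack.cmd.dependencies` and `graph.py` hunks earlier ("hdf5" is just an example spec name, not from this diff):

```python
import spack.deptypes as dt
from spack.solver.input_analysis import create_graph_analyzer

# The analyzer returns a PossibleGraph: (real_pkgs, virtuals, edges),
# where edges maps each package name to its possible direct dependencies.
pkgs, virtuals, edges = create_graph_analyzer().possible_dependencies(
    "hdf5", transitive=True, allowed_deps=dt.ALL, expand_virtuals=True
)
```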
@@ -236,22 +236,15 @@ def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, s
        _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)


def _warn_if_link_cant_be_relocated(link: str, target: str):
    if not os.path.isabs(target):
        return
    tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')


def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
    """Relocate links to a new install prefix."""
    regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
    for link in links:
        old_target = readlink(link)
        if not os.path.isabs(old_target):
            continue
        match = regex.match(old_target)

        # No match.
        if match is None:
            _warn_if_link_cant_be_relocated(link, old_target)
            continue

        new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
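The alternation regex above matches any known old prefix at the start of a link target and splices in the corresponding new prefix. A small sketch of the rewrite (paths are example values):

```python
import re

prefix_to_prefix = {"/old/opt/spack": "/new/opt/spack"}  # example mapping
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix))

def rewrite(target: str) -> str:
    match = regex.match(target)
    if match is None:
        return target  # not under a known prefix; cannot be relocated
    return prefix_to_prefix[match.group()] + target[match.end():]

assert rewrite("/old/opt/spack/lib/libz.so") == "/new/opt/spack/lib/libz.so"
```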
@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import collections
import hashlib
import os
@@ -13,7 +14,7 @@
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
from urllib.request import HTTPSHandler, Request, build_opener
from urllib.request import Request

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@@ -24,10 +25,10 @@
import spack.spec
import spack.tengine
import spack.util.git
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context

from .base import Reporter
from .extract import extract_test_parts
@@ -433,7 +434,6 @@ def upload(self, filename):
        # Compute md5 checksum for the contents of this file.
        md5sum = checksum(hashlib.md5, filename, block_size=8192)

        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
        with open(filename, "rb") as f:
            params_dict = {
                "build": self.buildname,
@@ -443,26 +443,21 @@ def upload(self, filename):
            }
            encoded_params = urlencode(params_dict)
            url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
            request = Request(url, data=f)
            request = Request(url, data=f, method="PUT")
            request.add_header("Content-Type", "text/xml")
            request.add_header("Content-Length", os.path.getsize(filename))
            if self.authtoken:
                request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
            try:
                # By default, urllib2 only support GET and POST.
                # CDash expects this file to be uploaded via PUT.
                request.get_method = lambda: "PUT"
                response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
                response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
                if self.current_package_name not in self.buildIds:
                    resp_value = response.read()
                    if isinstance(resp_value, bytes):
                        resp_value = resp_value.decode("utf-8")
                    resp_value = codecs.getreader("utf-8")(response).read()
                    match = self.buildid_regexp.search(resp_value)
                    if match:
                        buildid = match.group(1)
                        self.buildIds[self.current_package_name] = buildid
            except Exception as e:
                print("Upload to CDash failed: {0}".format(e))
                print(f"Upload to CDash failed: {e}")

    def finalize_report(self):
        if self.buildIds:
@@ -87,6 +87,7 @@
                "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
            },
        },
        "static_analysis": {"type": "boolean"},
        "timeout": {"type": "integer", "minimum": 0},
        "error_on_timeout": {"type": "boolean"},
        "os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},
@@ -62,7 +62,7 @@
    parse_files,
    parse_term,
)
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
from .input_analysis import create_counter, create_graph_analyzer
from .requirements import RequirementKind, RequirementParser, RequirementRule
from .version_order import concretization_version_order

@@ -271,15 +271,6 @@ def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunc
    return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))


def _create_counter(specs: List[spack.spec.Spec], tests: bool):
    strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
    if strategy == "full":
        return FullDuplicatesCounter(specs, tests=tests)
    if strategy == "minimal":
        return MinimalDuplicatesCounter(specs, tests=tests)
    return NoDuplicatesCounter(specs, tests=tests)


def all_libcs() -> Set[spack.spec.Spec]:
    """Return a set of all libc specs targeted by any configured compiler. If none, fall back to
    libc determined from the current Python process if dynamically linked."""
@@ -1121,6 +1112,8 @@ class SpackSolverSetup:
    """Class to set up and run a Spack concretization solve."""

    def __init__(self, tests: bool = False):
        self.possible_graph = create_graph_analyzer()

        # these are all initialized in setup()
        self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
        self.requirement_parser = RequirementParser(spack.config.CONFIG)
@@ -2397,38 +2390,20 @@ def keyfun(os):

    def target_defaults(self, specs):
        """Add facts about targets and target compatibility."""
        self.gen.h2("Default target")

        platform = spack.platforms.host()
        uarch = archspec.cpu.TARGETS.get(platform.default)

        self.gen.h2("Target compatibility")

        # Construct the list of targets which are compatible with the host
        candidate_targets = [uarch] + uarch.ancestors

        # Get configuration options
        granularity = spack.config.get("concretizer:targets:granularity")
        host_compatible = spack.config.get("concretizer:targets:host_compatible")

        # Add targets which are not compatible with the current host
        if not host_compatible:
            additional_targets_in_family = sorted(
                [
                    t
                    for t in archspec.cpu.TARGETS.values()
                    if (t.family.name == uarch.family.name and t not in candidate_targets)
                ],
                key=lambda x: len(x.ancestors),
                reverse=True,
            )
            candidate_targets += additional_targets_in_family

        # Check if we want only generic architecture
        if granularity == "generic":
            candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]

        # Add targets explicitly requested from specs
        candidate_targets = []
        for x in self.possible_graph.candidate_targets():
            if all(
                self.possible_graph.unreachable(pkg_name=pkg_name, when_spec=f"target={x}")
                for pkg_name in self.pkgs
            ):
                tty.debug(f"[{__name__}] excluding target={x}, cause no package can use it")
                continue
            candidate_targets.append(x)

        host_compatible = spack.config.CONFIG.get("concretizer:targets:host_compatible")
        for spec in specs:
            if not spec.architecture or not spec.architecture.target:
                continue
@@ -2444,6 +2419,8 @@ def target_defaults(self, specs):
            if ancestor not in candidate_targets:
                candidate_targets.append(ancestor)

        platform = spack.platforms.host()
        uarch = archspec.cpu.TARGETS.get(platform.default)
        best_targets = {uarch.family.name}
        for compiler_id, known_compiler in enumerate(self.possible_compilers):
            if not known_compiler.available:
@@ -2501,7 +2478,6 @@ def target_defaults(self, specs):
            self.gen.newline()

        self.default_targets = list(sorted(set(self.default_targets)))

        self.target_preferences()

    def virtual_providers(self):
@@ -2605,7 +2581,14 @@ def define_variant_values(self):
        # Tell the concretizer about possible values from specs seen in spec_clauses().
        # We might want to order these facts by pkg and name if we are debugging.
        for pkg_name, variant_def_id, value in self.variant_values_from_specs:
            vid = self.variant_ids_by_def_id[variant_def_id]
            try:
                vid = self.variant_ids_by_def_id[variant_def_id]
            except KeyError:
                tty.debug(
                    f"[{__name__}] cannot retrieve id of the {value} variant from {pkg_name}"
                )
                continue

            self.gen.fact(fn.pkg_fact(pkg_name, fn.variant_possible_value(vid, value)))

    def register_concrete_spec(self, spec, possible):
@@ -2676,7 +2659,7 @@ def setup(
    """
    check_packages_exist(specs)

    node_counter = _create_counter(specs, tests=self.tests)
    node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
    self.possible_virtuals = node_counter.possible_virtuals()
    self.pkgs = node_counter.possible_dependencies()
    self.libcs = sorted(all_libcs())  # type: ignore[type-var]
@@ -3489,7 +3472,7 @@ def external_spec_selected(self, node, idx):
        self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

        # If this is an extension, update the dependencies to include the extendee
        package = self._specs[node].package_class(self._specs[node])
        package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
        extendee_spec = package.extendee_spec

        if extendee_spec:
@@ -1,179 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set

from llnl.util import lang

import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec

PossibleDependencies = Set[str]


class Counter:
    """Computes the possible packages and the maximum number of duplicates
    allowed for each of them.

    Args:
        specs: abstract specs to concretize
        tests: if True, add test dependencies to the list of possible packages
    """

    def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
        runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
        runtime_virtuals = set()
        for x in runtime_pkgs:
            pkg_class = spack.repo.PATH.get_pkg_class(x)
            runtime_virtuals.update(pkg_class.provided_virtual_names())

        self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]

        self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
        self.all_types: dt.DepFlag = dt.ALL
        if not tests:
            self.link_run_types = dt.LINK | dt.RUN
            self.all_types = dt.LINK | dt.RUN | dt.BUILD

        self._possible_dependencies: PossibleDependencies = set()
        self._possible_virtuals: Set[str] = (
            set(x.name for x in specs if x.virtual) | runtime_virtuals
        )

    def possible_dependencies(self) -> PossibleDependencies:
        """Returns the list of possible dependencies"""
        self.ensure_cache_values()
        return self._possible_dependencies

    def possible_virtuals(self) -> Set[str]:
        """Returns the list of possible virtuals"""
        self.ensure_cache_values()
        return self._possible_virtuals

    def ensure_cache_values(self) -> None:
        """Ensure the cache values have been computed"""
        if self._possible_dependencies:
            return
        self._compute_cache_values()

    def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
        """Emit facts associated with the possible packages"""
        raise NotImplementedError("must be implemented by derived classes")

    def _compute_cache_values(self):
        raise NotImplementedError("must be implemented by derived classes")


class NoDuplicatesCounter(Counter):
    def _compute_cache_values(self):
        result = spack.package_base.possible_dependencies(
            *self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
        )
        self._possible_dependencies = set(result)

    def possible_packages_facts(self, gen, fn):
        gen.h2("Maximum number of nodes (packages)")
        for package_name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class MinimalDuplicatesCounter(NoDuplicatesCounter):
    def __init__(self, specs, tests):
        super().__init__(specs, tests)
        self._link_run: PossibleDependencies = set()
        self._direct_build: PossibleDependencies = set()
        self._total_build: PossibleDependencies = set()
        self._link_run_virtuals: Set[str] = set()

    def _compute_cache_values(self):
        self._link_run = set(
            spack.package_base.possible_dependencies(
                *self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
            )
        )
        self._link_run_virtuals.update(self._possible_virtuals)
        for x in self._link_run:
            build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
            virtuals, reals = lang.stable_partition(
                build_dependencies, spack.repo.PATH.is_virtual_safe
            )

            self._possible_virtuals.update(virtuals)
            for virtual_dep in virtuals:
                providers = spack.repo.PATH.providers_for(virtual_dep)
                self._direct_build.update(str(x) for x in providers)

            self._direct_build.update(reals)

        self._total_build = set(
            spack.package_base.possible_dependencies(
                *self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
            )
        )
        self._possible_dependencies = set(self._link_run) | set(self._total_build)

    def possible_packages_facts(self, gen, fn):
        build_tools = spack.repo.PATH.packages_with_tags("build-tools")
        gen.h2("Packages with at most a single node")
        for package_name in sorted(self.possible_dependencies() - build_tools):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Packages with at multiple possible nodes (build-tools)")
        for package_name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.max_dupes(package_name, 2))
            gen.fact(fn.multiple_unification_sets(package_name))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class FullDuplicatesCounter(MinimalDuplicatesCounter):
    def possible_packages_facts(self, gen, fn):
        build_tools = spack.repo.PATH.packages_with_tags("build-tools")
        counter = collections.Counter(
            list(self._link_run) + list(self._total_build) + list(self._direct_build)
        )
        gen.h2("Maximum number of nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            count = min(count, 2)
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()

        gen.h2("Build unification sets ")
        for name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.multiple_unification_sets(name))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()

        counter = collections.Counter(
            list(self._link_run_virtuals) + list(self._possible_virtuals)
        )
        gen.h2("Maximum number of virtual nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()
lib/spack/spack/solver/input_analysis.py (new file): 524 lines
@@ -0,0 +1,524 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Classes to analyze the input of a solve, and provide information to set up the ASP problem"""
|
||||
import collections
|
||||
from typing import Dict, List, NamedTuple, Set, Tuple, Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
from llnl.util import lang, tty
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.error import SpackError
|
||||
|
||||
RUNTIME_TAG = "runtime"
|
||||
|
||||
|
||||
class PossibleGraph(NamedTuple):
|
||||
real_pkgs: Set[str]
|
||||
virtuals: Set[str]
|
||||
edges: Dict[str, Set[str]]
|
||||
|
||||
|
||||
class PossibleDependencyGraph:
|
||||
"""Returns information needed to set up an ASP problem"""
|
||||
|
||||
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
|
||||
"""Returns true if the context can determine that the condition cannot ever
|
||||
be met on pkg_name.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
|
||||
"""Returns a list of targets that are candidate for concretization"""
|
||||
raise NotImplementedError
|
||||
|
||||
def possible_dependencies(
|
||||
self,
|
||||
*specs: Union[spack.spec.Spec, str],
|
||||
allowed_deps: dt.DepFlag,
|
||||
transitive: bool = True,
|
||||
strict_depflag: bool = False,
|
||||
expand_virtuals: bool = True,
|
||||
) -> PossibleGraph:
|
||||
"""Returns the set of possible dependencies, and the set of possible virtuals.
|
||||
|
||||
Both sets always include runtime packages, which may be injected by compilers.
|
||||
|
||||
Args:
|
||||
transitive: return transitive dependencies if True, only direct dependencies if False
|
||||
allowed_deps: dependency types to consider
|
||||
strict_depflag: if True, only the specific dep type is considered, if False any
|
||||
deptype that intersects with allowed deptype is considered
|
||||
expand_virtuals: expand virtual dependencies into all possible implementations
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class NoStaticAnalysis(PossibleDependencyGraph):
    """Implementation that tries to minimize the setup time (i.e. defaults to give fast
    answers), rather than trying to reduce the ASP problem size with more complex analysis.
    """

    def __init__(self, *, configuration: spack.config.Configuration, repo: spack.repo.RepoPath):
        self.configuration = configuration
        self.repo = repo
        self.runtime_pkgs = set(self.repo.packages_with_tags(RUNTIME_TAG))
        self.runtime_virtuals = set()
        self._platform_condition = spack.spec.Spec(
            f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
        )
        for x in self.runtime_pkgs:
            pkg_class = self.repo.get_pkg_class(x)
            self.runtime_virtuals.update(pkg_class.provided_virtual_names())

        try:
            self.libc_pkgs = [x.name for x in self.providers_for("libc")]
        except spack.repo.UnknownPackageError:
            self.libc_pkgs = []

    def is_virtual(self, name: str) -> bool:
        return self.repo.is_virtual(name)

    @lang.memoized
    def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
        """Returns true if a package is allowed on the current host"""
        pkg_cls = self.repo.get_pkg_class(pkg_name)
        for when_spec, conditions in pkg_cls.requirements.items():
            if not when_spec.intersects(self._platform_condition):
                continue
            for requirements, _, _ in conditions:
                if not any(x.intersects(self._platform_condition) for x in requirements):
                    tty.debug(f"[{__name__}] {pkg_name} is not for this platform")
                    return False
        return True

    def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
        """Returns a list of possible providers for the virtual string in input."""
        return self.repo.providers_for(virtual_str)

    def can_be_installed(self, *, pkg_name) -> bool:
        """Returns True if a package can be installed, False otherwise."""
        return True

    def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
        """Returns true if the context can determine that the condition cannot ever
        be met on pkg_name.
        """
        return False

    def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
        """Returns a list of targets that are candidates for concretization"""
        platform = spack.platforms.host()
        default_target = archspec.cpu.TARGETS[platform.default]

        # Construct the list of targets which are compatible with the host
        candidate_targets = [default_target] + default_target.ancestors
        granularity = self.configuration.get("concretizer:targets:granularity")
        host_compatible = self.configuration.get("concretizer:targets:host_compatible")

        # Add targets which are not compatible with the current host
        if not host_compatible:
            additional_targets_in_family = sorted(
                [
                    t
                    for t in archspec.cpu.TARGETS.values()
                    if (t.family.name == default_target.family.name and t not in candidate_targets)
                ],
                key=lambda x: len(x.ancestors),
                reverse=True,
            )
            candidate_targets += additional_targets_in_family

        # Check if we want only generic architecture
        if granularity == "generic":
            candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]

        return candidate_targets

    def possible_dependencies(
        self,
        *specs: Union[spack.spec.Spec, str],
        allowed_deps: dt.DepFlag,
        transitive: bool = True,
        strict_depflag: bool = False,
        expand_virtuals: bool = True,
    ) -> PossibleGraph:
        stack = [x for x in self._package_list(specs)]
        virtuals: Set[str] = set()
        edges: Dict[str, Set[str]] = {}

        while stack:
            pkg_name = stack.pop()

            if pkg_name in edges:
                continue

            edges[pkg_name] = set()

            # Since libc is not buildable, there is no need to extend the
            # search space with libc dependencies.
            if pkg_name in self.libc_pkgs:
                continue

            pkg_cls = self.repo.get_pkg_class(pkg_name=pkg_name)
            for name, conditions in pkg_cls.dependencies_by_name(when=True).items():
                if all(self.unreachable(pkg_name=pkg_name, when_spec=x) for x in conditions):
                    tty.debug(
                        f"[{__name__}] Not adding {name} as a dep of {pkg_name}, because "
                        f"conditions cannot be met"
                    )
                    continue

                if not self._has_deptypes(
                    conditions, allowed_deps=allowed_deps, strict=strict_depflag
                ):
                    continue

                if name in virtuals:
                    continue

                dep_names = set()
                if self.is_virtual(name):
                    virtuals.add(name)
                    if expand_virtuals:
                        providers = self.providers_for(name)
                        dep_names = {spec.name for spec in providers}
                else:
                    dep_names = {name}

                edges[pkg_name].update(dep_names)

                if not transitive:
                    continue

                for dep_name in dep_names:
                    if dep_name in edges:
                        continue

                    if not self._is_possible(pkg_name=dep_name):
                        continue

                    stack.append(dep_name)

        real_packages = set(edges)
        if not transitive:
            # We exit early, so add children from the edges information
            for root, children in edges.items():
                real_packages.update(x for x in children if self._is_possible(pkg_name=x))

        virtuals.update(self.runtime_virtuals)
        real_packages = real_packages | self.runtime_pkgs
        return PossibleGraph(real_pkgs=real_packages, virtuals=virtuals, edges=edges)

    def _package_list(self, specs: Tuple[Union[spack.spec.Spec, str], ...]) -> List[str]:
        stack = []
        for current_spec in specs:
            if isinstance(current_spec, str):
                current_spec = spack.spec.Spec(current_spec)

            if self.repo.is_virtual(current_spec.name):
                stack.extend([p.name for p in self.providers_for(current_spec.name)])
                continue

            stack.append(current_spec.name)
        return sorted(set(stack))

    def _has_deptypes(self, dependencies, *, allowed_deps: dt.DepFlag, strict: bool) -> bool:
        if strict is True:
            return any(
                dep.depflag == allowed_deps for deplist in dependencies.values() for dep in deplist
            )
        return any(
            dep.depflag & allowed_deps for deplist in dependencies.values() for dep in deplist
        )

    def _is_possible(self, *, pkg_name):
        try:
            return self.is_allowed_on_this_platform(pkg_name=pkg_name) and self.can_be_installed(
                pkg_name=pkg_name
            )
        except spack.repo.UnknownPackageError:
            return False
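The strict flag in _has_deptypes above changes the test from set intersection to exact equality; a minimal sketch of the difference using only spack.deptypes flags:

import spack.deptypes as dt

declared = dt.BUILD | dt.LINK  # a dependency declared as type=("build", "link")

# strict=False: any overlap with the allowed flag counts
assert declared & dt.BUILD

# strict=True: the declared flag must equal the query exactly
assert declared != dt.BUILD
assert declared == dt.BUILD | dt.LINK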
class StaticAnalysis(NoStaticAnalysis):
    """Performs some static analysis of the configuration, store, etc. to provide more precise
    answers on whether some packages can be installed, or used as a provider.

    It increases the setup time, but might decrease the grounding and solve time considerably,
    especially when requirements restrict the possible choices for providers.
    """

    def __init__(
        self,
        *,
        configuration: spack.config.Configuration,
        repo: spack.repo.RepoPath,
        store: spack.store.Store,
        binary_index: spack.binary_distribution.BinaryCacheIndex,
    ):
        super().__init__(configuration=configuration, repo=repo)
        self.store = store
        self.binary_index = binary_index

    @lang.memoized
    def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
        candidates = super().providers_for(virtual_str)
        result = []
        for spec in candidates:
            if not self._is_provider_candidate(pkg_name=spec.name, virtual=virtual_str):
                continue
            result.append(spec)
        return result

    @lang.memoized
    def buildcache_specs(self) -> List[spack.spec.Spec]:
        self.binary_index.update()
        return self.binary_index.get_all_built_specs()

    @lang.memoized
    def can_be_installed(self, *, pkg_name) -> bool:
        if self.configuration.get(f"packages:{pkg_name}:buildable", True):
            return True

        if self.configuration.get(f"packages:{pkg_name}:externals", []):
            return True

        reuse = self.configuration.get("concretizer:reuse")
        if reuse is not False and self.store.db.query(pkg_name):
            return True

        if reuse is not False and any(x.name == pkg_name for x in self.buildcache_specs()):
            return True

        tty.debug(f"[{__name__}] {pkg_name} cannot be installed")
        return False

    @lang.memoized
    def _is_provider_candidate(self, *, pkg_name: str, virtual: str) -> bool:
        if not self.is_allowed_on_this_platform(pkg_name=pkg_name):
            return False

        if not self.can_be_installed(pkg_name=pkg_name):
            return False

        virtual_spec = spack.spec.Spec(virtual)
        if self.unreachable(pkg_name=virtual_spec.name, when_spec=pkg_name):
            tty.debug(f"[{__name__}] {pkg_name} cannot be a provider for {virtual}")
            return False

        return True

    @lang.memoized
    def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
        """Returns true if the context can determine that the condition cannot ever
        be met on pkg_name.
        """
        candidates = self.configuration.get(f"packages:{pkg_name}:require", [])
        if not candidates and pkg_name != "all":
            return self.unreachable(pkg_name="all", when_spec=when_spec)

        if not candidates:
            return False

        if isinstance(candidates, str):
            candidates = [candidates]

        union_requirement = spack.spec.Spec()
        for c in candidates:
            if not isinstance(c, str):
                continue
            try:
                union_requirement.constrain(c)
            except SpackError:
                # Less optimized, but shouldn't fail
                pass

        if not union_requirement.intersects(when_spec):
            return True

        return False
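The pruning idea behind StaticAnalysis.unreachable in miniature: if the union of all requirements for a package cannot intersect a dependency's when= condition, that conditional dependency can never become active. A hedged sketch (+mpi/~mpi are illustrative variants):

import spack.spec

union_requirement = spack.spec.Spec()
union_requirement.constrain("+mpi")  # e.g. packages:foo:require: "+mpi"

when_spec = spack.spec.Spec("~mpi")  # e.g. depends_on("bar", when="~mpi")
assert not union_requirement.intersects(when_spec)  # the condition is unreachable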
def create_graph_analyzer() -> PossibleDependencyGraph:
    static_analysis = spack.config.CONFIG.get("concretizer:static_analysis", False)
    if static_analysis:
        return StaticAnalysis(
            configuration=spack.config.CONFIG,
            repo=spack.repo.PATH,
            store=spack.store.STORE,
            binary_index=spack.binary_distribution.BINARY_INDEX,
        )
    return NoStaticAnalysis(configuration=spack.config.CONFIG, repo=spack.repo.PATH)
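For orientation, a minimal usage sketch of this factory (assumes a configured Spack session; zlib is an arbitrary example spec):

import spack.deptypes as dt
from spack.solver.input_analysis import create_graph_analyzer

inspector = create_graph_analyzer()  # StaticAnalysis only if concretizer:static_analysis is set
graph = inspector.possible_dependencies("zlib", allowed_deps=dt.ALL)
print(sorted(graph.real_pkgs))  # packages that may appear in the ASP problem
print(sorted(graph.virtuals))   # virtuals that may still need a provider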
class Counter:
    """Computes the possible packages and the maximum number of duplicates
    allowed for each of them.

    Args:
        specs: abstract specs to concretize
        tests: if True, add test dependencies to the list of possible packages
    """

    def __init__(
        self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
    ) -> None:
        self.possible_graph = possible_graph
        self.specs = specs
        self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
        self.all_types: dt.DepFlag = dt.ALL
        if not tests:
            self.link_run_types = dt.LINK | dt.RUN
            self.all_types = dt.LINK | dt.RUN | dt.BUILD

        self._possible_dependencies: Set[str] = set()
        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)

    def possible_dependencies(self) -> Set[str]:
        """Returns the list of possible dependencies"""
        self.ensure_cache_values()
        return self._possible_dependencies

    def possible_virtuals(self) -> Set[str]:
        """Returns the list of possible virtuals"""
        self.ensure_cache_values()
        return self._possible_virtuals

    def ensure_cache_values(self) -> None:
        """Ensure the cache values have been computed"""
        if self._possible_dependencies:
            return
        self._compute_cache_values()

    def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
        """Emit facts associated with the possible packages"""
        raise NotImplementedError("must be implemented by derived classes")

    def _compute_cache_values(self) -> None:
        raise NotImplementedError("must be implemented by derived classes")
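Counter is a small template-method pattern: ensure_cache_values computes lazily, and subclasses fill the caches. Restated generically as a standalone sketch with no Spack imports:

class LazyCounter:
    def __init__(self):
        self._cache = set()

    def values(self):
        # mirrors Counter.ensure_cache_values(): compute once, on first use
        if not self._cache:
            self._compute_cache_values()
        return self._cache

    def _compute_cache_values(self):
        # a subclass fills self._cache here
        raise NotImplementedError("must be implemented by derived classes")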
class NoDuplicatesCounter(Counter):
    def _compute_cache_values(self) -> None:
        self._possible_dependencies, virtuals, _ = self.possible_graph.possible_dependencies(
            *self.specs, allowed_deps=self.all_types
        )
        self._possible_virtuals.update(virtuals)

    def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
        gen.h2("Maximum number of nodes (packages)")
        for package_name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
    def __init__(
        self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
    ) -> None:
        super().__init__(specs, tests, possible_graph)
        self._link_run: Set[str] = set()
        self._direct_build: Set[str] = set()
        self._total_build: Set[str] = set()
        self._link_run_virtuals: Set[str] = set()

    def _compute_cache_values(self) -> None:
        self._link_run, virtuals, _ = self.possible_graph.possible_dependencies(
            *self.specs, allowed_deps=self.link_run_types
        )
        self._possible_virtuals.update(virtuals)
        self._link_run_virtuals.update(virtuals)
        for x in self._link_run:
            reals, virtuals, _ = self.possible_graph.possible_dependencies(
                x, allowed_deps=dt.BUILD, transitive=False, strict_depflag=True
            )
            self._possible_virtuals.update(virtuals)
            self._direct_build.update(reals)

        self._total_build, virtuals, _ = self.possible_graph.possible_dependencies(
            *self._direct_build, allowed_deps=self.all_types
        )
        self._possible_virtuals.update(virtuals)
        self._possible_dependencies = set(self._link_run) | set(self._total_build)

    def possible_packages_facts(self, gen, fn):
        build_tools = spack.repo.PATH.packages_with_tags("build-tools")
        gen.h2("Packages with at most a single node")
        for package_name in sorted(self.possible_dependencies() - build_tools):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Packages with multiple possible nodes (build-tools)")
        for package_name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.max_dupes(package_name, 2))
            gen.fact(fn.multiple_unification_sets(package_name))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
    def possible_packages_facts(self, gen, fn):
        build_tools = spack.repo.PATH.packages_with_tags("build-tools")
        counter = collections.Counter(
            list(self._link_run) + list(self._total_build) + list(self._direct_build)
        )
        gen.h2("Maximum number of nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            count = min(count, 2)
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()

        gen.h2("Build unification sets ")
        for name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.multiple_unification_sets(name))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()

        counter = collections.Counter(
            list(self._link_run_virtuals) + list(self._possible_virtuals)
        )
        gen.h2("Maximum number of virtual nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()
def create_counter(
    specs: List[spack.spec.Spec], tests: bool, possible_graph: PossibleDependencyGraph
) -> Counter:
    strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
    if strategy == "full":
        return FullDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
    if strategy == "minimal":
        return MinimalDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
    return NoDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
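A hedged sketch of how the two factories combine (hdf5 is an arbitrary example; the strategy is read from concretizer:duplicates:strategy):

import spack.spec
from spack.solver.input_analysis import create_counter, create_graph_analyzer

counter = create_counter(
    [spack.spec.Spec("hdf5")], tests=False, possible_graph=create_graph_analyzer()
)
print(sorted(counter.possible_dependencies()))
print(sorted(counter.possible_virtuals()))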
@@ -1527,9 +1527,8 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
         self._external_path = external_path
         self.external_modules = Spec._format_module_list(external_modules)
 
-        # This attribute is used to store custom information for
-        # external specs. None signal that it was not set yet.
-        self.extra_attributes = None
+        # This attribute is used to store custom information for external specs.
+        self.extra_attributes: dict = {}
 
         # This attribute holds the original build copy of the spec if it is
         # deployed differently than it was built. None signals that the spec
@@ -1906,6 +1905,12 @@ def package_class(self):
         """Internal package call gets only the class object for a package.
         Use this to just get package metadata.
         """
+        warnings.warn(
+            "`Spec.package_class` is deprecated and will be removed in version 1.0.0. Use "
+            "`spack.repo.PATH.get_pkg_class(spec.fullname)` instead.",
+            category=spack.error.SpackAPIWarning,
+            stacklevel=2,
+        )
         return spack.repo.PATH.get_pkg_class(self.fullname)
 
     @property
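The deprecation above implies a one-line migration for downstream code; a sketch of the replacement helper (the name pkg_class_of is illustrative):

import spack.repo
import spack.spec

def pkg_class_of(spec: spack.spec.Spec):
    # replaces the deprecated spec.package_class property
    return spack.repo.PATH.get_pkg_class(spec.fullname)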
@@ -2351,15 +2356,10 @@ def to_node_dict(self, hash=ht.dag_hash):
         )
 
         if self.external:
-            if self.extra_attributes:
-                extra_attributes = syaml.sorted_dict(self.extra_attributes)
-            else:
-                extra_attributes = None
-
             d["external"] = {
                 "path": self.external_path,
-                "module": self.external_modules,
-                "extra_attributes": extra_attributes,
+                "module": self.external_modules or None,
+                "extra_attributes": syaml.sorted_dict(self.extra_attributes),
             }
 
         if not self._concrete:
@@ -2870,7 +2870,7 @@ def inject_patches_variant(root):
 
         # Add any patches from the package to the spec.
         patches = set()
-        for cond, patch_list in s.package_class.patches.items():
+        for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
             if s.satisfies(cond):
                 for patch in patch_list:
                     patches.add(patch)
@@ -2883,7 +2883,7 @@ def inject_patches_variant(root):
         if dspec.spec.concrete:
             continue
 
-        pkg_deps = dspec.parent.package_class.dependencies
+        pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
 
         patches = []
         for cond, deps_by_name in pkg_deps.items():
@@ -3117,7 +3117,7 @@ def ensure_valid_variants(spec):
     if spec.concrete:
         return
 
-    pkg_cls = spec.package_class
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
     pkg_variants = pkg_cls.variant_names()
     # reserved names are variants that may be set on any package
     # but are not necessarily recorded by the package's class
@@ -3861,6 +3861,13 @@ def _cmp_iter(self):
         for item in self._cmp_node():
             yield item
 
+        # If there is ever a breaking change to hash computation, whether accidental or purposeful,
+        # two specs can be identical modulo DAG hash, depending on what time they were concretized.
+        # From the perspective of many operations in Spack (database, build cache, etc.) a different
+        # DAG hash means a different spec. Here we ensure that two otherwise identical specs, one
+        # serialized before the hash change and one after, are considered different.
+        yield self.dag_hash() if self.concrete else None
+
         # This needs to be in _cmp_iter so that no specs with different process hashes
         # are considered the same by `__hash__` or `__eq__`.
         #
@@ -4704,7 +4711,7 @@ def concrete(self):
             bool: True or False
         """
         return self.spec._concrete or all(
-            v in self for v in self.spec.package_class.variant_names()
+            v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
        )
 
     def copy(self) -> "VariantMap":
@@ -4764,14 +4771,14 @@ def substitute_abstract_variants(spec: Spec):
         elif name in vt.reserved_names:
             continue
 
-        variant_defs = spec.package_class.variant_definitions(name)
+        variant_defs = spack.repo.PATH.get_pkg_class(spec.fullname).variant_definitions(name)
         valid_defs = []
         for when, vdef in variant_defs:
             if when.intersects(spec):
                 valid_defs.append(vdef)
 
         if not valid_defs:
-            if name not in spec.package_class.variant_names():
+            if name not in spack.repo.PATH.get_pkg_class(spec.fullname).variant_names():
                 unknown.append(name)
             else:
                 whens = [str(when) for when, _ in variant_defs]
@@ -4908,7 +4915,7 @@ def from_node_dict(cls, node):
             spec.external_modules = node["external"]["module"]
             if spec.external_modules is False:
                 spec.external_modules = None
-            spec.extra_attributes = node["external"].get("extra_attributes", {})
+            spec.extra_attributes = node["external"].get("extra_attributes") or {}
 
             # specs read in are concrete unless marked abstract
             if node.get("concrete", True):
@@ -1,8 +1,10 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import io
 import os
 import subprocess
+from urllib.error import HTTPError
 
 import pytest
@@ -15,6 +17,7 @@
 import spack.paths as spack_paths
 import spack.repo as repo
 import spack.util.git
+from spack.test.conftest import MockHTTPResponse
 
 pytestmark = [pytest.mark.usefixtures("mock_packages")]
@@ -162,38 +165,8 @@ def test_import_signing_key(mock_gnupghome):
     ci.import_signing_key(signing_key)
 
 
-class FakeWebResponder:
-    def __init__(self, response_code=200, content_to_read=[]):
-        self._resp_code = response_code
-        self._content = content_to_read
-        self._read = [False for c in content_to_read]
-
-    def open(self, request, data=None, timeout=object()):
-        return self
-
-    def getcode(self):
-        return self._resp_code
-
-    def read(self, length=None):
-        if len(self._content) <= 0:
-            return None
-
-        if not self._read[-1]:
-            return_content = self._content[-1]
-            if length:
-                self._read[-1] = True
-            else:
-                self._read.pop()
-                self._content.pop()
-            return return_content
-
-        self._read.pop()
-        self._content.pop()
-        return None
-
-
-def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
-    os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
+def test_download_and_extract_artifacts(tmpdir, monkeypatch):
+    monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")
 
     url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
     working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -201,10 +174,13 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
         spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
     )
 
-    with open(test_artifacts_path, "rb") as fd:
-        fake_responder = FakeWebResponder(content_to_read=[fd.read()])
+    def _urlopen_OK(*args, **kwargs):
+        with open(test_artifacts_path, "rb") as f:
+            return MockHTTPResponse(
+                "200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
+            )
 
-    monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
+    monkeypatch.setattr(ci, "urlopen", _urlopen_OK)
 
     ci.download_and_extract_artifacts(url, working_dir)
@@ -214,7 +190,11 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
     found_install = fs.find(working_dir, "install.sh")
     assert len(found_install) == 1
 
-    fake_responder._resp_code = 400
+    def _urlopen_500(*args, **kwargs):
+        raise HTTPError(url, 500, "Internal Server Error", {}, None)
+
+    monkeypatch.setattr(ci, "urlopen", _urlopen_500)
 
     with pytest.raises(spack.error.SpackError):
         ci.download_and_extract_artifacts(url, working_dir)
@@ -24,32 +24,24 @@
 mpi_deps = ["fake"]
 
 
-def test_direct_dependencies(mock_packages):
-    out = dependencies("mpileaks")
-    actual = set(re.split(r"\s+", out.strip()))
-    expected = set(["callpath"] + mpis)
-    assert expected == actual
-
-
-def test_transitive_dependencies(mock_packages):
-    out = dependencies("--transitive", "mpileaks")
-    actual = set(re.split(r"\s+", out.strip()))
-    expected = set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps)
-    assert expected == actual
-
-
-def test_transitive_dependencies_with_deptypes(mock_packages):
-    out = dependencies("--transitive", "--deptype=link,run", "dtbuild1")
-    deps = set(re.split(r"\s+", out.strip()))
-    assert set(["dtlink2", "dtrun2"]) == deps
-
-    out = dependencies("--transitive", "--deptype=build", "dtbuild1")
-    deps = set(re.split(r"\s+", out.strip()))
-    assert set(["dtbuild2", "dtlink2"]) == deps
-
-    out = dependencies("--transitive", "--deptype=link", "dtbuild1")
-    deps = set(re.split(r"\s+", out.strip()))
-    assert set(["dtlink2"]) == deps
+@pytest.mark.parametrize(
+    "cli_args,expected",
+    [
+        (["mpileaks"], set(["callpath"] + mpis)),
+        (
+            ["--transitive", "mpileaks"],
+            set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps),
+        ),
+        (["--transitive", "--deptype=link,run", "dtbuild1"], {"dtlink2", "dtrun2"}),
+        (["--transitive", "--deptype=build", "dtbuild1"], {"dtbuild2", "dtlink2"}),
+        (["--transitive", "--deptype=link", "dtbuild1"], {"dtlink2"}),
+    ],
+)
+def test_direct_dependencies(cli_args, expected, mock_runtimes):
+    out = dependencies(*cli_args)
+    result = set(re.split(r"\s+", out.strip()))
+    expected.update(mock_runtimes)
+    assert expected == result
 
 
 @pytest.mark.db
@@ -304,6 +304,8 @@ def test_run_import_check(tmp_path: pathlib.Path):
     contents = '''
 import spack.cmd
+import spack.config  # do not drop this import because of this comment
 import spack.repo
+import spack.repo_utils
 
 # this comment about spack.error should not be removed
 class Example(spack.build_systems.autotools.AutotoolsPackage):
@@ -314,6 +316,7 @@ def foo(config: "spack.error.SpackError"):
     # the type hint is quoted, so it should not be removed
     spack.util.executable.Executable("example")
     print(spack.__version__)
+    print(spack.repo_utils.__file__)
 '''
     file.write_text(contents)
     root = str(tmp_path)
@@ -329,6 +332,7 @@ def foo(config: "spack.error.SpackError"):
     output = output_buf.getvalue()
 
     assert "issues.py: redundant import: spack.cmd" in output
     assert "issues.py: redundant import: spack.repo" in output
+    assert "issues.py: redundant import: spack.config" not in output  # comment prevents removal
     assert "issues.py: missing import: spack" in output  # used by spack.__version__
     assert "issues.py: missing import: spack.build_systems.autotools" in output
@@ -1,7 +1,6 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
 import pathlib
 
 import pytest
@@ -182,7 +181,7 @@ def test_requirement_adds_version_satisfies(
 
     # Sanity check: early version of T does not include U
     s0 = spack.concretize.concretize_one("t@2.0")
-    assert not ("u" in s0)
+    assert "u" not in s0
 
     conf_str = """\
 packages:
@@ -200,11 +199,11 @@ def test_requirement_adds_version_satisfies(
 
 @pytest.mark.parametrize("require_checksum", (True, False))
 def test_requirement_adds_git_hash_version(
-    require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
+    require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
     # A full commit sha is a checksummed version, so this test should pass in both cases
     if require_checksum:
-        os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
+        monkeypatch.setenv("SPACK_CONCRETIZER_REQUIRE_CHECKSUM", "yes")
 
     repo_path, filename, commits = mock_git_version_info
     monkeypatch.setattr(
@@ -2171,3 +2171,8 @@ def getcode(self):
 
     def info(self):
         return self.headers
+
+
+@pytest.fixture()
+def mock_runtimes(config, mock_packages):
+    return mock_packages.packages_with_tags("runtime")
@@ -206,7 +206,7 @@ def test_repo(_create_test_repo, monkeypatch, mock_stage):
 )
 def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
     spec = spack.spec.Spec(spec_str)
-    assert spec.package_class.redistribute_source(spec) == distribute_src
+    assert spack.repo.PATH.get_pkg_class(spec.fullname).redistribute_source(spec) == distribute_src
     concretized_spec = spack.concretize.concretize_one(spec)
     assert concretized_spec.package.redistribute_binary == distribute_bin
@@ -1,7 +1,6 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
 """Test class methods on Package objects.
 
 This doesn't include methods on package *instances* (like do_patch(),
@@ -16,6 +15,7 @@
 
 import llnl.util.filesystem as fs
 
+import spack.binary_distribution
 import spack.compilers
 import spack.concretize
 import spack.deptypes as dt
@@ -23,15 +23,11 @@
 import spack.install_test
 import spack.package
 import spack.package_base
 import spack.repo
 import spack.spec
 import spack.store
 from spack.build_systems.generic import Package
 from spack.error import InstallError
-
-
-@pytest.fixture(scope="module")
-def mpi_names(mock_repo_path):
-    return [spec.name for spec in mock_repo_path.providers_for("mpi")]
+from spack.solver.input_analysis import NoStaticAnalysis, StaticAnalysis
 
 
 @pytest.fixture()
@@ -53,78 +49,94 @@ def mpileaks_possible_deps(mock_packages, mpi_names):
     return possible
 
 
-def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
-    pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
-    expanded_possible_deps = pkg_cls.possible_dependencies(expand_virtuals=True)
-    assert mpileaks_possible_deps == expanded_possible_deps
-    assert {
-        "callpath": {"dyninst", "mpi"},
-        "dyninst": {"libdwarf", "libelf"},
-        "libdwarf": {"libelf"},
-        "libelf": set(),
-        "mpi": set(),
-        "mpileaks": {"callpath", "mpi"},
-    } == pkg_cls.possible_dependencies(expand_virtuals=False)
-
-
-def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
-    pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
-    deps = pkg_cls.possible_dependencies(transitive=False, expand_virtuals=False)
-    assert {"callpath": set(), "mpi": set(), "mpileaks": {"callpath", "mpi"}} == deps
-
-
-def test_possible_dependencies_virtual(mock_packages, mpi_names):
-    expected = dict(
-        (name, set(dep for dep in spack.repo.PATH.get_pkg_class(name).dependencies_by_name()))
-        for name in mpi_names
-    )
-
-    # only one mock MPI has a dependency
-    expected["fake"] = set()
-
-    assert expected == spack.package_base.possible_dependencies("mpi", transitive=False)
-
-
-def test_possible_dependencies_missing(mock_packages):
-    pkg_cls = spack.repo.PATH.get_pkg_class("missing-dependency")
-    missing = {}
-    pkg_cls.possible_dependencies(transitive=True, missing=missing)
-    assert {"this-is-a-missing-dependency"} == missing["missing-dependency"]
-
-
-def test_possible_dependencies_with_deptypes(mock_packages):
-    dtbuild1 = spack.repo.PATH.get_pkg_class("dtbuild1")
-
-    assert {
-        "dtbuild1": {"dtrun2", "dtlink2"},
-        "dtlink2": set(),
-        "dtrun2": set(),
-    } == dtbuild1.possible_dependencies(depflag=dt.LINK | dt.RUN)
-
-    assert {
-        "dtbuild1": {"dtbuild2", "dtlink2"},
-        "dtbuild2": set(),
-        "dtlink2": set(),
-    } == dtbuild1.possible_dependencies(depflag=dt.BUILD)
-
-    assert {"dtbuild1": {"dtlink2"}, "dtlink2": set()} == dtbuild1.possible_dependencies(
-        depflag=dt.LINK
-    )
-
-
-def test_possible_dependencies_with_multiple_classes(mock_packages, mpileaks_possible_deps):
+@pytest.fixture(params=[NoStaticAnalysis, StaticAnalysis])
+def mock_inspector(config, mock_packages, request):
+    inspector_cls = request.param
+    if inspector_cls is NoStaticAnalysis:
+        return inspector_cls(configuration=config, repo=mock_packages)
+    return inspector_cls(
+        configuration=config,
+        repo=mock_packages,
+        store=spack.store.STORE,
+        binary_index=spack.binary_distribution.BINARY_INDEX,
+    )
+
+
+@pytest.fixture
+def mpi_names(mock_inspector):
+    return [spec.name for spec in mock_inspector.providers_for("mpi")]
+
+
+@pytest.mark.parametrize(
+    "pkg_name,fn_kwargs,expected",
+    [
+        (
+            "mpileaks",
+            {"expand_virtuals": True, "allowed_deps": dt.ALL},
+            {
+                "fake",
+                "mpileaks",
+                "multi-provider-mpi",
+                "callpath",
+                "dyninst",
+                "mpich2",
+                "libdwarf",
+                "zmpi",
+                "low-priority-provider",
+                "intel-parallel-studio",
+                "mpich",
+                "libelf",
+            },
+        ),
+        (
+            "mpileaks",
+            {"expand_virtuals": False, "allowed_deps": dt.ALL},
+            {"callpath", "dyninst", "libdwarf", "libelf", "mpileaks"},
+        ),
+        (
+            "mpileaks",
+            {"expand_virtuals": False, "allowed_deps": dt.ALL, "transitive": False},
+            {"callpath", "mpileaks"},
+        ),
+        ("dtbuild1", {"allowed_deps": dt.LINK | dt.RUN}, {"dtbuild1", "dtrun2", "dtlink2"}),
+        ("dtbuild1", {"allowed_deps": dt.BUILD}, {"dtbuild1", "dtbuild2", "dtlink2"}),
+        ("dtbuild1", {"allowed_deps": dt.LINK}, {"dtbuild1", "dtlink2"}),
+    ],
+)
+def test_possible_dependencies(pkg_name, fn_kwargs, expected, mock_runtimes, mock_inspector):
+    """Tests possible nodes of mpileaks, under different scenarios."""
+    expected.update(mock_runtimes)
+    result, *_ = mock_inspector.possible_dependencies(pkg_name, **fn_kwargs)
+    assert expected == result
+
+
+def test_possible_dependencies_virtual(mock_inspector, mock_packages, mock_runtimes, mpi_names):
+    expected = set(mpi_names)
+    for name in mpi_names:
+        expected.update(dep for dep in mock_packages.get_pkg_class(name).dependencies_by_name())
+    expected.update(mock_runtimes)
+
+    real_pkgs, *_ = mock_inspector.possible_dependencies(
+        "mpi", transitive=False, allowed_deps=dt.ALL
+    )
+    assert expected == real_pkgs
+
+
+def test_possible_dependencies_missing(mock_inspector):
+    result, *_ = mock_inspector.possible_dependencies("missing-dependency", allowed_deps=dt.ALL)
+    assert "this-is-a-missing-dependency" not in result
+
+
+def test_possible_dependencies_with_multiple_classes(
+    mock_inspector, mock_packages, mpileaks_possible_deps
+):
     pkgs = ["dt-diamond", "mpileaks"]
-    expected = mpileaks_possible_deps.copy()
-    expected.update(
-        {
-            "dt-diamond": set(["dt-diamond-left", "dt-diamond-right"]),
-            "dt-diamond-left": set(["dt-diamond-bottom"]),
-            "dt-diamond-right": set(["dt-diamond-bottom"]),
-            "dt-diamond-bottom": set(),
-        }
-    )
+    expected = set(mpileaks_possible_deps)
+    expected.update({"dt-diamond", "dt-diamond-left", "dt-diamond-right", "dt-diamond-bottom"})
+    expected.update(mock_packages.packages_with_tags("runtime"))
 
-    assert expected == spack.package_base.possible_dependencies(*pkgs)
+    real_pkgs, *_ = mock_inspector.possible_dependencies(*pkgs, allowed_deps=dt.ALL)
+    assert set(expected) == real_pkgs
 
 
 def setup_install_test(source_paths, test_root):
@@ -1989,3 +1989,26 @@ def test_equality_discriminate_on_propagation(lhs, rhs):
 
 def test_comparison_multivalued_variants():
     assert Spec("x=a") < Spec("x=a,b") < Spec("x==a,b") < Spec("x==a,b,c")
+
+
+def test_comparison_after_breaking_hash_change():
+    # We simulate a breaking change in DAG hash computation in Spack. We have two specs that are
+    # entirely equal modulo DAG hash. When deserializing these specs, we don't want them to compare
+    # as equal, because DAG hash is used throughout in Spack to distinguish between specs
+    # (e.g. database, build caches, install dir).
+    s = Spec("example@=1.0")
+    s._mark_concrete(True)
+
+    # compute the dag hash and a change to it
+    dag_hash = s.dag_hash()
+    new_dag_hash = f"{'b' if dag_hash[0] == 'a' else 'a'}{dag_hash[1:]}"
+
+    before_breakage = s.to_dict()
+    after_breakage = s.to_dict()
+    after_breakage["spec"]["nodes"][0]["hash"] = new_dag_hash
+    assert before_breakage != after_breakage
+
+    x = Spec.from_dict(before_breakage)
+    y = Spec.from_dict(after_breakage)
+    assert x != y
+    assert len({x, y}) == 2
@@ -201,3 +201,15 @@ def test_drop_redundant_rpath(tmpdir, binary_with_rpaths):
     new_rpaths = elf.get_rpaths(binary)
     assert set(existing_dirs).issubset(new_rpaths)
     assert set(non_existing_dirs).isdisjoint(new_rpaths)
+
+
+def test_elf_invalid_e_shnum(tmp_path):
+    # from llvm/test/Object/Inputs/invalid-e_shnum.elf
+    path = tmp_path / "invalid-e_shnum.elf"
+    with open(path, "wb") as file:
+        file.write(
+            b"\x7fELF\x02\x010000000000\x03\x00>\x0000000000000000000000"
+            b"\x00\x00\x00\x00\x00\x00\x00\x000000000000@\x000000"
+        )
+    with open(path, "rb") as file, pytest.raises(elf.ElfParsingError):
+        elf.parse_elf(file)
@@ -195,7 +195,10 @@ def parse_program_headers(f: BinaryIO, elf: ElfFile) -> None:
         elf: ELF file parser data
     """
     # Forward to the program header
-    f.seek(elf.elf_hdr.e_phoff)
+    try:
+        f.seek(elf.elf_hdr.e_phoff)
+    except OSError:
+        raise ElfParsingError("Could not seek to program header")
 
     # Here we have to make a mapping from virtual address to offset in the file.
     ph_fmt = elf.byte_order + ("LLQQQQQQ" if elf.is_64_bit else "LLLLLLLL")
@@ -245,7 +248,10 @@ def parse_pt_interp(f: BinaryIO, elf: ElfFile) -> None:
         f: file handle
         elf: ELF file parser data
     """
-    f.seek(elf.pt_interp_p_offset)
+    try:
+        f.seek(elf.pt_interp_p_offset)
+    except OSError:
+        raise ElfParsingError("Could not seek to PT_INTERP entry")
     data = read_exactly(f, elf.pt_interp_p_filesz, "Malformed PT_INTERP entry")
     elf.pt_interp_str = parse_c_string(data)
 
@@ -264,7 +270,10 @@ def find_strtab_size_at_offset(f: BinaryIO, elf: ElfFile, offset: int) -> int:
     """
     section_hdr_fmt = elf.byte_order + ("LLQQQQLLQQ" if elf.is_64_bit else "LLLLLLLLLL")
     section_hdr_size = calcsize(section_hdr_fmt)
-    f.seek(elf.elf_hdr.e_shoff)
+    try:
+        f.seek(elf.elf_hdr.e_shoff)
+    except OSError:
+        raise ElfParsingError("Could not seek to section header table")
     for _ in range(elf.elf_hdr.e_shnum):
         data = read_exactly(f, section_hdr_size, "Malformed section header")
         sh = SectionHeader(*unpack(section_hdr_fmt, data))
@@ -286,7 +295,10 @@ def retrieve_strtab(f: BinaryIO, elf: ElfFile, offset: int) -> bytes:
     Returns: file offset
     """
     size = find_strtab_size_at_offset(f, elf, offset)
-    f.seek(offset)
+    try:
+        f.seek(offset)
+    except OSError:
+        raise ElfParsingError("Could not seek to string table")
     return read_exactly(f, size, "Could not read string table")
 
 
@@ -319,7 +331,10 @@ def parse_pt_dynamic(f: BinaryIO, elf: ElfFile) -> None:
     count_runpath = 0
     count_strtab = 0
 
-    f.seek(elf.pt_dynamic_p_offset)
+    try:
+        f.seek(elf.pt_dynamic_p_offset)
+    except OSError:
+        raise ElfParsingError("Could not seek to PT_DYNAMIC entry")
 
     # In case of broken ELF files, don't read beyond the advertized size.
     for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size):
@@ -478,7 +493,10 @@ def get_interpreter(path: str) -> Optional[str]:
 def _delete_dynamic_array_entry(
     f: BinaryIO, elf: ElfFile, should_delete: Callable[[int, int], bool]
 ) -> None:
-    f.seek(elf.pt_dynamic_p_offset)
+    try:
+        f.seek(elf.pt_dynamic_p_offset)
+    except OSError:
+        raise ElfParsingError("Could not seek to PT_DYNAMIC entry")
     dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL")
     dynamic_array_size = calcsize(dynamic_array_fmt)
     new_offset = elf.pt_dynamic_p_offset  # points to the new dynamic array
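All of the hunks above apply the same guard: seek() raises OSError on a non-seekable stream (for example a pipe), and the parser converts that into its own error type. A standalone sketch, with a stand-in for the real exception class:

import io

class ElfParsingError(Exception):
    pass  # stand-in for the exception type used in this module

def checked_seek(f, offset: int, what: str) -> None:
    try:
        f.seek(offset)
    except OSError as e:
        raise ElfParsingError(f"Could not seek to {what}") from e

checked_seek(io.BytesIO(b"\x7fELF"), 2, "program header")  # fine on a seekable buffer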
@@ -8,7 +8,6 @@
 import spack.directives_meta
 import spack.error
 import spack.fetch_strategy
-import spack.package_base
 import spack.repo
 import spack.spec
 import spack.util.hash
@@ -61,10 +60,18 @@ class RemoveDirectives(ast.NodeTransformer):
     """
 
     def __init__(self, spec):
-        # list of URL attributes and metadata attributes
-        # these will be removed from packages.
-        self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies]
-        self.metadata_attrs += spack.package_base.PackageBase.metadata_attrs
+        #: List of attributes to be excluded from a package's hash.
+        self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies] + [
+            "homepage",
+            "url",
+            "urls",
+            "list_url",
+            "extendable",
+            "parallel",
+            "make_jobs",
+            "maintainers",
+            "tags",
+        ]
 
         self.spec = spec
         self.in_classdef = False  # used to avoid nested classdefs
@@ -436,8 +436,8 @@ def _dump_annotated(handler, data, stream=None):
     width = max(clen(a) for a in _ANNOTATIONS)
     formats = ["%%-%ds %%s\n" % (width + cextra(a)) for a in _ANNOTATIONS]
 
-    for f, a, l in zip(formats, _ANNOTATIONS, lines):
-        stream.write(f % (a, l))
+    for fmt, annotation, line in zip(formats, _ANNOTATIONS, lines):
+        stream.write(fmt % (annotation, line))
 
     if getvalue:
         return getvalue()
@@ -1,6 +1,7 @@
 [project]
 name="spack"
 description="The spack package manager"
+requires-python=">=3.6"
 dependencies=[
   "clingo",
   "setuptools",
@@ -67,9 +68,42 @@ features = [
   "ci",
 ]
 
+[tool.ruff]
+line-length = 99
+extend-include = ["bin/spack"]
+extend-exclude = ["lib/spack/external", "*.pyi"]
+
+[tool.ruff.format]
+skip-magic-trailing-comma = true
+
+[tool.ruff.lint]
+extend-select = ["I"]
+ignore = ["E731", "E203"]
+
+[tool.ruff.lint.isort]
+split-on-trailing-comma = false
+section-order = [
+    "future",
+    "standard-library",
+    "third-party",
+    "archspec",
+    "llnl",
+    "spack",
+    "first-party",
+    "local-folder",
+]
+
+[tool.ruff.lint.isort.sections]
+spack = ["spack"]
+archspec = ["archspec"]
+llnl = ["llnl"]
+
+[tool.ruff.lint.per-file-ignores]
+"var/spack/repos/*/package.py" = ["F403", "F405", "F811", "F821"]
+"*-ci-package.py" = ["F403", "F405", "F821"]
+
 [tool.black]
 line-length = 99
 target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
 include = '(lib/spack|var/spack/repos)/.*\.pyi?$|bin/spack$'
 extend-exclude = 'lib/spack/external'
 skip_magic_trailing_comma = true
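With the isort sections configured above, a conforming file groups its imports as standard library, third-party, archspec, llnl, then spack, each block separated by a blank line; for example:

import os

import pytest

import archspec.cpu

import llnl.util.tty

import spack.config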
@@ -539,57 +539,6 @@ data-vis-sdk-build:
     - artifacts: True
       job: data-vis-sdk-generate
 
-########################################
-# AWS ISC Applications (x86_64)
-########################################
-
-# Call this AFTER .*-generate
-.aws-isc-overrides:
-  # This controls image for generate step; build step is controlled by spack.yaml
-  # Note that generator emits OS info for build so these should be the same.
-  image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }
-
-.aws-isc:
-  extends: [ ".linux_x86_64_v3" ]
-  variables:
-    SPACK_CI_STACK_NAME: aws-isc
-
-aws-isc-generate:
-  extends: [ ".aws-isc", ".generate-x86_64", ".aws-isc-overrides", ".tags-x86_64_v4" ]
-
-aws-isc-build:
-  extends: [ ".aws-isc", ".build" ]
-  trigger:
-    include:
-      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
-        job: aws-isc-generate
-    strategy: depend
-  needs:
-    - artifacts: True
-      job: aws-isc-generate
-
-# Parallel Pipeline for aarch64 (reuses override image, but generates and builds on aarch64)
-
-.aws-isc-aarch64:
-  extends: [ ".linux_aarch64" ]
-  variables:
-    SPACK_CI_STACK_NAME: aws-isc-aarch64
-
-aws-isc-aarch64-generate:
-  extends: [ ".aws-isc-aarch64", ".generate-aarch64", ".aws-isc-overrides" ]
-
-aws-isc-aarch64-build:
-  extends: [ ".aws-isc-aarch64", ".build" ]
-  trigger:
-    include:
-      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
-        job: aws-isc-aarch64-generate
-    strategy: depend
-  needs:
-    - artifacts: True
-      job: aws-isc-aarch64-generate
-
-
 ########################################
 # Spack Tutorial
 ########################################
@@ -1,141 +0,0 @@
-spack:
-  view: false
-  packages:
-    all:
-      providers:
-        blas:
-        - openblas
-        mkl:
-        - intel-oneapi-mkl
-        mpi:
-        - openmpi
-        - mpich
-      variants: +mpi
-    tbb:
-      require: intel-tbb
-    binutils:
-      variants: +ld +gold +headers +libiberty ~nls
-      version:
-      - 2.36.1
-    doxygen:
-      version:
-      - 1.8.20
-    elfutils:
-      variants: ~nls
-    hdf5:
-      variants: +fortran +hl +shared
-    libfabric:
-      variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
-    libunwind:
-      variants: +pic +xz
-    mesa:
-      variants: ~llvm
-    mpich:
-      variants: ~wrapperrpath netmod=ofi device=ch4
-    ncurses:
-      variants: +termlib
-    openblas:
-      variants: threads=openmp
-    openmpi:
-      variants: fabrics=ofi +legacylaunchers
-    openturns:
-      version:
-      - '1.18'
-    relion:
-      variants: ~mklfft
-    # texlive:
-    #   version: [20210325]
-    trilinos:
-      variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
-        +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
-        +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
-        +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
-    xz:
-      variants: +pic
-
-  definitions:
-
-  - compiler_specs:
-    - gcc@11.2
-    # Licensing OK?
-    # - intel-oneapi-compilers@2022.1
-    # - nvhpc
-
-  - app_specs:
-    - bwa
-    # Depends on simde which requires newer compiler?
-    #- bowtie2
-    # Requires x86_64 specific ASM
-    #- cistem
-    - cromwell
-    - fastqc
-    - flux-sched
-    - flux-core
-    - flux-pmix
-    - gatk
-    - gromacs
-    - lammps
-    - wrf build_type=dm+sm
-    - mfem
-    - mpas-model ^parallelio+pnetcdf
-    - nextflow
-    - octave
-    - openfoam
-    - osu-micro-benchmarks
-    - parallel
-    # - paraview
-    - picard
-    - quantum-espresso
-    - raja
-    # Depends on bowtie2 -> simde which requires newer compiler?
-    #- rsem
-    # Errors on texlive
-    #- rstudio
-    - salmon
-    - samtools
-    - seqtk
-    - snakemake
-    - star
-    # Requires gcc@9:
-    #- ufs-weather-model
-    # requires LLVM which fails without constraint
-    #- visit
-
-  - lib_specs:
-    - openmpi fabrics=ofi
-    - openmpi fabrics=ofi +legacylaunchers
-    - openmpi fabrics=auto
-    - mpich
-    - libfabric
-
-  - compiler:
-    - '%gcc@7.3.1'
-
-  - target:
-    - target=aarch64
-
-
-  specs:
-
-  - matrix:
-    - - $app_specs
-    - - $compiler
-    - - $target
-
-  - matrix:
-    - - $lib_specs
-    - - $compiler
-    - - $target
-
-  - matrix:
-    - - $compiler_specs
-    - - $compiler
-    - - $target
-
-  ci:
-    pipeline-gen:
-    - build-job:
-        image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
-
-  cdash:
-    build-group: AWS Packages
@@ -1,153 +0,0 @@
-spack:
-  view: false
-  packages:
-    all:
-      providers:
-        blas:
-        - openblas
-        mkl:
-        - intel-oneapi-mkl
-        mpi:
-        - openmpi
-        - mpich
-      variants: +mpi
-    tbb:
-      require: intel-tbb
-    binutils:
-      variants: +ld +gold +headers +libiberty ~nls
-      version:
-      - 2.36.1
-    doxygen:
-      version:
-      - 1.8.20
-    elfutils:
-      variants: ~nls
-    hdf5:
-      variants: +fortran +hl +shared
-    libfabric:
-      variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
-    libunwind:
-      variants: +pic +xz
-    mesa:
-      variants: ~llvm
-    mpich:
-      variants: ~wrapperrpath netmod=ofi device=ch4
-    ncurses:
-      variants: +termlib
-    openblas:
-      variants: threads=openmp
-    openmpi:
-      variants: fabrics=ofi +legacylaunchers
-    openturns:
-      version:
-      - '1.18'
-    relion:
-      variants: ~mklfft
-    # texlive:
-    #   version: [20210325]
-    trilinos:
-      variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
-        +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
-        +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
-        +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
-    xz:
-      variants: +pic
-
-  definitions:
-
-  - compiler_specs:
-    - gcc@11.2
-    # Licensing OK?
-    # - intel-oneapi-compilers@2022.1
-    # - nvhpc
-
-  - cuda_specs:
-    # Disabled for consistency with aarch64
-    #- relion +cuda cuda_arch=70
-    - raja +cuda cuda_arch=70
-    - mfem +cuda cuda_arch=70
-
-  - app_specs:
-    - bwa
-    # Disabled for consistency with aarch64
-    #- bowtie2
-    # Disabled for consistency with aarch64
-    #- cistem
-    - cromwell
-    - fastqc
-    - flux-sched
-    - flux-core
-    - flux-pmix
-    - gatk
-    - gromacs
-    - lammps
-    - wrf build_type=dm+sm
-    - mfem
-    - mpas-model ^parallelio+pnetcdf
-    - nextflow
-    - octave
-    - openfoam
-    - osu-micro-benchmarks
-    - parallel
-    # - paraview
-    - picard
-    - quantum-espresso
-    # Build broken for gcc@7.3.1 x86_64_v4 (error: '_mm512_loadu_epi32' was not declared in this scope)
-    #- raja
-    # Disabled for consistency with aarch64
-    #- rsem
-    # Errors on texlive
-    #- rstudio
-    - salmon
-    - samtools
-    - seqtk
-    - snakemake
-    - star
-    # Requires gcc@9:
-    #- ufs-weather-model
-    # Disabled for consistency with aarch64
-    #- visit
-
-  - lib_specs:
-    - openmpi fabrics=ofi
-    - openmpi fabrics=ofi +legacylaunchers
-    - openmpi fabrics=auto
-    - mpich
-    - libfabric
-
-  - compiler:
-    - '%gcc@7.3.1'
-
-  - target:
-    - target=x86_64_v3
-
-
-  specs:
-
-  - matrix:
-    - - $cuda_specs
-    - - $compiler
-    - - $target
-
-  - matrix:
-    - - $app_specs
-    - - $compiler
-    - - $target
-
-  - matrix:
-    - - $lib_specs
-    - - $compiler
-    - - $target
-
-  - matrix:
-    - - $compiler_specs
-    - - $compiler
-    - - $target
-
-  ci:
-    pipeline-gen:
-    - build-job:
-        image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
-
-  cdash:
-    build-group: AWS Packages
@@ -2,10 +2,16 @@ spack:
   view: false
   packages:
     all:
-      require: target=x86_64_v3
+      require:
+      - target=x86_64_v3
+      - ~cuda
+      - ~rocm
 
   concretizer:
     unify: true
     reuse: false
+    static_analysis: true
 
   definitions:
   - default_specs:
     # editors
@@ -4,16 +4,32 @@ spack:
   concretizer:
     reuse: false
     unify: false
+    static_analysis: true
 
   packages:
     all:
-      require: '%gcc target=x86_64_v3'
-      providers:
-        blas: [openblas]
-        mpi: [mpich]
+      require:
+      - '%gcc target=x86_64_v3'
       variants: +mpi
+    mpi:
+      require:
+      - mpich
+    blas:
+      require:
+      - openblas
+    lapack:
+      require:
+      - openblas
     binutils:
       variants: +ld +gold +headers +libiberty ~nls
+    cmake:
+      require:
+      - "~qtgui"
+      - '%gcc target=x86_64_v3'
+    gmake:
+      require:
+      - "~guile"
+      - '%gcc target=x86_64_v3'
     hdf5:
       variants: +fortran +hl +shared
     libfabric:
@@ -27,19 +43,25 @@ spack:
         +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
         +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
         +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
-    mpi:
-      require: mpich
     mpich:
-      require: '~wrapperrpath ~hwloc target=x86_64_v3'
+      require:
+      - '~wrapperrpath ~hwloc'
+      - '%gcc target=x86_64_v3'
     tbb:
-      require: intel-tbb
+      require:
+      - intel-tbb
     vtk-m:
-      require: "+examples target=x86_64_v3"
+      require:
+      - "+examples"
+      - '%gcc target=x86_64_v3'
     visit:
-      require: "~gui target=x86_64_v3"
+      require:
+      - "~gui target=x86_64_v3"
     paraview:
       # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "+examples ~qt ^[virtuals=gl] osmesa target=x86_64_v3"
+      require:
+      - "+examples ~qt ^[virtuals=gl] osmesa target=x86_64_v3"
+      - '%gcc target=x86_64_v3'
 
   specs:
   # CPU
@@ -13,29 +13,43 @@ spack:
mpi: [mpich]
tbb: [intel-tbb]
variants: +mpi
acts:
require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python ~svg +tgeo cxxstd=20
celeritas:
require: +geant4 +hepmc3 +root +shared cxxstd=20
hip:
require: '@5.7.1 +rocm'
rivet:
require: hepmc=3
root:
require: +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20
# note: root cxxstd=20 not concretizable within sherpa
vecgeom:
require: +gdml +geant4 +root +shared cxxstd=20

# Mark geant4 data as external to prevent wasting bandwidth on GB-scale files
geant4-data:
buildable: false
externals:
- spec: geant4-data@11.3.0
prefix: /usr
- spec: geant4-data@11.2.2
prefix: /usr
- spec: geant4-data@11.2.0
prefix: /usr
- spec: geant4-data@11.1.0
prefix: /usr
- spec: geant4-data@11.0.0
prefix: /usr

specs:
# CPU
- acts +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +tgeo cxxstd=20
- acts ~cuda
#- agile # fails on c++>11 compiler
- alpgen
- ampt
- apfel +lhapdf +python
- celeritas +geant4 +hepmc3 +openmp +root +shared +vecgeom cxxstd=20
- celeritas ~cuda +openmp ~rocm +vecgeom
- cepgen
- cernlib +shared
- collier
@@ -81,20 +95,29 @@ spack:
- py-uproot +lz4 +xrootd +zstd
- py-vector
- pythia8 +evtgen +fastjet +hdf5 +hepmc +hepmc3 +lhapdf ~madgraph5amc +python +rivet ~root # pythia8 and root circularly depend
- rivet hepmc=3
- root +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +unuran +vc +vdt +veccore +webgui +x +xml +xrootd
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo
- rivet
- root ~cuda
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo cxxstd=20
- tauola +hepmc3 +lhapdf cxxstd=20
- thepeg hepmc=3 ~rivet
- vecgeom +gdml +geant4 +root
- vecgeom ~cuda
- whizard +fastjet +gosam hepmc=3 +lcio +lhapdf +openloops +openmp +pythia8
- xrootd +davix +http +krb5 +python +readline +scitokens-cpp
- yoda +root

# CUDA
#- acts +cuda +traccc cuda_arch=80
#- celeritas +cuda ~openmp +vecgeom cuda_arch=80
- root +cuda +cudnn +tmva-gpu
- vecgeom +cuda cuda_arch=80

# ROCm
- celeritas +rocm amdgpu_target=gfx90a ~openmp ~vecgeom # only available with ORANGE

ci:
pipeline-gen:
- build-job:
image: "ghcr.io/spack/spack/ubuntu22.04-runner-amd64-gcc-11.4:2024.03.01"
image: ghcr.io/spack/spack/ubuntu22.04-runner-amd64-gcc-11.4:2024.03.01

cdash:
build-group: HEP

@@ -12,6 +12,13 @@ spack:
require: ~cuda
mpi:
require: openmpi
py-torch:
require:
- target=aarch64
- ~rocm
- +cuda
- cuda_arch=80
- ~flash_attention

specs:
# Horovod

@@ -12,6 +12,13 @@ spack:
require: ~cuda
mpi:
require: openmpi
py-torch:
require:
- target=x86_64_v3
- ~rocm
- +cuda
- cuda_arch=80
- ~flash_attention

specs:
# Horovod

@@ -11,6 +11,13 @@ spack:
require: "osmesa"
mpi:
require: openmpi
py-torch:
require:
- target=x86_64_v3
- ~cuda
- +rocm
- amdgpu_target=gfx90a
- ~flash_attention

specs:
# Horovod

@@ -18,7 +18,7 @@ spack:
- hdf5+hl+mpi ^mpich
- trilinos
- trilinos +hdf5 ^hdf5+hl+mpi ^mpich
- gcc@12.3.0
- gcc@12
- mpileaks
- lmod@8.7.18
- environment-modules

@@ -36,7 +36,7 @@ bin/spack -h
bin/spack help -a

# Profile and print top 20 lines for a simple call to spack spec
spack -p --lines 20 spec mpileaks%gcc ^dyninst@10.0.0 ^elfutils@0.170
spack -p --lines 20 spec mpileaks%gcc
$coverage_run $(which spack) bootstrap status --dev --optional

# Check that we can import Spack packages directly as a first import

@@ -32,7 +32,7 @@ def edit(self, spec, prefix):
# Dictionary mapping: compiler-name : ACE config-label
supported = {"intel": "_icc", "gcc": ""}

if not (self.compiler.name in supported):
if self.compiler.name not in supported:
raise Exception(
"compiler " + self.compiler.name + " not supported in ace spack-package"
)

@@ -40,6 +40,7 @@ class Acts(CMakePackage, CudaPackage):
# Supported Acts versions
version("main", branch="main")
version("master", branch="main", deprecated=True)  # For compatibility
version("39.0.0", commit="b055202e2fbdd509bc186eb4782714bc46f38f3f", submodules=True)
version("38.2.0", commit="9cb8f4494656553fd9b85955938b79b2fac4c9b0", submodules=True)
version("38.1.0", commit="8a20c88808f10bf4fcdfd7c6e077f23614c3ab90", submodules=True)
version("38.0.0", commit="0a6b5155e29e3b755bf351b8a76067fff9b4214b", submodules=True)

@@ -4,7 +4,6 @@

import os

from spack.build_environment import optimization_flags
from spack.package import *
from spack.pkg.builtin.fftw import FftwBase

@@ -95,14 +94,14 @@ class Amdfftw(FftwBase):
)
variant(
"amd-dynamic-dispatcher",
default=True,
default=False,
when="@4.1: %aocc@4.1.0:",
description="Single portable optimized library"
" to execute on different x86 CPU architectures",
)
variant(
"amd-dynamic-dispatcher",
default=True,
default=False,
when="@3.2: %gcc",
description="Single portable optimized library"
" to execute on different x86 CPU architectures",
@@ -203,11 +202,10 @@ def configure(self, spec, prefix):
if not self.compiler.f77 or not self.compiler.fc:
options.append("--disable-fortran")

# Cross compilation is supported in amd-fftw by making use of target
# variable to set AMD_ARCH configure option.
# Spack user can not directly use AMD_ARCH for this purpose but should
# use target variable to set appropriate -march option in AMD_ARCH.
options.append(f"AMD_ARCH={optimization_flags(self.compiler, spec.target)}")
if "avx512" in spec.target:
options.append("CFLAGS=-mprefer-vector-width=512")
else:
options.append("CFLAGS=-mprefer-vector-width=256")

# Specific SIMD support.
# float and double precisions are supported

@@ -19,6 +19,7 @@ class Amdsmi(CMakePackage):
libraries = ["libamd_smi"]

license("MIT")
version("6.3.2", sha256="1ed452eedfe51ac6e615d7bfe0bd7a0614f21113874ae3cbea7df72343cc2d13")
version("6.3.1", sha256="a3a5a711052e813b9be9304d5e818351d3797f668ec2a455e61253a73429c355")
version("6.3.0", sha256="7234c46648938239385cd5db57516ed53985b8c09d2f0828ae8f446386d8bd1e")
version("6.2.4", sha256="5ebe8d0f176bf4a73b0e7000d9c47cb7f65ecca47011d3f9b08b93047dcf7ac5")

@@ -25,6 +25,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
license("BSD-3-Clause")

version("develop", branch="development")
version("25.02", sha256="2680a5a9afba04e211cd48d27799c5a25abbb36c6c3d2b6c13cd4757c7176b23")
version("25.01", sha256="29eb35cf67d66b0fd0654282454c210abfadf27fcff8478b256e3196f237c74f")
version("24.12", sha256="ca4b41ac73fabb9cf3600b530c9823eb3625f337d9b7b9699c1089e81c67fc67")
version("24.11", sha256="31cc37b39f15e02252875815f6066046fc56a479bf459362b9889b0d6a202df6")
@@ -360,7 +361,7 @@ def cmake_args(self):
args.append("-DAMReX_GPU_BACKEND=SYCL")
# SYCL GPU backend only supported with Intel's oneAPI or DPC++ compilers
sycl_compatible_compilers = ["icpx"]
if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers):
if os.path.basename(self.compiler.cxx) not in sycl_compatible_compilers:
raise InstallError(
"AMReX's SYCL GPU Backend requires the oneAPI CXX (icpx) compiler."
)

@@ -8,6 +8,20 @@
from spack.package import *

_versions = {
"6.3.2": {
"apt": (
"bef302bf344c9297f9fb64a4a93f360721a467185bc4fefbeecb307dd956c504",
"https://repo.radeon.com/rocm/apt/6.3.2/pool/main/h/hsa-amd-aqlprofile/hsa-amd-aqlprofile_1.0.0.60302-66~20.04_amd64.deb",
),
"yum": (
"1e01de060073cb72a97fcddf0f3b637b48cf89a08b34f2447d010031abc0e099",
"https://repo.radeon.com/rocm/rhel8/6.3.2/main/hsa-amd-aqlprofile-1.0.0.60302-66.el8.x86_64.rpm",
),
"zyp": (
"408fb29e09ba59a9e83e8f7d703ba53e1ef3b3acbae1103b2a82d4f87f321752",
"https://repo.radeon.com/rocm/zyp/6.3.2/main/hsa-amd-aqlprofile-1.0.0.60302-sles155.66.x86_64.rpm",
),
},
"6.3.1": {
"apt": (
"76b129345a1a7caa04859fd738e0ba5bfa6f7bc1ad11171f1a7b2d46e0c0b158",
@@ -275,6 +289,7 @@ class Aqlprofile(Package):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hsa-rocr-dev@{ver}", when=f"@{ver}")

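For context, a minimal sketch of how a `_versions` table like the one above is typically consumed in a Spack package that repackages vendor binaries. The package name, URLs, and checksums below are illustrative placeholders, not the actual aqlprofile code:

from spack.package import *

_versions = {
    "6.3.2": {
        "apt": ("0" * 64, "https://example.com/demo_6.3.2_amd64.deb"),
        "yum": ("1" * 64, "https://example.com/demo-6.3.2.el8.x86_64.rpm"),
    },
}


class DemoBinary(Package):
    """Illustrative only: register one download per (version, package manager)."""

    homepage = "https://example.com"

    for ver, by_mgr in _versions.items():
        # Real packages pick the key based on the host distribution
        # (apt vs yum vs zypper); here we hard-code one for brevity.
        sha, url = by_mgr["apt"]
        version(ver, sha256=sha, url=url, expand=False)
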
@@ -110,22 +110,15 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage):
conflicts("~serial", when="+trilinos")

def cmake_args(self):
spec = self.spec

if "+trilinos" in spec:
kokkos_spec = spec["trilinos"]
else:
kokkos_spec = spec["kokkos"]

kokkos_pkg = self["trilinos"] if self.spec.satisfies("+trilinos") else self["kokkos"]
options = [
f"-DKokkos_ROOT={kokkos_spec.prefix}",
self.define("Kokkos_ROOT", kokkos_pkg.prefix),
self.define_from_variant("ARBORX_ENABLE_MPI", "mpi"),
]

if spec.satisfies("+cuda"):
options.append(f"-DCMAKE_CXX_COMPILER={kokkos_spec.kokkos_cxx}")
if spec.satisfies("+rocm"):
options.append("-DCMAKE_CXX_COMPILER=%s" % spec["hip"].hipcc)
if self.spec.satisfies("+cuda"):
options.append(self.define("CMAKE_CXX_COMPILER", kokkos_pkg.kokkos_cxx))
if self.spec.satisfies("+rocm"):
options.append(self.define("CMAKE_CXX_COMPILER", self.spec["hip"].hipcc))

return options

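The ArborX hunk above is representative of a recurring cleanup in this range: hand-assembled "-D..." strings become structured cache entries. A minimal sketch of the two helpers, with illustrative package and variable names:

from spack.package import *


class DemoCmake(CMakePackage):
    """Illustrative only; not a real package."""

    variant("mpi", default=True, description="Enable MPI")
    depends_on("kokkos")

    def cmake_args(self):
        return [
            # Renders a -DKokkos_ROOT=<prefix> cache argument
            self.define("Kokkos_ROOT", self.spec["kokkos"].prefix),
            # Renders -DDEMO_ENABLE_MPI:BOOL=ON or =OFF from the variant
            self.define_from_variant("DEMO_ENABLE_MPI", "mpi"),
        ]
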
@@ -148,6 +148,14 @@ class Ascent(CMakePackage, CudaPackage):
# https://github.com/Alpine-DAV/ascent/pull/1123
patch("ascent-find-raja-pr1123.patch", when="@0.9.0")

# patch to fix typo in coord_type
# https://github.com/Alpine-DAV/ascent/pull/1408
patch(
"https://github.com/Alpine-DAV/ascent/pull/1408.patch?full_index=1",
when="@0.9.3 %oneapi@2025:",
sha256="7de7f51e57f3d743c39ad80d8783a4eb482be1def51eb2d3f9259246c661f164",
)

##########################################################################
# package dependencies
###########################################################################
@@ -468,6 +476,9 @@ def hostconfig(self):
if cflags:
cfg.write(cmake_cache_entry("CMAKE_C_FLAGS", cflags))
cxxflags = cppflags + " ".join(spec.compiler_flags["cxxflags"])
if spec.satisfies("%oneapi@2025:"):
cxxflags += "-Wno-error=missing-template-arg-list-after-template-kw "
cxxflags += "-Wno-missing-template-arg-list-after-template-kw"
if cxxflags:
cfg.write(cmake_cache_entry("CMAKE_CXX_FLAGS", cxxflags))
fflags = " ".join(spec.compiler_flags["fflags"])

@@ -23,6 +23,10 @@ class Bash(AutotoolsPackage, GNUMirrorPackage):

depends_on("c", type="build")  # generated

depends_on("autoconf", type="build")
depends_on("automake", type="build")
depends_on("libtool", type="build")

depends_on("ncurses")
depends_on("readline@8.2:", when="@5.2:")
depends_on("readline@5.0:")

@@ -118,7 +118,7 @@ class Berkeleygw(MakefilePackage):
def edit(self, spec, prefix):
# archive is a tar file, despite the .gz extension
tar = which("tar")
tar("-x", "-f", self.stage.archive_file, "--strip-components=1")
tar("-x", "-o", "-f", self.stage.archive_file, "--strip-components=1")

# get generic arch.mk template
if spec.satisfies("+mpi"):

@@ -4,7 +4,6 @@

import spack.store
from spack.package import *
from spack.pkg.builtin.boost import Boost


class CbtfKrell(CMakePackage):
@@ -51,9 +50,17 @@ class CbtfKrell(CMakePackage):
description="build only the FE tool using the runtime_dir to point to target build.",
)

# Fix build errors with gcc >= 10
patch(
"https://github.com/OpenSpeedShop/cbtf-krell/commit/7d47761c6cd9110883bff9ca1e694af1475676f5.patch?full_index=1",
sha256="64ed80d18163ca04a67be4a13ac2d2553243fc24c6274d26981472e6e2050b8a",
)

# Dependencies for cbtf-krell
depends_on("cmake@3.0.2:", type="build")

depends_on("gotcha")

# For rpcgen
depends_on("rpcsvc-proto", type="build")

@@ -64,16 +71,11 @@ class CbtfKrell(CMakePackage):
depends_on("binutils@2.32")

# For boost
depends_on("boost@1.70.0:")

# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on("boost@1.70.0:+filesystem+graph+program_options+python+test+thread")

# For Dyninst
depends_on("dyninst@10.1.0", when="@develop")
depends_on("dyninst@10.1.0", when="@1.9.3:9999")
depends_on("dyninst@10.1.0:", when="@develop")
depends_on("dyninst@10.1.0:", when="@1.9.3:9999")

# For MRNet
depends_on("mrnet@5.0.1-3:+lwthreads", when="@develop", type=("build", "link", "run"))

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.build_systems.cmake import CMakeBuilder
from spack.package import *


@@ -81,13 +82,22 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
depends_on("py-breathe", type="build", when="+doc")
depends_on("py-sphinx", type="build", when="+doc")

with when("+cuda"):
depends_on("thrust")
with when("+rocm"):
depends_on("hiprand")
depends_on("rocprim")
depends_on("rocrand")
depends_on("rocthrust")

for _std in _cxxstd_values:
depends_on("geant4 cxxstd=" + _std, when="+geant4 cxxstd=" + _std)
depends_on("root cxxstd=" + _std, when="+root cxxstd=" + _std)
depends_on("vecgeom cxxstd=" + _std, when="+vecgeom cxxstd=" + _std)

depends_on("vecgeom +cuda cuda_arch=none", when="+vecgeom +cuda cuda_arch=none")
for _arch in CudaPackage.cuda_arch_values:
depends_on("vecgeom+cuda cuda_arch=" + _arch, when="+vecgeom +cuda cuda_arch=" + _arch)
depends_on(f"vecgeom +cuda cuda_arch={_arch}", when=f"+vecgeom +cuda cuda_arch={_arch}")

conflicts("+rocm", when="+cuda", msg="AMD and NVIDIA accelerators are incompatible")
conflicts("+rocm", when="+vecgeom", msg="HIP support is only available with ORANGE")
@@ -111,7 +121,7 @@ def cmake_args(self):
from_variant("CELERITAS_BUILD_DOCS", "doc"),
define("CELERITAS_BUILD_DEMOS", False),
define("CELERITAS_BUILD_TESTS", False),
from_variant("Celeritas_USE_HIP", "rocm"),
from_variant("CELERITAS_USE_HIP", "rocm"),
define("CELERITAS_USE_MPI", False),
define("CELERITAS_USE_Python", True),
]
@@ -119,6 +129,22 @@ def cmake_args(self):
for pkg in ["CUDA", "Geant4", "HepMC3", "OpenMP", "ROOT", "SWIG", "VecGeom"]:
args.append(from_variant("CELERITAS_USE_" + pkg, pkg.lower()))

if self.spec.satisfies("+cuda"):
args.append(CMakeBuilder.define_cuda_architectures(self))
if self.spec.satisfies("+rocm"):
args.append(CMakeBuilder.define_hip_architectures(self))
args.append(
define(
"CMAKE_HIP_FLAGS",
" ".join(
[
f"-I{self.spec[p].prefix.include}"
for p in ["hiprand", "rocprim", "rocrand", "rocthrust"]
]
),
)
)

if self.version < Version("0.5"):
# JSON is required for 0.5 and later
args.append(define("CELERITAS_USE_JSON", True))

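The Celeritas change also shows the `with when(...)` grouping idiom for conditional dependencies; a minimal self-contained sketch, with illustrative package names:

from spack.package import *


class DemoGpu(CMakePackage):
    """Illustrative only; not a real package."""

    variant("rocm", default=False, description="Build with ROCm support")

    # Every directive inside the block inherits the condition, which is
    # terser than repeating when="+rocm" on each depends_on() call.
    with when("+rocm"):
        depends_on("hiprand")
        depends_on("rocprim")
        depends_on("rocthrust")
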
@@ -276,7 +276,7 @@ def charmarch(self):
# build-target=LIBS backend={0}'.format(b))

def install(self, spec, prefix):
if not ("backend=mpi" in self.spec) or not ("backend=netlrts" in self.spec):
if "backend=mpi" not in self.spec or "backend=netlrts" not in self.spec:
if self.spec.satisfies("+pthreads"):
raise InstallError(
"The pthreads option is only available on the Netlrts and MPI network layers."

@@ -92,9 +92,9 @@ def pgo_train(self):

# Set PGO training flags.
generate_mods = EnvironmentModifications()
generate_mods.append_flags("CFLAGS", "-fprofile-generate={}".format(reports))
generate_mods.append_flags("CXXFLAGS", "-fprofile-generate={}".format(reports))
generate_mods.append_flags("LDFLAGS", "-fprofile-generate={} --verbose".format(reports))
generate_mods.append_flags("CFLAGS", f"-fprofile-generate={reports}")
generate_mods.append_flags("CXXFLAGS", f"-fprofile-generate={reports}")
generate_mods.append_flags("LDFLAGS", f"-fprofile-generate={reports}")

with working_dir(self.build_directory, create=True):
cmake(*cmake_options, sources, extra_env=generate_mods)
@@ -118,14 +118,14 @@ def pgo_train(self):
# Clean the build dir.
rmtree(self.build_directory, ignore_errors=True)

if self.spec.satisfies("%clang") or self.spec.satisfies("apple-clang"):
if self.spec.satisfies("%clang") or self.spec.satisfies("%apple-clang"):
# merge reports
use_report = join_path(reports, "merged.prof")
raw_files = glob.glob(join_path(reports, "*.profraw"))
llvm_profdata("merge", "--output={}".format(use_report), *raw_files)
use_flag = "-fprofile-instr-use={}".format(use_report)
llvm_profdata("merge", f"--output={use_report}", *raw_files)
use_flag = f"-fprofile-instr-use={use_report}"
else:
use_flag = "-fprofile-use={}".format(reports)
use_flag = f"-fprofile-use={reports}"

# Set PGO use flags for next cmake phase.
use_mods = EnvironmentModifications()

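The one-character `%apple-clang` fix above is easy to miss: in Spack spec syntax a leading `%` selects the compiler, while a bare name matches the spec (package) itself. A hedged sketch of the difference, using zlib as an arbitrary example package:

from spack.spec import Spec

spec = Spec("zlib %apple-clang@15")

# True: the spec is compiled with apple-clang
print(spec.satisfies("%apple-clang"))
# False: the package is zlib, not apple-clang -- so the original
# check without "%" could never fire on macOS
print(spec.satisfies("apple-clang"))
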
@@ -125,9 +125,9 @@ class Cmake(Package):
patch("mr-9623.patch", when="@3.22.0:3.30")

depends_on("ninja", when="platform=windows")
depends_on("gmake", when="platform=linux")
depends_on("gmake", when="platform=darwin")
depends_on("gmake", when="platform=freebsd")
depends_on("gmake", type=("build", "run"), when="platform=linux")
depends_on("gmake", type=("build", "run"), when="platform=darwin")
depends_on("gmake", type=("build", "run"), when="platform=freebsd")

depends_on("qt", when="+qtgui")
# Qt depends on libmng, which is a CMake package;

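The gmake change above widens the dependency type; a short sketch of what the types mean in practice, with illustrative package names:

from spack.package import *


class DemoTool(Package):
    """Illustrative only; not a real package."""

    # Needed only while this package itself is being built:
    depends_on("ninja", type="build")

    # Needed at build time *and* kept in the run environment of the
    # installed package -- e.g. for a tool that shells out to `make`
    # at runtime, as the cmake executable does for Makefile generators:
    depends_on("gmake", type=("build", "run"))
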
@@ -12,58 +12,11 @@ class CodeServer(Package):
homepage = "https://coder.com/docs/code-server/latest"
url = "https://github.com/coder/code-server/releases/download/v4.4.0/code-server-4.4.0-linux-amd64.tar.gz"

version("4.12.0", sha256="d50ee947c4144a6ff2656e664ecbb3f70b75168b8a6e8c3eef47787f3c240c26")
version("4.11.0", sha256="4eb233054941ec298caec6fc84dfba0a72c1bc5fadc0fe4896b10f3f4a291d51")
version("4.10.1", sha256="f34ce611a9c058982a5e9d200fdf009788e3a564e970b053f4145574bce21b09")
version("4.4.0", sha256="e3dd265acb18c2230c72d19bbce619ac5c1bd800ebb26e5e169c4d613069500d")
version("4.3.0", sha256="42c71e98de85270b164b023ef8eb0692cf7700c03081ba5a44eaca014a92eb57")
version("4.2.0", sha256="98be5bc43ac604c49ae11da259e318b581757a59a25edeee5cf55317ca589ec6")
version("4.1.0", sha256="f720b20d1f615b78f3a1be9b1614f3d99ed722b8da3047a4143dbe5835e52ce3")
version("4.0.2", sha256="68c11afa3288707a6880920013d8bac7404cd590eb4f63cac92979d0b0bf4fd1")
version("4.0.1", sha256="5fe6d26e9d19e685946f0f392d9c822e5303a800cac3ac54a6a2c26104d239fd")
version("3.12.0", sha256="d3ca41a55e36d73d80300702af2687e25d440cff6b613bb58a2c88d9b8a0a38f")
version("3.11.1", sha256="d34b0b79582196d59d44ac971aabb7f15cb05d837318b94f62470dc8475665e9")
version("3.11.0", sha256="dddb97f044ed615a4b8a526328fca6ad703b9c671a28a6090d84668a18755589")
version("3.10.2", sha256="47154a6b9e61a0313ba499dd5d948613a17841c2f580612f9721c31964622bf5")
version("3.10.1", sha256="18175624df78976488dbcc2a26f2582a71cef5ca0a419e691b1b70da0b27c7ef")
version("3.10.0", sha256="5dfce848747f3dd5074cba435cca6730ac99d6d3aa3f50e0a9bf222ad12d3e97")
version("3.9.3", sha256="eba42eaf868c2144795b1ac54929e3b252ae35403bf8553b3412a5ac4f365a41")
version("3.9.2", sha256="5dbda5ac598223006f72bcb700b133a752aabe4468ed8105806d1d69b5364408")
version("3.9.1", sha256="f2648a4387c5a5be8666fb82a7b8a58274c45b91942251ab337e202e078ae8a5")
version("3.9.0", sha256="229b0fb95d78a7f7ff0dd55bc151a7521fcd699af50151faf67f6c7ce51110f4")
version("3.8.1", sha256="130cf94e3921d0e2adfa33e875bf1aa81fd28548aac94fd31fbc589baa68d45f")
version("3.8.0", sha256="70b069f26b30c38cca5fa07b5f25db4d15976de80af3a644b9105d1b5e23e7d5")
version("3.7.4", sha256="01ca0e48df44df70cdf702644b013102024a5b30edf6c1fbb2e10b0310056382")
version("3.7.3", sha256="7a90f3171c9bc6f65266066e35cc34d48a032910c136ea21116d28f3d7214547")
version("3.7.2", sha256="c3054f214392b1b2eb4c77c57cb950ac5d733d349a426975e8bf32028e65a226")
version("3.7.1", sha256="bebd9e0c46e0fd4b4f295fd91fc2db135a694614db972095e9842bf7969f4cee")
version("3.7.0", sha256="5f8df8ed3924e8e594674d73fb50b00a06efa529f96a0495a5ee8c39c68f3ce1")
version("3.6.2", sha256="fd4ac7d61f3e1b2a5034f1706e409c77fad299adef0ede204828d8ecfe317e45")
version("3.6.1", sha256="bbe4ef9585e093b3521deb34a0820d2136172271862d6396df21c2e9a26c6374")
version("3.6.0", sha256="d1ae4f7263741e0551358d3ed77dad587b33b352d827623d4df25e98f9e21019")
version("3.5.0", sha256="90c19c84611becac4af1fb0bd5324ab30f9200769fa7914cd10ccb6b88c657bb")
version("3.4.1", sha256="afdb89f4dc7201c03cb35d4f8dc1ccb6060bd0da324a6789089de264d3406817")
version("3.4.0", sha256="918c28696b73b96dc9361977f93e788d5c8884b5d4a088d206f05d5b8bccb738")
version("3.3.1", sha256="57b9855b20f511e22776ee8a53d1ff30f864498814c4c0b0af3510f71d7a2969")
version("3.3.0", sha256="6ca5148a447b41753d5151c5a49a8af24122c7b0808609782aec454e66be4f2c")
version("3.2.0", sha256="a8157e8766d6a0e255c72db25e8677a57adb8d889d653e78750b4d26a6ff7400")
version("3.1.1", sha256="5dd922d28b2e351c146081849d987fb1e439ee7d53b941434b2eecb2a194da71")
version("3.1.0", sha256="5ef85c8f280ce781a176a8b77386b333efe892755a5c325a1782e4eac6016e59")
version("3.0.2", sha256="04367cfeb23991f3dc3f1ef8e3dfe5e9d683bb50c9e1fa69e3c21757facfd7ee")
version("4.96.4", sha256="b3f9025d00f2cdf61caf83945ef7225d4a3eb576c4c007e45868f45713e39c8e")

depends_on("c", type="build")  # generated
depends_on("cxx", type="build")  # generated

def url_for_version(self, version):
if version <= Version("3.2.0"):
return "https://github.com/coder/code-server/releases/download/{0}/code-server-{0}-linux-x86_64.tar.gz".format(
version
)
else:
return "https://github.com/coder/code-server/releases/download/v{0}/code-server-{0}-linux-amd64.tar.gz".format(
version
)

def install(self, spec, prefix):
install_tree(".", prefix)

@@ -29,6 +29,7 @@ def url_for_version(self, version):
license("NCSA")

version("master", branch="amd-stg-open")
version("6.3.2", sha256="1f52e45660ea508d3fe717a9903fe27020cee96de95a3541434838e0193a4827")
version("6.3.1", sha256="e9c2481cccacdea72c1f8d3970956c447cec47e18dfb9712cbbba76a2820552c")
version("6.3.0", sha256="79580508b039ca6c50dfdfd7c4f6fbcf489fe1931037ca51324818851eea0c1c")
version("6.2.4", sha256="7af782bf5835fcd0928047dbf558f5000e7f0207ca39cf04570969343e789528")
@@ -88,6 +89,7 @@ def url_for_version(self, version):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
]:
# llvm libs are linked statically, so this *could* be a build dep
@@ -115,6 +117,7 @@ def url_for_version(self, version):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"rocm-core@{ver}", when=f"@{ver}")

@@ -66,7 +66,7 @@ def cmake_args(self):
[
"-DKokkosCore_PREFIX={0}".format(kokkos.prefix),
"-DKokkosKernels_PREFIX={0}".format(kokkos_kernels.prefix),
"-DCMAKE_CXX_COMPILER:STRING={0}".format(spec["kokkos"].kokkos_cxx),
"-DCMAKE_CXX_COMPILER:STRING={0}".format(self["kokkos"].kokkos_cxx),
# Compadre_USE_PYTHON is OFF by default
"-DCompadre_USE_PYTHON=OFF",
]

@@ -18,6 +18,7 @@ class ComposableKernel(CMakePackage):
license("MIT")

version("master", branch="develop")
version("6.3.2", sha256="875237fe493ff040f8f63b827cddf2ff30a8d3aa18864f87d0e35323c7d62a2d")
version("6.3.1", sha256="3e8c8c832ca3f9ceb99ab90f654b93b7db876f08d90eda87a70bc629c854052a")
version("6.3.0", sha256="274f87fc27ec2584c76b5bc7ebdbe172923166b6b93e66a24f98475b44be272d")
version("6.2.4", sha256="5598aea4bce57dc95b60f2029831edfdade80b30a56e635412cc02b2a6729aa6")
@@ -60,6 +61,7 @@ class ComposableKernel(CMakePackage):

for ver in [
"master",
"6.3.2",
"6.3.1",
"6.3.0",
"6.2.4",

@@ -38,10 +38,11 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/cp2k/cp2k.git"
list_url = "https://github.com/cp2k/cp2k/releases"

maintainers("dev-zero", "mtaillefumier")
maintainers("dev-zero", "mtaillefumier", "RMeli", "abussy")

license("GPL-2.0-or-later")

version("2025.1", sha256="65c8ad5488897b0f995919b9fa77f2aba4b61677ba1e3c19bb093d5c08a8ce1d")
version("2024.3", sha256="a6eeee773b6b1fb417def576e4049a89a08a0ed5feffcd7f0b33c7d7b48f19ba")
version("2024.2", sha256="cc3e56c971dee9e89b705a1103765aba57bf41ad39a11c89d3de04c8b8cdf473")
version("2024.1", sha256="a7abf149a278dfd5283dc592a2c4ae803b37d040df25d62a5e35af5c4557668f")
@@ -115,6 +116,7 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
)
variant("pytorch", default=False, description="Enable libtorch support")
variant("quip", default=False, description="Enable quip support")
variant("dftd4", when="@2024.2:", default=False, description="Enable DFT-D4 support")
variant("mpi_f08", default=False, description="Use MPI F08 module")
variant("smeagol", default=False, description="Enable libsmeagol support", when="@2025.2:")

@@ -157,6 +159,7 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
)

depends_on("python@3", type="build")
depends_on("pkgconfig", type="build", when="build_system=cmake")

depends_on("blas")
depends_on("lapack")
@@ -194,13 +197,14 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
)

with when("+libxc"):
depends_on("pkgconfig", type="build", when="@7.0:")
depends_on("pkgconfig", type="build", when="@7.0: ^libxc@:6")
depends_on("libxc@4.0.3:4", when="@7.0:8.1")
depends_on("libxc@5.1.3:5.1", when="@8.2:8")
depends_on("libxc@5.1.7:5.1", when="@9:2022.2")
depends_on("libxc@6.1:", when="@2023.1:")
depends_on("libxc@6.2:", when="@2023.2:")
depends_on("libxc@:6", when="@:2024.3")
depends_on("libxc@7 build_system=cmake", when="@2025.2:")

with when("+spla"):
depends_on("spla+cuda+fortran", when="+cuda")
@@ -293,6 +297,8 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):

depends_on("spglib", when="+spglib")

depends_on("dftd4@3.6.0: build_system=cmake", when="+dftd4")

with when("build_system=cmake"):
depends_on("cmake@3.22:", type="build")

@@ -624,6 +630,12 @@ def edit(self, pkg, spec, prefix):
ldflags += [spglib.search_flags]
libs.append(spglib.ld_flags)

if spec.satisfies("+dftd4"):
cppflags += ["-D__DFTD4"]
dftd4 = spec["dftd4"].libs
ldflags += [dftd4.search_flags]
libs.append(dftd4.ld_flags)

if spec.satisfies("+smeagol"):
cppflags += ["-D__SMEAGOL"]
smeagol = spec["libsmeagol"].libs
@@ -1003,6 +1015,7 @@ def cmake_args(self):
self.define_from_variant("CP2K_USE_VORI", "libvori"),
self.define_from_variant("CP2K_USE_SPLA", "spla"),
self.define_from_variant("CP2K_USE_QUIP", "quip"),
self.define_from_variant("CP2K_USE_DFTD4", "dftd4"),
self.define_from_variant("CP2K_USE_MPI_F08", "mpi_f08"),
self.define_from_variant("CP2K_USE_LIBSMEAGOL", "smeagol"),
]

@@ -15,6 +15,7 @@ class Cppgsl(CMakePackage):
license("MIT")

version("main", branch="main")
version("4.1.0", sha256="0a227fc9c8e0bf25115f401b9a46c2a68cd28f299d24ab195284eb3f1d7794bd")
version("4.0.0", sha256="f0e32cb10654fea91ad56bde89170d78cfbf4363ee0b01d8f097de2ba49f6ce9")
version("3.1.0", sha256="d3234d7f94cea4389e3ca70619b82e8fb4c2f33bb3a070799f1e18eef500a083")
version("2.1.0", sha256="ef73814657b073e1be86c8f7353718771bf4149b482b6cb54f99e79b23ff899d")
@@ -32,6 +33,7 @@ class Cppgsl(CMakePackage):
)

depends_on("cmake@3.1.3:", type="build")
depends_on("cmake@3.14:", type="build", when="@4.1:")

def cmake_args(self):
return [

@@ -18,7 +18,8 @@ class Cpuinfo(CMakePackage):
license("BSD-2-Clause")

version("main", branch="main")
version("2024-09-06", commit="094fc30b9256f54dad5ad23bcbfb5de74781422f")  # py-torch@2.5.1:
version("2024-09-26", commit="1e83a2fdd3102f65c6f1fb602c1b320486218a99")  # py-torch@2.6:
version("2024-09-06", commit="094fc30b9256f54dad5ad23bcbfb5de74781422f")  # py-torch@2.5.1
version("2024-08-30", commit="fa1c679da8d19e1d87f20175ae1ec10995cd3dd3")  # py-torch@2.5.0
version("2023-11-04", commit="d6860c477c99f1fce9e28eb206891af3c0e1a1d7")  # py-torch@2.3:2.4
version("2023-01-13", commit="6481e8bef08f606ddd627e4d3be89f64d62e1b8a")  # py-torch@2.1:2.2

@@ -25,7 +25,7 @@ class Creduce(CMakePackage):
depends_on("flex")
depends_on("libxml2")
depends_on("llvm")
depends_on("llvm@8.0.0", when="@:2.10")
depends_on("llvm@8.0", when="@:2.10")
depends_on("perl")
depends_on("perl-exporter-lite")
depends_on("perl-file-which")

@@ -21,6 +21,16 @@
# format returned by platform.system() and 'arch' by platform.machine()

_versions = {
"12.8.0": {
"Linux-aarch64": (
"5bc211f00c4f544da6e3fc3a549b3eb0a7e038439f5f3de71caa688f2f6b132c",
"https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_570.86.10_linux_sbsa.run",
),
"Linux-x86_64": (
"610867dcd6d94c4e36c4924f1d01b9db28ec08164e8af6c764f21b84200695f8",
"https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_570.86.10_linux.run",
),
},
"12.6.3": {
"Linux-aarch64": (
"213ea63a6357020978a8b0a79a8c9d12a2a5941afa1cdc69d5a3f933fa8bed04",

@@ -19,7 +19,10 @@ class Cusz(CMakePackage, CudaPackage):
conflicts("cuda_arch=none", when="+cuda")

version("develop", branch="develop")
version("0.6.0", commit="cafed521dc338fe2159ebb5b09a36fc318524bf7")
version("0.14.0", commit="e57fd7cd9df923164af9dd307b0b3d37dd9df137")
version("0.9.0rc3", commit="c3c3a74d006c6de3c145255241fb181682bd1492")
# 0.9.0rc1 was listed as 0.6.0 for a while in spack
version("0.9.0rc1", commit="cafed521dc338fe2159ebb5b09a36fc318524bf7")
version("0.3.1", commit="02be3cbd07db467decaf45ec9eb593ba6173c809")
version("0.3", sha256="0feb4f7fd64879fe147624dd5ad164adf3983f79b2e0383d35724f8d185dcb11")

@@ -31,6 +34,10 @@ class Cusz(CMakePackage, CudaPackage):

depends_on("cub", when="^cuda@:10.2.89")

patch("thrust-includes.patch", when="@0.10:0.14 ^cuda@12.8:")
patch("thrust-includes-0.9.patch", when="@0.9 ^cuda@12.8:")
conflicts("^cuda@12.8:", when="@:0.8")

def cmake_args(self):
cuda_arch = self.spec.variants["cuda_arch"].value
args = ["-DBUILD_TESTING=OFF", ("-DCMAKE_CUDA_ARCHITECTURES=%s" % cuda_arch)]

@@ -0,0 +1,27 @@
diff --git a/src/stat/detail/compare.thrust.inl b/src/stat/detail/compare.thrust.inl
index ce49408..ec8d650 100644
--- a/src/stat/detail/compare.thrust.inl
+++ b/src/stat/detail/compare.thrust.inl
@@ -18,6 +18,9 @@
// #include <thrust/iterator/constant_iterator.h>
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/functional.h>
+#include <thrust/iterator/zip_iterator.h>
+#include <thrust/transform_reduce.h>
#include <thrust/tuple.h>

#include "cusz/type.h"
diff --git a/src/stat/detail/maxerr.thrust.inl b/src/stat/detail/maxerr.thrust.inl
index 2415655..9b31e88 100644
--- a/src/stat/detail/maxerr.thrust.inl
+++ b/src/stat/detail/maxerr.thrust.inl
@@ -18,6 +18,8 @@
// #include <thrust/iterator/constant_iterator.h>
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/extrema.h>
+#include <thrust/transform.h>

#include "cusz/type.h"

27
var/spack/repos/builtin/packages/cusz/thrust-includes.patch
Normal file
@@ -0,0 +1,27 @@
diff --git a/psz/src/stat/detail/compare.thrust.inl b/psz/src/stat/detail/compare.thrust.inl
index f35c7df..719d68f 100644
--- a/psz/src/stat/detail/compare.thrust.inl
+++ b/psz/src/stat/detail/compare.thrust.inl
@@ -11,6 +11,9 @@

#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/functional.h>
+#include <thrust/iterator/zip_iterator.h>
+#include <thrust/transform_reduce.h>
#include <thrust/tuple.h>

#include "cusz/type.h"
diff --git a/psz/src/stat/detail/maxerr.thrust.inl b/psz/src/stat/detail/maxerr.thrust.inl
index f7a4db5..ce7925a 100644
--- a/psz/src/stat/detail/maxerr.thrust.inl
+++ b/psz/src/stat/detail/maxerr.thrust.inl
@@ -11,6 +11,8 @@

#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/extrema.h>
+#include <thrust/transform.h>

#include "cusz/type.h"
#include "port.hh"

@@ -30,7 +30,7 @@ def dav_sdk_depends_on(spec, when=None, propagate=None):
# Map the propagated variants to the dependency variant. Some packages may need
# overrides to propagate a dependency as something else, e.g., {"visit": "libsim"}.
# Most call-sites will just use a list.
if not type(propagate) is dict:
if type(propagate) is not dict:
propagate = dict([(v, v) for v in propagate])

# Determine the base variant

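The rewrite above keeps the exact `type(...) is dict` semantics while fixing the operator placement; for reference, the more common Python idiom is `isinstance`, which also accepts dict subclasses. A standalone sketch:

def normalize(propagate):
    # Accept either a list of variant names or an explicit mapping
    if not isinstance(propagate, dict):
        propagate = {v: v for v in propagate}
    return propagate


assert normalize(["mpi", "hdf5"]) == {"mpi": "mpi", "hdf5": "hdf5"}
assert normalize({"visit": "libsim"}) == {"visit": "libsim"}
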
@@ -25,6 +25,7 @@ class Dcap(AutotoolsPackage):
depends_on("m4", type="build")

depends_on("openssl")
depends_on("libxcrypt")
depends_on("zlib-api")

variant("plugins", default=True, description="Build plugins")

@@ -25,6 +25,7 @@ class Dd4hep(CMakePackage):
license("LGPL-3.0-or-later")

version("master", branch="master")
version("1.31", sha256="9c06a1b4462fc1b51161404889c74b37350162d0b0ac2154db27e3f102670bd1")
version("1.30", sha256="02de46151e945eff58cffd84b4b86d35051f4436608199c3efb4d2e1183889fe")
version("1.29", sha256="435d25a7ef093d8bf660f288b5a89b98556b4c1c293c55b93bf641fb4cba77e9")
version("1.28", sha256="b28d671eda0154073873a044a384486e66f1f200065deca99537aa84f07328ad")
@@ -118,13 +119,16 @@ class Dd4hep(CMakePackage):
depends_on("tbb", when="+tbb")
depends_on("intel-tbb@:2020.3", when="+tbb @:1.23")
depends_on("lcio", when="+lcio")
depends_on("edm4hep", when="+edm4hep")
depends_on("podio", when="+edm4hep")
depends_on("podio@:0.16.03", when="@:1.23 +edm4hep")
depends_on("podio@0.16:", when="@1.24: +edm4hep")
depends_on("podio@0.16.3:", when="@1.26: +edm4hep")
depends_on("podio@:0", when="@:1.29 +edm4hep")
depends_on("py-pytest", type=("build", "test"))
with when("+edm4hep"):
depends_on("edm4hep")
depends_on("edm4hep@0.10.5:", when="@1.31:")
depends_on("podio")
depends_on("podio@:0.16.03", when="@:1.23")
depends_on("podio@:0", when="@:1.29")
depends_on("podio@0.16:", when="@1.24:")
depends_on("podio@0.16.3:", when="@1.26:")
depends_on("podio@0.16.7:", when="@1.31:")

# See https://github.com/AIDASoft/DD4hep/pull/771 and https://github.com/AIDASoft/DD4hep/pull/876
conflicts(

@@ -552,7 +552,7 @@ def cmake_args(self):
)
# Make sure we use the same compiler that Trilinos uses
if spec.satisfies("+trilinos"):
options.extend([self.define("CMAKE_CXX_COMPILER", spec["trilinos"].kokkos_cxx)])
options.extend([self.define("CMAKE_CXX_COMPILER", self["trilinos"].kokkos_cxx)])

# Complex support
options.append(self.define_from_variant("DEAL_II_WITH_COMPLEX_VALUES", "complex"))

@@ -19,6 +19,7 @@ class Detray(CMakePackage):

license("MPL-2.0", checked_by="stephenswat")

version("0.88.0", sha256="bda15501c9c96af961e24ce243982f62051c535b9fe458fb28336a19b54eb47d")
version("0.87.0", sha256="2d4a76432dd6ddbfc00b88b5d482072e471fefc264b60748bb1f9a123963576e")
version("0.86.0", sha256="98350c94e8a2395b8712b7102fd449536857e8158b38a96cc913c79b70301170")
version("0.85.0", sha256="a0121a27fd08243d4a6aab060e8ab379ad5129e96775b45f6a683835767fa8e7")

@@ -20,6 +20,7 @@ class DlaFutureFortran(CMakePackage):
license("BSD-3-Clause")

version("main", branch="main")
version("0.3.0", sha256="404ce0d2d3df9317764450158901fd6cb2198b37f5687e9616519100ad6e9ece")
version("0.2.0", sha256="7fd3e1779c111b35f0d2701a024398b4f6e8dea4af523b6c8617d28c0b7ae61a")
version("0.1.0", sha256="9fd8a105cbb2f3e1daf8a49910f98fce68ca0b954773dba98a91464cf2e7c1da")

@@ -36,6 +37,7 @@ class DlaFutureFortran(CMakePackage):

depends_on("dla-future@0.4.1:0.5 +scalapack", when="@0.1.0")
depends_on("dla-future@0.6.0: +scalapack", when="@0.2.0:")
depends_on("dla-future@0.7.3: +scalapack", when="@0.3:")
depends_on("dla-future +shared", when="+shared")

depends_on("mpi", when="+test")

@@ -73,7 +73,7 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
generator("ninja")

depends_on("cmake@3.22:", type="build")
depends_on("pkgconfig", type="build")
depends_on("pkgconfig", type=("build", "link"))
depends_on("doxygen", type="build", when="+doc")
depends_on("mpi")

@@ -17,6 +17,7 @@ class Duckdb(MakefilePackage):
maintainers("glentner", "teaguesterling")

version("master", branch="master")
version("1.2.0", sha256="f22c97e18c071fa8e43b5e150c03c6ab4bcc510cca6e6b50cbe13af8535fa701")
version("1.1.3", sha256="2aea0af898ad753fee82b776fea1bf78ccbc9648986e7f7a87372df5e74cdb98")
version("1.1.2", sha256="a3319a64c390ed0454c869b2e4fc0af2413cd49f55cd0f1400aaed9069cdbc4c")
version("1.1.1", sha256="a764cef80287ccfd8555884d8facbe962154e7c747043c0842cd07873b4d6752")
@@ -92,7 +93,6 @@ class Duckdb(MakefilePackage):
# Extensions
variant("autocomplete", default=True, description="Include autocomplete for CLI in build")
variant("excel", default=True, description="Include Excel formatting extension in build")
variant("fts", default=True, description="Include FTS (full text search) support in build")
variant("httpfs", default=True, description="Include HTTPFS (& S3) support in build")
variant("inet", default=True, description="Include INET (ip address) support in build")
variant("json", default=True, description="Include JSON support in build")
@@ -100,6 +100,14 @@ class Duckdb(MakefilePackage):
variant("tpce", default=False, description="Include TPCE in build")
variant("tpch", default=False, description="Include TPCH in build")

# FTS was moved to an out-of-tree extension after v1.1.3
variant(
"fts",
default=True,
description="Include FTS (full text search) support in build",
when="@:1.1",
)

# APIs
variant("python", default=True, description="Build with Python driver")
extends("python", when="+python")

@@ -33,60 +33,30 @@ class Dyninst(CMakePackage):
version("10.2.1", sha256="8077c6c7a12577d2ffdcd07521c1eb1b7367da94d9a7ef10bf14053aeaae7ba1")
version("10.2.0", sha256="4212b93bef4563c7de7dce4258e899bcde52315a571087e87fde9f8040123b43")
version("10.1.0", sha256="4a121d70c1bb020408a7a697d74602e18250c3c85800f230566fcccd593c0129")
version("10.0.0", sha256="542fccf5c57c4fe784b1a9a9e3db01d40b16ad04e7174dc6f7eb23440485ba06")
version(
"9.3.2", tag="v9.3.2", commit="5d2ddacb273682daa014ae22f17f3575e05b411e", deprecated=True
)
version(
"9.3.0", tag="v9.3.0", commit="9b8e9c1f16d4616b827d2d36955604a8e3fb915c", deprecated=True
)
version(
"9.2.0", tag="v9.2.0", commit="3a6ad66df7294417cf61618acdcfcc0fecccb045", deprecated=True
)
version(
"9.1.0", tag="v9.1.0", commit="df6d090061bae7ff2ba5a6bd57bb2ecbf538ef7a", deprecated=True
)
version(
"8.2.1", tag="v8.2.1", commit="939afcbad1a8273636a3686a31b51dae4f1f0c11", deprecated=True
)

depends_on("c", type="build")  # generated
depends_on("cxx", type="build")  # generated
depends_on("c", type="build")
depends_on("cxx", type="build")

variant(
"openmp",
default=True,
description="Enable OpenMP support for ParseAPI " "(version 10.0.0 or later)",
)
variant("openmp", default=True, description="Enable OpenMP support for ParseAPI ")

variant("static", default=False, description="Build static libraries")

variant("stat_dysect", default=False, description="Patch for STAT's DySectAPI")

boost_libs = (
"+atomic+chrono+date_time+filesystem+system+thread+timer+container+random+exception"
depends_on(
"boost+atomic+chrono+date_time+filesystem+system+thread+timer+container+random+exception"
)

depends_on("boost@1.61.0:" + boost_libs, when="@10.1.0:")
depends_on("boost@1.61.0:1.69" + boost_libs, when="@:10.0")
depends_on("boost@1.67.0:" + boost_libs, when="@11.0.0:")
depends_on("boost@1.70.0:" + boost_libs, when="@12:12.3.0")
depends_on("boost@1.71.0:" + boost_libs, when="@13:")
depends_on("boost@1.61.0:", when="@10.1.0:")
depends_on("boost@1.67.0:", when="@11.0.0:")
depends_on("boost@1.70.0:", when="@12:12.3.0")
depends_on("boost@1.71.0:", when="@13:")

depends_on("libiberty+pic")

# Dyninst uses elfutils starting with 9.3.0, and used libelf
# before that.
# NB: Parallel DWARF parsing in Dyninst 10.2.0 requires a thread-
# safe libdw
# Parallel DWARF parsing requires a thread-safe libdw
depends_on("elfutils", type="link")
depends_on("elfutils@0.186:", type="link", when="@12.0.1:")
depends_on("elfutils@0.178:", type="link", when="@10.2.0:")
depends_on("elfutils", type="link", when="@9.3.0:10.1")
depends_on("libelf", type="link", when="@:9.2")

# Dyninst uses libdw from elfutils starting with 10.0, and used
# libdwarf before that.
depends_on("libdwarf", when="@:9")

with when("@:12.3.0"):
# findtbb.cmake in the dyninst repo does not work with recent tbb
@@ -95,21 +65,16 @@ class Dyninst(CMakePackage):
conflicts("^intel-oneapi-tbb@2021.1:")
conflicts("^intel-parallel-studio")

depends_on("intel-tbb@2019.9:", when="@13.0.0:")
depends_on("tbb@2018.6.0:", when="@10.0.0:12.3.0")
depends_on("tbb")
requires("^[virtuals=tbb] intel-tbb@2019.9:", when="@13.0.0:")

with when("@13.0.0:"):
depends_on("cmake@3.14.0:", type="build")
conflicts("cmake@3.19.0")

depends_on("cmake@3.4.0:", type="build", when="@10.1.0:")
depends_on("cmake@3.0.0:", type="build", when="@10.0.0:10.0")
depends_on("cmake@2.8:", type="build", when="@:9")

patch("stat_dysect.patch", when="+stat_dysect")
patch("stackanalysis_h.patch", when="@9.2.0")
patch("v9.3.2-auto.patch", when="@9.3.2 %gcc@:4.7")
patch("tribool.patch", when="@9.3.0:10.0.0 ^boost@1.69:")
patch(
"missing_include_deque.patch",
when="@10.0.0:12.2.0",
@@ -124,92 +89,21 @@ class Dyninst(CMakePackage):
# Version 11.0 requires a C++11-compliant ABI
conflicts("%gcc@:5", when="@11.0.0:")

# Versions 9.3.x used cotire, but have no knob to turn it off.
# Cotire has no real use for one-time builds and can break
# parallel builds with both static and shared libs.
@when("@9.3.0:9.3")
def patch(self):
filter_file("USE_COTIRE true", "USE_COTIRE false", "cmake/shared.cmake")

# New style cmake args, starting with 10.1.
@when("@10.1.0:")
def cmake_args(self):
spec = self.spec

args = [
"-DBoost_ROOT_DIR=%s" % spec["boost"].prefix,
"-DElfUtils_ROOT_DIR=%s" % spec["elf"].prefix,
"-DLibIberty_ROOT_DIR=%s" % spec["libiberty"].prefix,
"-DTBB_ROOT_DIR=%s" % spec["tbb"].prefix,
self.define("Boost_ROOT_DIR", spec["boost"].prefix),
self.define("ElfUtils_ROOT_DIR", spec["elfutils"].prefix),
self.define("LibIberty_ROOT_DIR", spec["libiberty"].prefix),
self.define("TBB_ROOT_DIR", spec["tbb"].prefix),
self.define("LibIberty_LIBRARIES", spec["libiberty"].libs),
self.define_from_variant("USE_OpenMP", "openmp"),
self.define_from_variant("ENABLE_STATIC_LIBS", "static"),
]

if spec.satisfies("+openmp"):
args.append("-DUSE_OpenMP=ON")
else:
args.append("-DUSE_OpenMP=OFF")

if spec.satisfies("+static"):
args.append("-DENABLE_STATIC_LIBS=YES")
else:
args.append("-DENABLE_STATIC_LIBS=NO")

# Make sure Dyninst doesn't try to build its own dependencies
# outside of Spack
# Make sure Dyninst doesn't try to build its own dependencies outside of Spack
if spec.satisfies("@10.2.0:12.3.0"):
args.append("-DSTERILE_BUILD=ON")

return args

# Old style cmake args, up through 10.0.
@when("@:10.0")
def cmake_args(self):
spec = self.spec

# Elf -- the directory containing libelf.h.
elf = spec["elf"].prefix
elf_include = os.path.dirname(find_headers("libelf", elf.include, recursive=True)[0])

# Dwarf -- the directory containing elfutils/libdw.h or
# libdwarf.h, and the path to libdw.so or libdwarf.so.
if spec.satisfies("@10.0.0:"):
dwarf_include = elf.include
dwarf_lib = find_libraries("libdw", elf, recursive=True)
else:
dwarf_include = spec["libdwarf"].prefix.include
dwarf_lib = spec["libdwarf"].libs

args = [
"-DPATH_BOOST=%s" % spec["boost"].prefix,
"-DIBERTY_LIBRARIES=%s" % spec["libiberty"].libs,
"-DLIBELF_INCLUDE_DIR=%s" % elf_include,
"-DLIBELF_LIBRARIES=%s" % spec["elf"].libs,
"-DLIBDWARF_INCLUDE_DIR=%s" % dwarf_include,
"-DLIBDWARF_LIBRARIES=%s" % dwarf_lib,
]

# TBB include and lib directories, version 10.x or later.
if spec.satisfies("@10.0.0:"):
args.extend(
[
"-DTBB_INCLUDE_DIRS=%s" % spec["tbb"].prefix.include,
"-DTBB_LIBRARY=%s" % spec["tbb"].prefix.lib,
]
)

# Openmp applies to version 10.x or later.
if spec.satisfies("@10.0.0:"):
if spec.satisfies("+openmp"):
args.append("-DUSE_OpenMP=ON")
else:
args.append("-DUSE_OpenMP=OFF")

# Static libs started with version 9.1.0.
if spec.satisfies("@9.1.0:"):
if spec.satisfies("+static"):
args.append("-DENABLE_STATIC_LIBS=1")
else:
args.append("-DENABLE_STATIC_LIBS=NO")
args.append(self.define("STERILE_BUILD", True))

return args

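The TBB handling in the dyninst hunk pairs a virtual dependency with a provider constraint. A minimal sketch of the idiom, with an illustrative consumer name:

from spack.package import *


class DemoConsumer(CMakePackage):
    """Illustrative only; not a real package."""

    # Depend on the *virtual* tbb: any provider can satisfy it...
    depends_on("tbb")

    # ...but from 13.0.0 on, require that the provider chosen for the
    # tbb virtual is intel-tbb at 2019.9 or newer.
    requires("^[virtuals=tbb] intel-tbb@2019.9:", when="@13.0.0:")
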
@@ -1,11 +0,0 @@
--- a/dataflowAPI/h/stackanalysis.h 2016-06-29 14:54:14.000000000 -0700
+++ b/dataflowAPI/h/stackanalysis.h 2016-08-02 09:50:13.619079000 -0700
@@ -331,7 +331,7 @@

// To build intervals, we must replay the effect of each instruction.
// To avoid sucking enormous time, we keep those transfer functions around...
- typedef std::map<ParseAPI::Block *, std::map<Offset, TransferFuncs>>
+ typedef std::map<ParseAPI::Block *, std::map<Offset, TransferFuncs> >
InstructionEffects;

DATAFLOW_EXPORT StackAnalysis();

@@ -1,34 +0,0 @@
Add explicit casts from boost::tribool to bool. Starting with 1.69,
tribool added 'explicit' to the conversion operator to bool, and this
was breaking the build when using boost >= 1.69.

Patch is from: https://github.com/dyninst/dyninst/commit/54a2debd9


diff --git a/dataflowAPI/rose/util/Message.C b/dataflowAPI/rose/util/Message.C
index a8a66ad4c..5e276f97d 100644
--- a/dataflowAPI/rose/util/Message.C
+++ b/dataflowAPI/rose/util/Message.C
@@ -1123,7 +1123,7 @@ StreamBuf::bake() {
destination_->bakeDestinations(message_.properties(), baked_/*out*/);
anyUnbuffered_ = false;
for (BakedDestinations::const_iterator bi=baked_.begin(); bi!=baked_.end() && !anyUnbuffered_; ++bi)
- anyUnbuffered_ = !bi->second.isBuffered;
+ anyUnbuffered_ = static_cast<bool>(!bi->second.isBuffered);
isBaked_ = true;
}
}
diff --git a/dataflowAPI/rose/util/Message.h b/dataflowAPI/rose/util/Message.h
index 816f68d2e..03592cb97 100644
--- a/dataflowAPI/rose/util/Message.h
+++ b/dataflowAPI/rose/util/Message.h
@@ -386,7 +386,8 @@ struct SAWYER_EXPORT ColorSpec {
ColorSpec(AnsiColor fg, AnsiColor bg, bool bold): foreground(fg), background(bg), bold(bold) {}

/** Returns true if this object is in its default-constructed state. */
- bool isDefault() const { return COLOR_DEFAULT==foreground && COLOR_DEFAULT==background && !bold; }
+ bool isDefault() const { return COLOR_DEFAULT==foreground && COLOR_DEFAULT==background
+ && static_cast<bool>(!bold); }
};

/** Colors to use for each message importance.

@@ -1,73 +0,0 @@
Change some 'for (const auto& t: type)' usage to the older but
equivalent 'for (auto t = type.begin(); ...)'. This patch allows
dyninst 9.3.2 to build with gcc 4.4 which doesn't support the newer
syntax.


diff --git a/dyninstAPI/src/BPatch.C b/dyninstAPI/src/BPatch.C
index ebf7db0c4..49fe69f9a 100644
--- a/dyninstAPI/src/BPatch.C
+++ b/dyninstAPI/src/BPatch.C
@@ -166,16 +166,16 @@ BPatch::BPatch()
stdTypes = BPatch_typeCollection::getGlobalTypeCollection();
vector<Type *> *sTypes = Symtab::getAllstdTypes();
BPatch_type* type = NULL;
- for(const auto& t: *sTypes) {
- stdTypes->addType(type = new BPatch_type(t));
+ for(auto t = sTypes->begin(); t != sTypes->end(); ++t) {
+ stdTypes->addType(type = new BPatch_type(*t));
type->decrRefCount();
}
delete sTypes;

builtInTypes = new BPatch_builtInTypeCollection;
sTypes = Symtab::getAllbuiltInTypes();
- for(const auto& t: *sTypes) {
- builtInTypes->addBuiltInType(type = new BPatch_type(t));
+ for(auto t = sTypes->begin(); t != sTypes->end(); ++t) {
+ builtInTypes->addBuiltInType(type = new BPatch_type(*t));
type->decrRefCount();
}
delete sTypes;
diff --git a/dyninstAPI/src/BPatch_collections.C b/dyninstAPI/src/BPatch_collections.C
index f4e2986a3..129f8b74a 100644
--- a/dyninstAPI/src/BPatch_collections.C
+++ b/dyninstAPI/src/BPatch_collections.C
@@ -172,12 +172,12 @@ BPatch_typeCollection::~BPatch_typeCollection()
assert(refcount == 0 ||
refcount == 1);

- for(const auto& t: typesByName) {
- t.second->decrRefCount();
+ for(auto t = typesByName.begin(); t != typesByName.end(); ++t) {
+ t->second->decrRefCount();
}

- for(const auto& t: typesByID) {
- t.second->decrRefCount();
+ for(auto t = typesByID.begin(); t != typesByID.end(); ++t) {
+ t->second->decrRefCount();
}
}

diff --git a/symtabAPI/src/Collections.C b/symtabAPI/src/Collections.C
index 7431dd6bf..43c339f45 100644
--- a/symtabAPI/src/Collections.C
+++ b/symtabAPI/src/Collections.C
@@ -318,12 +318,12 @@ typeCollection::typeCollection() :
typeCollection::~typeCollection()
{
// delete all of the types
- for(const auto& t: typesByName) {
- t.second->decrRefCount();
+ for(auto t = typesByName.begin(); t != typesByName.end(); ++t) {
+ t->second->decrRefCount();
}

- for(const auto& t: typesByID) {
- t.second->decrRefCount();
+ for(auto t = typesByID.begin(); t != typesByID.end(); ++t) {
+ t->second->decrRefCount();
}
}

@@ -30,7 +30,7 @@ def dav_sdk_depends_on(spec, when=None, propagate=None):
# Map the propagated variants to the dependency variant. Some packages may need
# overrides to propagate a dependency as something else, e.g., {"visit": "libsim"}.
# Most call-sites will just use a list.
if not type(propagate) is dict:
if type(propagate) is not dict:
propagate = dict([(v, v) for v in propagate])

# Determine the base variant

@@ -12,7 +12,7 @@ class Embree(CMakePackage):
     url = "https://github.com/embree/embree/archive/v3.7.0.tar.gz"
     maintainers("aumuell")
 
-    license("Apache-2.0")
+    license("Apache-2.0", checked_by="wdconinc")
 
     version("4.3.3", sha256="8a3bc3c3e21aa209d9861a28f8ba93b2f82ed0dc93341dddac09f1f03c36ef2d")
     version("4.3.2", sha256="dc7bb6bac095b2e7bc64321435acd07c6137d6d60e4b79ec07bb0b215ddf81cb")
@@ -60,9 +60,10 @@ def cmake_args(self):
         spec = self.spec
 
         args = [
-            "-DBUILD_TESTING=OFF",
-            "-DEMBREE_TUTORIALS=OFF",
-            "-DEMBREE_IGNORE_CMAKE_CXX_FLAGS=ON",
+            self.define("BUILD_TESTING", self.run_tests),
+            self.define("EMBREE_TUTORIALS", self.run_tests),
+            self.define("EMBREE_TUTORIALS_GLFW", False),
+            self.define("EMBREE_IGNORE_CMAKE_CXX_FLAGS", True),
             self.define_from_variant("EMBREE_ISPC_SUPPORT", "ispc"),
         ]
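Note: the `cmake_args` change swaps hand-written `-D...` strings for the standard `CMakePackage` helpers, and `BUILD_TESTING`/`EMBREE_TUTORIALS` now follow `self.run_tests` instead of being hard-coded off. A rough sketch of what the boolean form of `self.define` renders (simplified; Spack's real helper also handles lists, paths, and variant lookups):

```python
# Simplified sketch of CMakePackage.define for booleans and scalar values;
# this is an approximation of the Spack helper, not its actual source.
def define(name, value):
    if isinstance(value, bool):
        return f"-D{name}:BOOL={'ON' if value else 'OFF'}"
    return f"-D{name}={value}"

# With run_tests disabled, these are equivalent to the old hard-coded flags:
assert define("BUILD_TESTING", False) == "-DBUILD_TESTING:BOOL=OFF"
assert define("EMBREE_IGNORE_CMAKE_CXX_FLAGS", True) == "-DEMBREE_IGNORE_CMAKE_CXX_FLAGS:BOOL=ON"
```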
@@ -14,7 +14,7 @@ class Enzyme(CMakePackage):
     """
 
     homepage = "https://enzyme.mit.edu"
-    url = "https://github.com/wsmoses/Enzyme/archive/v0.0.15.tar.gz"
+    url = "https://github.com/wsmoses/Enzyme/archive/v0.0.172.tar.gz"
     list_url = "https://github.com/wsmoses/Enzyme/releases"
     git = "https://github.com/wsmoses/Enzyme"
 
@@ -23,6 +23,7 @@ class Enzyme(CMakePackage):
     root_cmakelists_dir = "enzyme"
 
     version("main", branch="main")
+    version("0.0.172", sha256="688200164787d543641cb446cff20f6a8e8b5c92bb7032ebe7f867efa67ceafb")
     version("0.0.135", sha256="49c798534faec7ba524a3ed053dd4352d690a44d3cad5a14915c9398dc9b175b")
     version("0.0.100", sha256="fbc53ec02adc0303ff200d7699afface2d9fbc7350664e6c6d4c527ef11c2e82")
     version("0.0.81", sha256="4c17d0c28f0572a3ab97a60f1e56bbc045ed5dd64c2daac53ae34371ca5e8b34")
@@ -42,7 +43,8 @@ class Enzyme(CMakePackage):
     depends_on("llvm@7:14", when="@0.0.48:0.0.68")
     depends_on("llvm@9:16", when="@0.0.69:0.0.79")
     depends_on("llvm@11:16", when="@0.0.80:0.0.99")
-    depends_on("llvm@11:19", when="@0.0.100:")
+    depends_on("llvm@11:19", when="@0.0.100:0.0.148")
+    depends_on("llvm@15:19", when="@0.0.149:")
     depends_on("cmake@3.13:", type="build")
 
     def cmake_args(self):
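Note: in Spack's `@x:y` syntax both endpoints are inclusive and either may be omitted, so the hunk above splits the open-ended `@0.0.100:` constraint: Enzyme releases up to 0.0.148 keep LLVM 11-19, while 0.0.149 and newer require LLVM 15-19. An illustrative (non-Spack) restatement of that partition:

```python
# Illustrative restatement of the split LLVM constraint above; Spack's real
# version-range logic lives in its Spec machinery, not in code like this.
def llvm_requirement(enzyme_version: tuple) -> str:
    if (0, 0, 100) <= enzyme_version <= (0, 0, 148):
        return "llvm@11:19"
    if enzyme_version >= (0, 0, 149):
        return "llvm@15:19"
    raise ValueError("covered by the earlier, narrower depends_on lines")

assert llvm_requirement((0, 0, 135)) == "llvm@11:19"
assert llvm_requirement((0, 0, 172)) == "llvm@15:19"
```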
@@ -121,9 +121,9 @@ def setup_build_environment(self, env):
         # Manually turn off device self.defines to solve Kokkos issues in Nalu-Wind headers
         env.append_flags("CXXFLAGS", "-U__HIP_DEVICE_COMPILE__ -DDESUL_HIP_RDC")
     if self.spec.satisfies("+cuda"):
-        env.set("OMPI_CXX", self.spec["kokkos-nvcc-wrapper"].kokkos_cxx)
-        env.set("MPICH_CXX", self.spec["kokkos-nvcc-wrapper"].kokkos_cxx)
-        env.set("MPICXX_CXX", self.spec["kokkos-nvcc-wrapper"].kokkos_cxx)
+        env.set("OMPI_CXX", self["kokkos-nvcc-wrapper"].kokkos_cxx)
+        env.set("MPICH_CXX", self["kokkos-nvcc-wrapper"].kokkos_cxx)
+        env.set("MPICXX_CXX", self["kokkos-nvcc-wrapper"].kokkos_cxx)
     if self.spec.satisfies("+rocm"):
         env.set("OMPI_CXX", self.spec["hip"].hipcc)
         env.set("MPICH_CXX", self.spec["hip"].hipcc)
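Note: the only change in this hunk is `self.spec["kokkos-nvcc-wrapper"]` becoming `self["kokkos-nvcc-wrapper"]`; recent Spack lets the package object itself be indexed for its dependencies. A toy sketch of that indirection (`FakePackage`/`FakeSpec` are stand-ins, not Spack classes):

```python
# Toy stand-ins showing why pkg["dep"] can replace pkg.spec["dep"]: the
# package forwards item access to its concretized spec. Spack's PackageBase
# does this with extra bookkeeping for virtuals and build dependencies.
class FakeSpec(dict):
    pass

class FakePackage:
    def __init__(self, spec):
        self.spec = spec

    def __getitem__(self, name):
        return self.spec[name]

pkg = FakePackage(FakeSpec({"kokkos-nvcc-wrapper": "/path/to/nvcc_wrapper"}))
assert pkg["kokkos-nvcc-wrapper"] == pkg.spec["kokkos-nvcc-wrapper"]
```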
@@ -73,10 +73,6 @@ class Extrae(AutotoolsPackage):
     depends_on("mpi")
     depends_on("libunwind")
 
-    # TODO: replace this with an explicit list of components of Boost,
-    # for instance depends_on('boost +filesystem')
-    # See https://github.com/spack/spack/pull/22303 for reference
-    depends_on(Boost.with_default_variants)
     depends_on("libdwarf")
     depends_on("elf", type="link")
     depends_on("libxml2")
@@ -92,6 +88,10 @@ class Extrae(AutotoolsPackage):
     variant("dyninst", default=False, description="Use dyninst for dynamic code installation")
     with when("+dyninst"):
         depends_on("dyninst@10.1.0:")
+        # TODO: replace this with an explicit list of components of Boost,
+        # for instance depends_on('boost +filesystem')
+        # See https://github.com/spack/spack/pull/22303 for reference
+        depends_on(Boost.with_default_variants)
         depends_on("elfutils", when="@4.1.2:")
         depends_on("intel-oneapi-tbb", when="@4.1.2:")
 
@@ -127,7 +127,6 @@ def configure_args(self):
         args = [
             "--with-mpi=%s" % mpiroot,
             "--with-unwind=%s" % spec["libunwind"].prefix,
-            "--with-boost=%s" % spec["boost"].prefix,
             "--with-dwarf=%s" % spec["libdwarf"].prefix,
             "--with-elf=%s" % spec["elf"].prefix,
             "--with-xml-prefix=%s" % spec["libxml2"].prefix,
@@ -141,7 +140,10 @@ def configure_args(self):
         )
 
         if spec.satisfies("+dyninst"):
-            args += ["--with-dyninst={spec['dyninst'].prefix}"]
+            args += [
+                f"--with-dyninst={spec['dyninst'].prefix}",
+                f"--with-boost={spec['boost'].prefix}",
+            ]
 
         if spec.satisfies("@4.1.2:"):
             args += [
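Note: the removed line was missing its `f` prefix, so configure received the literal text `--with-dyninst={spec['dyninst'].prefix}`; the replacement fixes the interpolation and adds the Boost flag that now only applies under `+dyninst`. A small demonstration of the difference (the path is made up):

```python
# Demonstration of the missing-f-prefix bug fixed above; the prefix path
# here is invented for illustration.
class _Dep:
    prefix = "/opt/spack/dyninst-13.0.0"

spec = {"dyninst": _Dep()}

broken = "--with-dyninst={spec['dyninst'].prefix}"   # no f: braces stay literal
fixed = f"--with-dyninst={spec['dyninst'].prefix}"   # f-string interpolates

assert broken == "--with-dyninst={spec['dyninst'].prefix}"
assert fixed == "--with-dyninst=/opt/spack/dyninst-13.0.0"
```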
@@ -19,6 +19,7 @@ class Fargparse(CMakePackage):
     version("develop", branch="develop")
     version("main", branch="main")
 
+    version("1.9.0", sha256="c83c13fa90b6b45adf8d84fe00571174acfa118d2a0d1e8c467f74bbd7dec49d")
     version("1.8.0", sha256="37108bd3c65d892d8c24611ce4d8e5451767e4afe81445fde67eab652178dd01")
     version("1.7.0", sha256="9889e7eca9c020b742787fba2be0ba16edcc3fcf52929261ccb7d09996a35f89")
     version("1.6.0", sha256="055a0af44f50c302f8f20a8bcf3d26c5bbeacf5222cdbaa5b19da4cff56eb9c0")
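Note: each `version()` line pins the sha256 of the corresponding release tarball, and Spack aborts the install if the fetched archive's digest differs. A minimal sketch of that check (the real verification happens in Spack's fetch stage, not in package code):

```python
import hashlib

# Minimal sketch of the integrity check implied by the sha256 pins above.
def sha256_matches(path: str, expected: str) -> bool:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected
```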
Some files were not shown because too many files have changed in this diff.