# Compare commits

381 commits: `develop-20...features/e`
The 381 commits in this range, by abbreviated SHA1:

```text
1b4ff30665 c10797718a 81ea29b007 d8666a7fdf 7c41bba6f8 20e9fe3785 401218b4f1 adfc1c0896 f4402c1cde c1d6d93388
e9012c7781 59acfe4f0b 004ff9d4e2 9d20be5fe5 edc07dab27 acde8ef104 ed76966a3a 2015a51d1a 34b8fe827e 6f1ed9b2e4
dd00f50943 f0ec625321 d406c371a8 42d374a34d d90e4fcc3d a44fde9dc9 9ac8841dab a1f87638ec 3b55e0a65d 42667fe7fa
cd27611d2f b111d2172e 055263fa3c f34f207bdc 0c9f0fd40d 24d5b1e645 616f7bcaef dace0316a2 3bb86418b8 6f6489a2c7
543b697df1 042dc2e1d8 f745e49d9a eda21cdfba bc8b026072 0f84782fcc 43b86ce282 d30698d9a8 8e9efa86c8 84faf5a6cf
9428749a3c efdac68c28 5398c31e82 188168c476 4af84ac208 deb8b51098 0d582b2ea9 f88b01c34b 0533c6a1b8 f73d5c2b0e
567d0ee455 577df6f498 8790efbcfe 212b1edb6b d85a27f317 5622afbfd1 f345038317 e43d4cfee0 7070658e2a fc4b032fb4
8c97d8ad3f 26107fe6b2 9278c0df21 37e95713f4 3ae8a3a517 031af84e90 7d4b65491d 3038d1e7cd b2e6ef97ce e55236ce5b
68dfd6ba6e 38d2459f94 e309f367af 3b59c95323 fddaeadff8 c85eaf9dc5 ddec7f8aec f057d7154b a102950d67 783be9b350
27c8135207 77ce4701b9 73ad3f729e 1e7a64ad85 3a5864bcdb 7e13a7dccb e3249fa155 0c20760576 7ee7995493 ba1fac1c31
b05f0ecb6f d5c66b75c3 98303d6956 4622d638a6 02023265fc 8a075998f8 f2f48b1872 168d63c447 c25d4cbc1d ccb07538f7
1356b13b2f 935f862863 9f6d9df302 65d33c02a1 40073e7b21 752e02e2f2 d717b3a33f 9817f24c9a 1f7c4b0557 6c42d2b7f7
8df036a5a5 582ebee74c 1017b9ddde 80ae73119d 1d88f690a4 fbb271d804 d6aac873b7 ab3ffd9361 3b9454a5cc c8eb0f9361
fb0f14eb06 e489ee4e2e fcd49f2f08 b3268c2703 d1bfcfafe3 490c9f5e16 85628d1474 720c34d18d cd175377ca b91ec05e13
3bb15f420b 124a81df5b d9472c083d ac2a5ef4dd ea210a6acf afb3bef7af b5b5881426 76fc7915a8 e7798b619b 8ecef12a20
694292ebbf 7f18f6f8a1 0b12a480eb 2d91a79af3 72fcee7227 d147ef231f 1c7af83d32 b982dfc071 c0da8a00fc 3f18f689d8
9dc4553cf3 9a99c94b75 682f0b2a54 dbab0c1ff5 2bf95f5340 55561405b8 8eef458cea 64eea9d996 60b4e2128b 2f8cea2792
06f9bcf734 ee2725762f eace0a177c 80c7d74707 a6f5bf821d b214406253 5b003d80e5 185b2d3ee7 71bb2a1899 785c31b730
175da4a88a 73fc1ef11c 2d77e44f6f 033599c4cd 8096ed4b22 b49bfe25af 8b2f34d802 3daed0d6a7 d6c1f75e8d c80a4c1ddc
466abcb62d 69e99f0c16 bbee6dfc58 2d60cf120b db17fc2f33 c62080d498 f9bbe549fa 55d7fec69c e938907150 0c40b86e96
3d4cf0d8eb 966e19d278 8f930462bd bf4fccee15 784771a008 e4a9d9ae5b a6886983dc 93a34a9635 91a54029f9 5400b49ed6
c17fc3c0c1 6f248836ea 693c1821b0 62afe3bd5a 53a756d045 321b687ae6 c8617f0574 7843e2ead0 dca3d071d7 436f077482
ab3f705019 d739989ec8 52ee1967d6 1af7284b5d e1bcefd805 2159b0183d 078fd225a9 83974828c7 2412f74557 db06d3621d
c25170d2f9 b3dfe13670 6358e84b48 8e634d8e49 1a21376515 bf45a2b6d3 475ce955e7 5e44289787 e66888511f e9e5beee1f
ffd134c09d bfadd5c9a5 16e9279420 ac0903ef9f 648839dffd 489a604920 2ac3435810 69ea180d26 f52f217df0 df74aa5d7e
41932c53ae 4296db794f 9ab9302409 0187376e54 7340d2cb83 641d4477d5 3ff2fb69af e3024b1bcb e733b87865 919985dc1b
d746f7d427 b6deab515b 848220c4ba 98462bd27e 2e2515266d 776ab13276 c2ce9a6d93 4e3ed56dfa dcfcc03497 125c20bc06
f7696a4480 a5d7667cb6 d45818ccff bcb7af6eb3 f438fb6c79 371a8a361a 86b9ce1c88 05232034f5 7a3da0f606 d96406a161
ffa5962356 67e74da3ba 9ee2d79de1 79e4a13eee 4627438373 badaaf7092 815ac000cc 7bc5b26c52 a0e7ca94b2 e56c90d839
54003d4d72 c47b554fa1 b027f64a7f 3765a5f7f8 690661eadd f7bbc326e4 a184bfc1a6 81634440fb 711d7683ac 967356bcf5
c006ed034a d065c65d94 e23c372ff1 25d2de5629 d73a23ce35 a62cb3c0f4 177da4595e e4f05129fe c25b994917 95c4c5270a
1cf6a15a08 47d206611a a6789cf653 933cd858e0 8856361076 d826df7ef6 d8a9b42da6 7d926f86e8 1579544d57 1cee3fb4a5
a8e2ad53dd 6821fa7246 09c68da1bd 73064d62cf 168ed2a782 9f60b29495 7abcd78426 d5295301de beccc49b81 037e7ffe33
293da8ed20 2780ab2f6c 1ed3c81b58 50ce0a25b2 d784227603 ab9ed91539 421256063e 75459bc70c 33752eabb8 f1d1bb9167
68eaff24b0 862024cae1 9d6bcd67c3 d97ecfe147 0d991de50a 4f278a0255 6e72a3cff1 1532c77ce6 5ffbce275c 0e2ff2dddb
c0c446a095 33dbd44449 7b0979c1e9 c9849dd41d d44e97d3f2 8713ab0f67 6a47339bf8 1c0fb6d641 b45eee29eb 6d26274459
2fb07de7bc 7678dc6b49 1944dd55a7 0b6c724743 fa98023375 e79a911bac fd3efc71fd 0458de18de f94ac8c770 a03c28a916
7b7fdf27f3 192e564e26 b8c5099cde ea5bca9067 e33eafd34f e1344b5497 cf9dc3fc81 d265dd2487 a2a6e65e27 0085280db8
6e07bf149d
```
**`.github/workflows/audit.yaml`** (4 changes; vendored)

```diff
@@ -20,7 +20,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
@@ -38,7 +38,7 @@ jobs:
         run: |
           . share/spack/setup-env.sh
           $(which spack) audit packages
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
         if: ${{ inputs.with_coverage == 'true' }}
         with:
           flags: unittests,linux,audits
```
**`.github/workflows/nightly-win-builds.yml`** (new file, 31 additions; vendored)

```yaml
name: Windows Paraview Nightly

on:
  schedule:
    - cron: '0 2 * * *'  # Run at 2 am

defaults:
  run:
    shell:
      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}

jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools coverage
      - name: Build Test
        run: |
          spack compiler find
          spack external find cmake ninja win-sdk win-wdk wgl msmpi
          spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
          exit 0
```
**`.github/workflows/unit_tests.yaml`** (17 changes; vendored)

```diff
@@ -50,7 +50,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install System packages
@@ -87,7 +87,7 @@ jobs:
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
   # Test shell integration
@@ -97,7 +97,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: '3.11'
       - name: Install System packages
@@ -118,7 +118,7 @@ jobs:
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
         with:
           flags: shelltests,linux

@@ -137,6 +137,7 @@ jobs:
       - name: Setup repo and non-root user
         run: |
           git --version
+          git config --global --add safe.directory /__w/spack/spack
           git fetch --unshallow
           . .github/workflows/setup_git.sh
           useradd spack-test
@@ -154,7 +155,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: '3.11'
       - name: Install System packages
@@ -175,7 +176,7 @@ jobs:
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
         with:
           flags: unittests,linux,clingo
   # Run unit tests on MacOS
@@ -188,7 +189,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install Python packages
@@ -210,6 +211,6 @@ jobs:
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
         with:
           flags: unittests,macos
```
**`.github/workflows/valid-style.yml`** (5 changes; vendored)

```diff
@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -72,6 +72,7 @@ jobs:
       - name: Setup repo and non-root user
         run: |
           git --version
+          git config --global --add safe.directory /__w/spack/spack
           git fetch --unshallow
           . .github/workflows/setup_git.sh
           useradd spack-test
```
**`.github/workflows/windows_python.yml`** (10 changes; vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -33,7 +33,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
         with:
           flags: unittests,windows
   unit-tests-cmd:
@@ -42,7 +42,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -57,7 +57,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
-      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
         with:
           flags: unittests,windows
   build-abseil:
@@ -66,7 +66,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
         with:
           python-version: 3.9
       - name: Install Python packages
```
**`CHANGELOG.md`** (218 additions)

All of the following is new content added at the top of the file (hunk `@@ -1,3 +1,221 @@`):

````markdown
# v0.20.0 (2023-05-21)

`v0.20.0` is a major feature release.

## Features in this release

1. **`requires()` directive and enhanced package requirements**

   We've added some more enhancements to requirements in Spack (#36286).

   There is a new `requires()` directive for packages. `requires()` is the opposite of
   `conflicts()`. You can use it to impose constraints on this package when certain
   conditions are met:

   ```python
   requires(
       "%apple-clang",
       when="platform=darwin",
       msg="This package builds only with clang on macOS"
   )
   ```

   More on this in [the docs](https://spack.rtfd.io/en/latest/packaging_guide.html#conflicts-and-requirements).

   You can also now add a `when:` clause to `requires:` in your `packages.yaml`
   configuration or in an environment:

   ```yaml
   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "Only OpenMPI 4.1.5 and up can build with fancy compilers"
   ```

   More details can be found [here](https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements)

2. **Exact versions**

   Spack did not previously have a way to distinguish a version if it was a prefix of
   some other version. For example, `@3.2` would match `3.2`, `3.2.1`, `3.2.2`, etc. You
   can now match *exactly* `3.2` with `@=3.2`. This is useful, for example, if you need
   to patch *only* the `3.2` version of a package. The new syntax is described in
   [the docs](https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).

   Generally, when writing packages, you should prefer to use ranges like `@3.2` over
   the specific versions, as this allows the concretizer more leeway when selecting
   versions of dependencies. More details and recommendations are in the
   [packaging guide](https://spack.readthedocs.io/en/latest/packaging_guide.html#ranges-versus-specific-versions).

   See #36273 for full details on the version refactor.

3. **New testing interface**

   Writing package tests is now much simpler with a new
   [test interface](https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).

   Writing a test is now as easy as adding a method that starts with `test_`:

   ```python
   class MyPackage(Package):
       ...

       def test_always_fails(self):
           """use assert to always fail"""
           assert False

       def test_example(self):
           """run installed example"""
           example = which(self.prefix.bin.example)
           example()
   ```

   You can use Python's native `assert` statement to implement your checks -- no more
   need to fiddle with `run_test` or other test framework methods. Spack will
   introspect the class and run `test_*` methods when you run `spack test`.

4. **More stable concretization**

   * Now, `spack concretize` will *only* concretize the new portions of the environment
     and will not change existing parts of an environment unless you specify `--force`.
     This has always been true for `unify:false`, but not for `unify:true` and
     `unify:when_possible` environments. Now it is true for all of them (#37438, #37681).

   * The concretizer has a new `--reuse-deps` argument that *only* reuses dependencies.
     That is, it will always treat the *roots* of your environment as it would with
     `--fresh`. This allows you to upgrade just the roots of your environment while
     keeping everything else stable (#30990).

5. **Weekly develop snapshot releases**

   Since last year, we have maintained a buildcache of `develop` at
   https://binaries.spack.io/develop, but the cache can grow to contain so many builds
   as to be unwieldy. When we get a stable `develop` build, we snapshot the release and
   add a corresponding tag to the Spack repository. So, you can use a stack from a
   specific day. There are now tags in the spack repository like:

   * `develop-2023-05-14`
   * `develop-2023-05-18`

   that correspond to build caches like:

   * https://binaries.spack.io/develop-2023-05-14/e4s
   * https://binaries.spack.io/develop-2023-05-18/e4s

   We plan to store these snapshot releases weekly.

6. **Specs in buildcaches can be referenced by hash.**

   * Previously, you could run `spack buildcache list` and see the hashes in
     buildcaches, but referring to them by hash would fail.
   * You can now run commands like `spack spec` and `spack install` and refer to
     buildcache hashes directly, e.g. `spack install /abc123` (#35042)

7. **New package and buildcache index websites**

   Our public websites for searching packages have been completely revamped and updated.
   You can check them out here:

   * *Package Index*: https://packages.spack.io
   * *Buildcache Index*: https://cache.spack.io

   Both are searchable and more interactive than before. Currently major releases are
   shown; UI for browsing `develop` snapshots is coming soon.

8. **Default CMake and Meson build types are now Release**

   Spack has historically defaulted to building with optimization and debugging, but
   packages like `llvm` can be enormous with debug turned on. Our default build type for
   all Spack packages is now `Release` (#36679, #37436). This has a number of benefits:

   * much smaller binaries;
   * higher default optimization level; and
   * defining `NDEBUG` disables assertions, which may lead to further speedups.

   You can still get the old behavior back through requirements and package preferences.

## Other new commands and directives

* `spack checksum` can automatically add new versions to a package (#24532)
* New command: `spack pkg grep` to easily search package files (#34388)
* New `maintainers` directive (#35083)
* Add `spack buildcache push` (alias to `buildcache create`) (#34861)
* Allow using `-j` to control the parallelism of concretization (#37608)
* Add `--exclude` option to `spack external find` (#35013)

## Other new features of note

* editing: add higher-precedence `SPACK_EDITOR` environment variable
* Many YAML formatting improvements from updating `ruamel.yaml` to the latest version
  supporting Python 3.6. (#31091, #24885, #37008).
* Requirements and preferences should not define (non-git) versions (#37687, #37747)
* Environments now store spack version/commit in `spack.lock` (#32801)
* User can specify the name of the `packages` subdirectory in repositories (#36643)
* Add container images supporting RHEL alternatives (#36713)
* make version(...) kwargs explicit (#36998)

## Notable refactors

* buildcache create: reproducible tarballs (#35623)
* Bootstrap most of Spack dependencies using environments (#34029)
* Split `satisfies(..., strict=True/False)` into two functions (#35681)
* spack install: simplify behavior when inside environments (#35206)

## Binary cache and stack updates

* Major simplification of CI boilerplate in stacks (#34272, #36045)
* Many improvements to our CI pipeline's reliability

## Removals, Deprecations, and disablements

* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
* Support for Python 2 was deprecated in `v0.19.0` and has been removed. `v0.20.0` only
  supports Python 3.6 and higher.
* Deprecated target names are no longer recognized by Spack. Use generic names instead:
  * `graviton` is now `cortex_a72`
  * `graviton2` is now `neoverse_n1`
  * `graviton3` is now `neoverse_v1`
* `blacklist` and `whitelist` in module configuration were deprecated in `v0.19.0` and are
  removed in this release. Use `exclude` and `include` instead.
* The `ignore=` parameter of the `extends()` directive has been removed. It was not used by
  any builtin packages and is no longer needed to avoid conflicts in environment views (#35588).
* Support for the old YAML buildcache format has been removed. It was deprecated in `v0.19.0` (#34347).
* `spack find --bootstrap` has been removed. It was deprecated in `v0.19.0`. Use
  `spack --bootstrap find` instead (#33964).
* `spack bootstrap trust` and `spack bootstrap untrust` are now removed, having been
  deprecated in `v0.19.0`. Use `spack bootstrap enable` and `spack bootstrap disable`.
* The `--mirror-name`, `--mirror-url`, and `--directory` options to buildcache and
  mirror commands were deprecated in `v0.19.0` and have now been removed. They have been
  replaced by positional arguments (#37457).
* Deprecate `env:` as top level environment key (#37424)
* deprecate buildcache create --rel, buildcache install --allow-root (#37285)
* Support for very old perl-like spec format strings (e.g., `$_$@$%@+$+$=`) has been
  removed (#37425). This was deprecated in `v0.15` (#10556).

## Notable Bugfixes

* bugfix: don't fetch package metadata for unknown concrete specs (#36990)
* Improve package source code context display on error (#37655)
* Relax environment manifest filename requirements and lockfile identification criteria (#37413)
* `installer.py`: drop build edges of installed packages by default (#36707)
* Bugfix: package requirements with git commits (#35057, #36347)
* Package requirements: allow single specs in requirement lists (#36258)
* conditional variant values: allow boolean (#33939)
* spack uninstall: follow run/link edges on --dependents (#34058)

## Spack community stats

* 7,179 total packages, 499 new since `v0.19.0`
* 329 new Python packages
* 31 new R packages
* 336 people contributed to this release
* 317 committers to packages
* 62 committers to core
````

The pre-existing `# v0.19.1 (2023-02-07)` heading and its `### Spack Bugfixes` section follow, unchanged.
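As a concrete illustration of the `@=` exact-version syntax described in the changelog above, here is a minimal, hypothetical package sketch; the package name, URL, checksums, and patch file are placeholders, not part of this changeset:

```python
# Hypothetical package illustrating `@=` (exact version) vs. `@` (range).
# All names, URLs, checksums, and the patch file here are placeholders.
from spack.package import *


class Mylib(AutotoolsPackage):
    """Example library (hypothetical)."""

    homepage = "https://example.com/mylib"
    url = "https://example.com/mylib-3.2.tar.gz"

    version("3.2.1", sha256="0000000000000000000000000000000000000000000000000000000000000001")
    version("3.2", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    # `when="@3.2"` would also match 3.2.1; `when="@=3.2"` applies the
    # patch to version 3.2 and nothing else.
    patch("fix-3.2.patch", when="@=3.2")
```

Ranges like `@3.2` remain the right default in dependency constraints, as the changelog recommends; `@=` is for the rare case where exactly one version is meant.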
```diff
@@ -143,6 +143,26 @@ The OS that are currently supported are summarized in the table below:
    * - Amazon Linux 2
      - ``amazonlinux:2``
      - ``spack/amazon-linux``
+   * - AlmaLinux 8
+     - ``almalinux:8``
+     - ``spack/almalinux8``
+   * - AlmaLinux 9
+     - ``almalinux:9``
+     - ``spack/almalinux9``
+   * - Rocky Linux 8
+     - ``rockylinux:8``
+     - ``spack/rockylinux8``
+   * - Rocky Linux 9
+     - ``rockylinux:9``
+     - ``spack/rockylinux9``
+   * - Fedora Linux 37
+     - ``fedora:37``
+     - ``spack/fedora37``
+   * - Fedora Linux 38
+     - ``fedora:38``
+     - ``spack/fedora38``

 All the images are tagged with the corresponding release of Spack:
```
```diff
@@ -347,7 +347,7 @@ the Environment and then install the concretized specs.
 (see :ref:`build-jobs`). To speed up environment builds further, independent
 packages can be installed in parallel by launching more Spack instances. For
 example, the following will build at most four packages in parallel using
 three background jobs:

 .. code-block:: console

@@ -395,7 +395,7 @@ version (and other constraints) passed as the spec argument to the

 For packages with ``git`` attributes, git branches, tags, and commits can
 also be used as valid concrete versions (see :ref:`version-specifier`).
 This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
 the ``main`` branch of the package, and ``spack install`` will install from
 that git clone if ``foo`` is in the environment.
 Further development on ``foo`` can be tested by reinstalling the environment,
@@ -589,10 +589,11 @@ user support groups providing a large software stack for their HPC center.

 .. admonition:: Re-concretization of user specs

-   When using *unified* concretization (when possible), the entire set of specs will be
-   re-concretized after any addition of new user specs, to ensure that
-   the environment remains consistent / minimal. When instead unified concretization is
-   disabled, only the new specs will be concretized after any addition.
+   The ``spack concretize`` command without additional arguments will *not* change any
+   previously concretized specs. This may prevent it from finding a solution when using
+   ``unify: true``, and it may prevent it from finding a minimal solution when using
+   ``unify: when_possible``. You can force Spack to ignore the existing concrete environment
+   with ``spack concretize -f``.

 ^^^^^^^^^^^^^
 Spec Matrices
@@ -1121,19 +1122,19 @@ index once every package is pushed. Note how this target uses the generated

    SPACK ?= spack
    BUILDCACHE_DIR = $(CURDIR)/tarballs

    .PHONY: all

    all: push

    include env.mk

    example/push/%: example/install/%
        @mkdir -p $(dir $@)
        $(info About to push $(SPEC) to a buildcache)
        $(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
        @touch $@

    push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
        $(info Updating the buildcache index)
        $(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
```
```diff
@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your

 .. code-block:: console

-   $ module load gcc-4.9.0
+   $ module load gcc/4.9.0
    $ spack compiler find
    ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
    gcc@4.9.0
@@ -35,27 +35,27 @@ showing lots of installed packages:
    $ module avail

    --------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
-   autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
+   autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
-   automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
+   automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
-   binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
+   binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
-   bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
+   bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
-   bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
+   bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
-   bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
+   bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
-   cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
+   cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
-   curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
+   curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
-   expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
+   expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
-   flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
+   flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
-   gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
+   gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
-   gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
+   gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
-   gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
+   gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
-   help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
+   help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj

 The names should look familiar, as they resemble the output from ``spack find``.
 For example, you could type the following command to load the ``cmake`` module:

 .. code-block:: console

-   $ module load cmake-3.7.2-gcc-6.3.0-fowuuby
+   $ module load cmake/3.7.2-gcc-6.3.0-fowuuby

 Neither of these is particularly pretty, easy to remember, or easy to
 type. Luckily, Spack offers many facilities for customizing the module
@@ -779,35 +779,35 @@ cut-and-pasted into a shell script. For example:

    $ spack module tcl loads --dependencies py-numpy git
    # bzip2@1.0.6%gcc@4.9.3=linux-x86_64
-   module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
+   module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
    # ncurses@6.0%gcc@4.9.3=linux-x86_64
-   module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
+   module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
    # zlib@1.2.8%gcc@4.9.3=linux-x86_64
-   module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
+   module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
    # sqlite@3.8.5%gcc@4.9.3=linux-x86_64
-   module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
+   module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
    # readline@6.3%gcc@4.9.3=linux-x86_64
-   module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
+   module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
    # python@3.5.1%gcc@4.9.3=linux-x86_64
-   module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
+   module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
    # py-setuptools@20.5%gcc@4.9.3=linux-x86_64
-   module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
+   module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
    # py-nose@1.3.7%gcc@4.9.3=linux-x86_64
-   module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
+   module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
    # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
-   module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
+   module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
    # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
-   module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
+   module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
    # curl@7.47.1%gcc@4.9.3=linux-x86_64
-   module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
+   module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
    # autoconf@2.69%gcc@4.9.3=linux-x86_64
-   module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
+   module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
    # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
-   module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
+   module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
    # expat@2.1.0%gcc@4.9.3=linux-x86_64
-   module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
+   module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
    # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
-   module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
+   module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd

 The script may be further edited by removing unnecessary modules.
@@ -826,12 +826,12 @@ For example, consider the following on one system:

 .. code-block:: console

    $ module avail
-   linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
+   linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y

    $ spack module tcl loads antlr    # WRONG!
    # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
-   module load antlr-2.7.7-gcc-5.3.0-bdpl46y
+   module load antlr/2.7.7-gcc-5.3.0-bdpl46y

    $ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
    # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
-   module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
+   module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
```
**`lib/spack/external/__init__.py`** (2 changes; vendored)

```diff
@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit 4b1f21802a23b536bbcce73d3c631a566b20e8bd)
+* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)

astunparse
----------------
```
```diff
@@ -2803,7 +2803,7 @@
       "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
     },
     {
-      "versions": "10.2",
+      "versions": "10.2:10.2.99",
       "flags" : "-mcpu=zeus"
     },
     {
```
```diff
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.20.0.dev0"
+__version__ = "0.21.0.dev0"
 spack_version = __version__
```
```diff
@@ -589,7 +589,6 @@ def set_module_variables_for_package(pkg):

     # TODO: make these build deps that can be installed if not found.
     m.make = MakeExecutable("make", jobs)
-    m.gmake = MakeExecutable("gmake", jobs)
     m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
     # TODO: johnwparent: add package or builder support to define these build tools
     # for now there is no entrypoint for builders to define these on their
@@ -1216,6 +1215,9 @@ def child_fun():
     return child_result


+CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
+
+
 def get_package_context(traceback, context=3):
     """Return some context for an error message when the build fails.

@@ -1244,32 +1246,38 @@ def make_stack(tb, stack=None):

     stack = make_stack(traceback)

+    basenames = tuple(base.__name__ for base in CONTEXT_BASES)
     for tb in stack:
         frame = tb.tb_frame
         if "self" in frame.f_locals:
-            # Find the first proper subclass of PackageBase.
+            # Find the first proper subclass of the PackageBase or BaseBuilder, but
+            # don't provide context if the code is actually in the base classes.
             obj = frame.f_locals["self"]
-            if isinstance(obj, spack.package_base.PackageBase):
+            func = getattr(obj, tb.tb_frame.f_code.co_name, "")
+            if func:
+                typename, *_ = func.__qualname__.partition(".")
+
+            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
                 break
     else:
         return None

     # We found obj, the Package implementation we care about.
     # Point out the location in the install method where we failed.
-    lines = [
-        "{0}:{1:d}, in {2}:".format(
-            inspect.getfile(frame.f_code),
-            frame.f_lineno - 1,  # subtract 1 because f_lineno is 0-indexed
-            frame.f_code.co_name,
-        )
-    ]
+    filename = inspect.getfile(frame.f_code)
+    lineno = frame.f_lineno
+    if os.path.basename(filename) == "package.py":
+        # subtract 1 because we inject a magic import at the top of package files.
+        # TODO: get rid of the magic import.
+        lineno -= 1
+
+    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

     # Build a message showing context in the install method.
     sourcelines, start = inspect.getsourcelines(frame)

     # Calculate lineno of the error relative to the start of the function.
-    # Subtract 1 because f_lineno is 0-indexed.
-    fun_lineno = frame.f_lineno - start - 1
+    fun_lineno = lineno - start
     start_ctx = max(0, fun_lineno - context)
     sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
@@ -1365,7 +1373,7 @@ def long_message(self):
     test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
     if os.path.isfile(test_log):
         out.write("\nSee test log for details:\n")
-        out.write("  {0}n".format(test_log))
+        out.write("  {0}\n".format(test_log))

     return out.getvalue()
```
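The frame-selection logic above hinges on one Python idiom: a method looked up via `getattr` has a `__qualname__` that names the class which *defined* it, so code running in a base class can be told apart from a subclass override. A standalone sketch of just that idiom (the `Base`/`Child` classes are illustrative, not Spack's):

```python
# Minimal sketch of the `__qualname__` technique used above: given an object
# and a code name taken from a traceback frame, report which class defined
# the method that is running.


class Base:
    def build(self):
        return "base build"


class Child(Base):
    def install(self):
        return "child install"


def defining_class_name(obj, method_name):
    """Return the name of the class that defined `method_name` on obj."""
    func = getattr(obj, method_name, "")
    if not func:
        return None
    # "Base.build".partition(".") -> ("Base", ".", "build")
    typename, _, _ = func.__qualname__.partition(".")
    return typename


obj = Child()
print(defining_class_name(obj, "build"))    # "Base"  -> base-class code, skip
print(defining_class_name(obj, "install"))  # "Child" -> report context here
```

In the actual change, this check lets `get_package_context` skip frames that are executing inherited `PackageBase`/`BaseBuilder` code and report the first frame running code defined in the package or builder itself.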
@@ -2,6 +2,7 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections.abc
|
||||||
import os
|
import os
|
||||||
from typing import Tuple
|
from typing import Tuple
|
||||||
|
|
||||||
@@ -13,21 +14,24 @@
|
|||||||
from .cmake import CMakeBuilder, CMakePackage
|
from .cmake import CMakeBuilder, CMakePackage
|
||||||
|
|
||||||
|
|
||||||
def cmake_cache_path(name, value, comment=""):
|
def cmake_cache_path(name, value, comment="", force=False):
|
||||||
"""Generate a string for a cmake cache variable"""
|
"""Generate a string for a cmake cache variable"""
|
||||||
return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)
|
force_str = " FORCE" if force else ""
|
||||||
|
return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||||
|
|
||||||
|
|
||||||
def cmake_cache_string(name, value, comment=""):
|
def cmake_cache_string(name, value, comment="", force=False):
|
||||||
"""Generate a string for a cmake cache variable"""
|
"""Generate a string for a cmake cache variable"""
|
||||||
return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)
|
force_str = " FORCE" if force else ""
|
||||||
|
return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||||
|
|
||||||
|
|
||||||
def cmake_cache_option(name, boolean_value, comment=""):
|
def cmake_cache_option(name, boolean_value, comment="", force=False):
|
||||||
"""Generate a string for a cmake configuration option"""
|
"""Generate a string for a cmake configuration option"""
|
||||||
|
|
||||||
value = "ON" if boolean_value else "OFF"
|
value = "ON" if boolean_value else "OFF"
|
||||||
return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
|
force_str = " FORCE" if force else ""
|
||||||
|
return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||||
|
|
||||||
|
|
||||||
class CachedCMakeBuilder(CMakeBuilder):
|
class CachedCMakeBuilder(CMakeBuilder):
|
||||||
@@ -63,6 +67,34 @@ def cache_name(self):
     def cache_path(self):
         return os.path.join(self.pkg.stage.source_path, self.cache_name)

+    # Implement a version of the define_from_variant for Cached packages
+    def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
+        """Return a Cached CMake field from the given variant's value.
+        See define_from_variant in lib/spack/spack/build_systems/cmake.py package
+        """
+
+        if variant is None:
+            variant = cmake_var.lower()
+
+        if variant not in self.pkg.variants:
+            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
+
+        if variant not in self.pkg.spec.variants:
+            return ""
+
+        value = self.pkg.spec.variants[variant].value
+        field = None
+        if isinstance(value, bool):
+            field = cmake_cache_option(cmake_var, value, comment)
+        else:
+            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
+                value = ";".join(str(v) for v in value)
+            else:
+                value = str(value)
+            field = cmake_cache_string(cmake_var, value, comment)
+
+        return field
+
     def initconfig_compiler_entries(self):
         # This will tell cmake to use the Spack compiler wrappers when run
         # through Spack, but use the underlying compiler when run outside of
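A hedged sketch of how a recipe might call the new method; the package and variant names below are invented for illustration:

```python
# Hypothetical CachedCMakePackage using define_cmake_cache_from_variant;
# only the method itself comes from the diff above.
from spack.package import CachedCMakePackage, variant


class Hydro(CachedCMakePackage):
    variant("mpi", default=True, description="Enable MPI support")

    def initconfig_package_entries(self):
        # With +mpi in the spec this returns 'set(ENABLE_MPI ON CACHE BOOL "")\n'
        return [self.define_cmake_cache_from_variant("ENABLE_MPI", "mpi")]
```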
@@ -130,6 +162,17 @@ def initconfig_compiler_entries(self):
             libs_string = libs_format_string.format(lang)
             entries.append(cmake_cache_string(libs_string, libs_flags))

+        # Set the generator in the cached config
+        if self.spec.satisfies("generator=make"):
+            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
+        if self.spec.satisfies("generator=ninja"):
+            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
+            entries.append(
+                cmake_cache_string(
+                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
+                )
+            )
+
         return entries

     def initconfig_mpi_entries(self):
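For context, a sketch of the cache lines this branch appends for a ninja build; the ninja path below is made up:

```python
# Illustrative output only; in the real code the path comes from
# spec["ninja"].prefix.bin.
print(cmake_cache_string("CMAKE_GENERATOR", "Ninja"), end="")
# set(CMAKE_GENERATOR "Ninja" CACHE STRING "")
print(cmake_cache_string("CMAKE_MAKE_PROGRAM", "/spack/opt/ninja/bin/ninja"), end="")
# set(CMAKE_MAKE_PROGRAM "/spack/opt/ninja/bin/ninja" CACHE STRING "")
```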
@@ -195,26 +238,57 @@ def initconfig_hardware_entries(self):
             "#------------------{0}\n".format("-" * 60),
         ]

+        # Provide standard CMake arguments for dependent CachedCMakePackages
         if spec.satisfies("^cuda"):
             entries.append("#------------------{0}".format("-" * 30))
             entries.append("# Cuda")
             entries.append("#------------------{0}\n".format("-" * 30))

             cudatoolkitdir = spec["cuda"].prefix
-            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
-            cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
-            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
+            entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
+            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
             entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
+            # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
+            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
+
+            archs = spec.variants["cuda_arch"].value
+            if archs != "none":
+                arch_str = ";".join(archs)
+                entries.append(
+                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
+                )
+
+        if "+rocm" in spec:
+            entries.append("#------------------{0}".format("-" * 30))
+            entries.append("# ROCm")
+            entries.append("#------------------{0}\n".format("-" * 30))
+
+            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
+            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
+            entries.append(
+                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
+            )
+            archs = self.spec.variants["amdgpu_target"].value
+            if archs != "none":
+                arch_str = ";".join(archs)
+                entries.append(
+                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
+                )
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
+                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))

         return entries

     def std_initconfig_entries(self):
+        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
+        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
             "#------------------{0}".format("-" * 60),
             "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
             "#------------------{0}\n".format("-" * 60),
+            cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
         ]

     def initconfig_package_entries(self):
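Two small conversions drive this hunk: multi-valued GPU arch variants are joined with ";", and CMAKE_PREFIX_PATH is rewritten from the OS path separator to CMake's list separator. A sketch with illustrative values:

```python
# cuda_arch / amdgpu_target are multi-valued variants; CMake wants a ";" list:
assert ";".join(("70", "80")) == "70;80"

# CMAKE_PREFIX_PATH arrives with os.pathsep separators (":" on POSIX) and is
# rewritten for the cache file; shown here with the POSIX separator:
assert "/opt/a:/opt/b".replace(":", ";") == "/opt/a;/opt/b"
```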
@@ -7,7 +7,7 @@

 import llnl.util.lang as lang

-from spack.directives import extends, maintainers
+from spack.directives import extends

 from .generic import GenericBuilder, Package

@@ -71,8 +71,6 @@ class RPackage(Package):

     GenericBuilder = RBuilder

-    maintainers("glennpj")
-
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "RPackage"
@@ -10,6 +10,7 @@
 from llnl.util.filesystem import find, join_path, working_dir

 import spack.builder
+import spack.install_test
 import spack.package_base
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
@@ -30,8 +31,8 @@ class SIPPackage(spack.package_base.PackageBase):
     #: Name of private sip module to install alongside package
     sip_module = "sip"

-    #: Callback names for install-time test
-    install_time_test_callbacks = ["test"]
+    #: Callback names for install-time testing
+    install_time_test_callbacks = ["test_imports"]
     #: Legacy buildsystem attribute used to deserialize and install old specs
     legacy_buildsystem = "sip"

@@ -87,18 +88,20 @@ def python(self, *args, **kwargs):
         """The python ``Executable``."""
         inspect.getmodule(self).python(*args, **kwargs)

-    def test(self):
+    def test_imports(self):
         """Attempts to import modules of the installed package."""

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
+        python = inspect.getmodule(self).python
         for module in self.import_modules:
-            self.run_test(
-                inspect.getmodule(self).python.path,
-                ["-c", "import {0}".format(module)],
+            with spack.install_test.test_part(
+                self,
+                "test_imports_{0}".format(module),
                 purpose="checking import of {0}".format(module),
                 work_dir="spack-test",
-            )
+            ):
+                python("-c", "import {0}".format(module))


 @spack.builder.builder("sip")
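A hedged sketch of what the rewrite means for a recipe: each entry of import_modules becomes its own named test part (the package and module names here are invented):

```python
# Hypothetical SIP package; with the change above, `spack test run` yields
# one part per module, e.g. "test_imports_foo" and "test_imports_foo.bar",
# instead of a single monolithic import test.
from spack.package import SIPPackage


class PyFoo(SIPPackage):
    import_modules = ["foo", "foo.bar"]
```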
@@ -756,6 +756,7 @@ def generate_gitlab_ci_yaml(
     # Get the joined "ci" config with all of the current scopes resolved
     ci_config = cfg.get("ci")

+    config_deprecated = False
     if not ci_config:
         tty.warn("Environment does not have `ci` a configuration")
         gitlabci_config = yaml_root.get("gitlab-ci")
@@ -768,6 +769,7 @@ def generate_gitlab_ci_yaml(
         )
         translate_deprecated_config(gitlabci_config)
         ci_config = gitlabci_config
+        config_deprecated = True

     # Default target is gitlab...and only target is gitlab
     if not ci_config.get("target", "gitlab") == "gitlab":
@@ -831,6 +833,14 @@ def generate_gitlab_ci_yaml(
     # Values: "spack_pull_request", "spack_protected_branch", or not set
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

+    copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
+    if copy_only_pipeline and config_deprecated:
+        tty.warn(
+            "SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
+            "deprecated ci configuration, a no-op pipeline will be generated\n",
+            "instead.",
+        )
+
     if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
         tty.die("spack ci generate requires an env containing a mirror")

@@ -1085,7 +1095,7 @@ def generate_gitlab_ci_yaml(
             raise AttributeError

         def main_script_replacements(cmd):
-            return cmd.replace("{env_dir}", concrete_env_dir)
+            return cmd.replace("{env_dir}", rel_concrete_env_dir)

         job_object["script"] = _unpack_script(
             job_object["script"], op=main_script_replacements
@@ -1207,7 +1217,7 @@ def main_script_replacements(cmd):
                 ).format(c_spec, release_spec)
                 tty.debug(debug_msg)

-            if prune_dag and not rebuild_spec and spack_pipeline_type != "spack_copy_only":
+            if prune_dag and not rebuild_spec and not copy_only_pipeline:
                 tty.debug(
                     "Pruning {0}/{1}, does not need rebuild.".format(
                         release_spec.name, release_spec.dag_hash()
@@ -1298,7 +1308,7 @@ def main_script_replacements(cmd):
                 max_length_needs = length_needs
                 max_needs_job = job_name

-        if spack_pipeline_type != "spack_copy_only":
+        if not copy_only_pipeline:
             output_object[job_name] = job_object
             job_id += 1

@@ -1330,7 +1340,7 @@ def main_script_replacements(cmd):
         "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
     }

-    if spack_pipeline_type == "spack_copy_only":
+    if copy_only_pipeline and not config_deprecated:
         stage_names.append("copy")
         sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
         sync_job["stage"] = "copy"
@@ -1474,12 +1484,18 @@ def main_script_replacements(cmd):
         sorted_output = cinw.needs_to_dependencies(sorted_output)
     else:
         # No jobs were generated
-        tty.debug("No specs to rebuild, generating no-op job")
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]

         noop_job["retry"] = service_job_retries

-        sorted_output = {"no-specs-to-rebuild": noop_job}
+        if copy_only_pipeline and config_deprecated:
+            tty.debug("Generating no-op job as copy-only is unsupported here.")
+            noop_job["script"] = [
+                'echo "copy-only pipelines are not supported with deprecated ci configs"'
+            ]
+            sorted_output = {"unsupported-copy": noop_job}
+        else:
+            tty.debug("No specs to rebuild, generating no-op job")
+            sorted_output = {"no-specs-to-rebuild": noop_job}

     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
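The gating above reduces to two booleans; a sketch of the outcomes (assignments illustrative):

```python
spack_pipeline_type = "spack_copy_only"   # from SPACK_PIPELINE_TYPE
config_deprecated = True                  # legacy gitlab-ci config in use

copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
# copy_only_pipeline and config_deprecated      -> warn, "unsupported-copy" no-op job
# copy_only_pipeline and not config_deprecated  -> generate the "copy" stage
# not copy_only_pipeline                        -> normal rebuild pipeline
```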
@@ -347,7 +347,7 @@ def iter_groups(specs, indent, all_headers):
                 spack.spec.architecture_color,
                 architecture if architecture else "no arch",
                 spack.spec.compiler_color,
-                compiler if compiler else "no compiler",
+                f"{compiler.display_str}" if compiler else "no compiler",
             )

     # Sometimes we want to display specs that are not yet concretized.
@@ -98,7 +98,7 @@ def compiler_find(args):
         config = spack.config.config
         filename = config.get_config_filename(args.scope, "compilers")
         tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
-        colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
+        colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
     else:
         tty.msg("Found no new compilers")
     tty.msg("Compilers are defined in the following files:")
@@ -112,13 +112,13 @@ def compiler_remove(args):
         tty.die("No compilers match spec %s" % cspec)
     elif not args.all and len(compilers) > 1:
         tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
-        colify(reversed(sorted([c.spec for c in compilers])), indent=4)
+        colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
         tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
         sys.exit(1)

     for compiler in compilers:
         spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
-        tty.msg("Removed compiler %s" % compiler.spec)
+        tty.msg("Removed compiler %s" % compiler.spec.display_str)


 def compiler_info(args):
@@ -130,7 +130,7 @@ def compiler_info(args):
         tty.die("No compilers match spec %s" % cspec)
     else:
         for c in compilers:
-            print(str(c.spec) + ":")
+            print(c.spec.display_str + ":")
             print("\tpaths:")
             for cpath in ["cc", "cxx", "f77", "fc"]:
                 print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
@@ -188,7 +188,7 @@ def compiler_list(args):
             os_str += "-%s" % target
         cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
         tty.hline(colorize(cname), char="-")
-        colify(reversed(sorted(c.spec for c in compilers)))
+        colify(reversed(sorted(c.spec.display_str for c in compilers)))


 def compiler(parser, args):
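The display_str property is assumed here to render compiler specs without the "@=" concrete-version marker from the new spec syntax; a hedged sketch of the intended difference:

```python
# Assumed behavior, inferred from the substitutions above (not verified
# against the spec.py implementation):
import spack.spec

cs = spack.spec.CompilerSpec("gcc@=12.2.0")
print(str(cs))          # assumed: "gcc@=12.2.0"
print(cs.display_str)   # assumed: "gcc@12.2.0", the reader-friendly form
```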
@@ -302,7 +302,7 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None

-    _env_create(
+    env = _env_create(
         args.create_env,
         init_file=args.envfile,
         dir=args.dir,
@@ -310,6 +310,9 @@ def env_create(args):
         keep_relative=args.keep_relative,
     )

+    # Generate views, only really useful for environments created from spack.lock files.
+    env.regenerate_views()
+

 def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
     """Create a new environment, with an optional yaml description.
@@ -79,6 +79,12 @@ def setup_parser(subparser):
     read_cray_manifest.add_argument(
         "--directory", default=None, help="specify a directory storing a group of manifest files"
     )
+    read_cray_manifest.add_argument(
+        "--ignore-default-dir",
+        action="store_true",
+        default=False,
+        help="ignore the default directory of manifest files",
+    )
     read_cray_manifest.add_argument(
         "--dry-run",
         action="store_true",
@@ -177,11 +183,16 @@ def external_read_cray_manifest(args):
         manifest_directory=args.directory,
         dry_run=args.dry_run,
         fail_on_error=args.fail_on_error,
+        ignore_default_dir=args.ignore_default_dir,
     )


 def _collect_and_consume_cray_manifest_files(
-    manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
+    manifest_file=None,
+    manifest_directory=None,
+    dry_run=False,
+    fail_on_error=False,
+    ignore_default_dir=False,
 ):
     manifest_files = []
     if manifest_file:
@@ -191,7 +202,7 @@ def _collect_and_consume_cray_manifest_files(
     if manifest_directory:
         manifest_dirs.append(manifest_directory)

-    if os.path.isdir(cray_manifest.default_path):
+    if not ignore_default_dir and os.path.isdir(cray_manifest.default_path):
         tty.debug(
             "Cray manifest path {0} exists: collecting all files to read.".format(
                 cray_manifest.default_path
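A minimal sketch of the new keyword wiring (the directory value is invented); setting the flag skips the scan of cray_manifest.default_path:

```python
# Hypothetical call mirroring what the CLI now does with
# `spack external read-cray-manifest --ignore-default-dir`:
_collect_and_consume_cray_manifest_files(
    manifest_directory="/my/manifests",   # from --directory (illustrative)
    ignore_default_dir=True,              # from the new flag
)
```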
@@ -116,21 +116,23 @@ def one_spec_or_raise(specs):


 def check_module_set_name(name):
-    modules_config = spack.config.get("modules")
-    valid_names = set(
-        [
-            key
-            for key, value in modules_config.items()
-            if isinstance(value, dict) and value.get("enable", [])
-        ]
-    )
-    if "enable" in modules_config and modules_config["enable"]:
-        valid_names.add("default")
-
-    if name not in valid_names:
-        msg = "Cannot use invalid module set %s." % name
-        msg += " Valid module set names are %s" % list(valid_names)
-        raise spack.config.ConfigError(msg)
+    modules = spack.config.get("modules")
+    if name != "prefix_inspections" and name in modules:
+        return
+
+    names = [k for k in modules if k != "prefix_inspections"]
+
+    if not names:
+        raise spack.config.ConfigError(
+            f"Module set configuration is missing. Cannot use module set '{name}'"
+        )
+
+    pretty_names = "', '".join(names)
+
+    raise spack.config.ConfigError(
+        f"Cannot use invalid module set '{name}'.",
+        f"Valid module set names are: '{pretty_names}'.",
+    )


 _missing_modules_warning = (
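A behavior sketch of the rewritten check, assuming a small modules config (shape illustrative): any top-level key other than prefix_inspections now counts as a module set.

```python
modules = {"default": {"lmod": {}}, "prefix_inspections": {"bin": ["PATH"]}}

# "default" passes: it is a key other than "prefix_inspections".
# "foo" (or "prefix_inspections" itself) raises spack.config.ConfigError,
# listing the valid names:
valid = [k for k in modules if k != "prefix_inspections"]
assert valid == ["default"]
```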
@@ -25,7 +25,7 @@


 # tutorial configuration parameters
-tutorial_branch = "releases/v0.19"
+tutorial_branch = "releases/v0.20"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
@@ -112,36 +112,26 @@ def _to_dict(compiler):
 def get_compiler_config(scope=None, init_config=True):
     """Return the compiler configuration for the specified architecture."""

-    def init_compiler_config():
-        """Compiler search used when Spack has no compilers."""
-        compilers = find_compilers()
-        compilers_dict = []
-        for compiler in compilers:
-            compilers_dict.append(_to_dict(compiler))
-        spack.config.set("compilers", compilers_dict, scope=scope)
-
-    config = spack.config.get("compilers", scope=scope)
-    # Update the configuration if there are currently no compilers
-    # configured. Avoid updating automatically if there ARE site
-    # compilers configured but no user ones.
-    if not config and init_config:
-        if scope is None:
-            # We know no compilers were configured in any scope.
-            init_compiler_config()
-            config = spack.config.get("compilers", scope=scope)
-        elif scope == "user":
-            # Check the site config and update the user config if
-            # nothing is configured at the site level.
-            site_config = spack.config.get("compilers", scope="site")
-            sys_config = spack.config.get("compilers", scope="system")
-            if not site_config and not sys_config:
-                init_compiler_config()
-                config = spack.config.get("compilers", scope=scope)
-        return config
-    elif config:
-        return config
-    else:
-        return []  # Return empty list which we will later append to.
+    config = spack.config.get("compilers", scope=scope) or []
+    if config or not init_config:
+        return config
+
+    merged_config = spack.config.get("compilers")
+    if merged_config:
+        return config
+
+    _init_compiler_config(scope=scope)
+    config = spack.config.get("compilers", scope=scope)
+    return config
+
+
+def _init_compiler_config(*, scope):
+    """Compiler search used when Spack has no compilers."""
+    compilers = find_compilers()
+    compilers_dict = []
+    for compiler in compilers:
+        compilers_dict.append(_to_dict(compiler))
+    spack.config.set("compilers", compilers_dict, scope=scope)


 def compiler_config_files():
@@ -30,7 +30,7 @@


 def get_valid_fortran_pth(comp_ver):
-    cl_ver = str(comp_ver).split("@")[1]
+    cl_ver = str(comp_ver)
     sort_fn = lambda fc_ver: StrictVersion(fc_ver)
     sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
     for ver in sort_fc_ver:
@@ -75,7 +75,7 @@ class Msvc(Compiler):
     # file based on compiler executable path.

     def __init__(self, *args, **kwargs):
-        new_pth = [pth if pth else get_valid_fortran_pth(args[0]) for pth in args[3]]
+        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
         args[3][:] = new_pth
         super(Msvc, self).__init__(*args, **kwargs)
         if os.getenv("ONEAPI_ROOT"):
@@ -151,7 +151,11 @@ def setup_custom_environment(self, pkg, env):
         arch = arch.replace("-", "_")
         # vcvars can target specific sdk versions, force it to pick up concretized sdk
         # version, if needed by spec
-        sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
+        sdk_ver = (
+            ""
+            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
+            else pkg.spec["win-sdk"].version.string + ".0"
+        )
         # provide vcvars with msvc version selected by concretization,
         # not whatever it happens to pick up on the system (highest available version)
         out = subprocess.check_output(  # novermin
@@ -48,7 +48,8 @@ def translated_compiler_name(manifest_compiler_name):
 def compiler_from_entry(entry):
     compiler_name = translated_compiler_name(entry["name"])
     paths = entry["executables"]
-    version = entry["version"]
+    # to instantiate a compiler class we may need a concrete version:
+    version = "={}".format(entry["version"])
     arch = entry["arch"]
     operating_system = arch["os"]
     target = arch["target"]
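The "=" prefix matters under the new version semantics: "@=X" pins an exact version, while a bare "@X" is a range. A sketch:

```python
entry = {"version": "11.2.0"}
version = "={}".format(entry["version"])
assert version == "=11.2.0"
# "gcc@=11.2.0" selects exactly 11.2.0; "gcc@11.2.0" would be a version
# range that can also admit sub-versions such as 11.2.0.1.
```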
@@ -16,7 +16,7 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -1221,28 +1221,27 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
         old_specs = set(self.user_specs)
         new_specs = set()
         for spec in matches:
-            if spec in list_to_change:
-                try:
-                    list_to_change.remove(spec)
-                    self.update_stale_references(list_name)
-                    new_specs = set(self.user_specs)
-                except spack.spec_list.SpecListError:
-                    # define new specs list
-                    new_specs = set(self.user_specs)
-                    msg = f"Spec '{spec}' is part of a spec matrix and "
-                    msg += f"cannot be removed from list '{list_to_change}'."
-                    if force:
-                        msg += " It will be removed from the concrete specs."
-                        # Mock new specs, so we can remove this spec from concrete spec lists
-                        new_specs.remove(spec)
-                    tty.warn(msg)
-                else:
-                    if list_name == user_speclist_name:
-                        for user_spec in matches:
-                            self.manifest.remove_user_spec(str(user_spec))
-                    else:
-                        for user_spec in matches:
-                            self.manifest.remove_definition(str(user_spec), list_name=list_name)
+            if spec not in list_to_change:
+                continue
+            try:
+                list_to_change.remove(spec)
+                self.update_stale_references(list_name)
+                new_specs = set(self.user_specs)
+            except spack.spec_list.SpecListError:
+                # define new specs list
+                new_specs = set(self.user_specs)
+                msg = f"Spec '{spec}' is part of a spec matrix and "
+                msg += f"cannot be removed from list '{list_to_change}'."
+                if force:
+                    msg += " It will be removed from the concrete specs."
+                    # Mock new specs, so we can remove this spec from concrete spec lists
+                    new_specs.remove(spec)
+                tty.warn(msg)
+            else:
+                if list_name == user_speclist_name:
+                    self.manifest.remove_user_spec(str(spec))
+                else:
+                    self.manifest.remove_definition(str(spec), list_name=list_name)

         # If force, update stale concretized specs
         for spec in old_specs - new_specs:
@@ -1352,6 +1351,10 @@ def concretize(self, force=False, tests=False):
             self.concretized_order = []
             self.specs_by_hash = {}

+        # Remove concrete specs that no longer correlate to a user spec
+        for spec in set(self.concretized_user_specs) - set(self.user_specs):
+            self.deconcretize(spec)
+
         # Pick the right concretization strategy
         if self.unify == "when_possible":
             return self._concretize_together_where_possible(tests=tests)
@@ -1365,67 +1368,120 @@ def concretize(self, force=False, tests=False):
         msg = "concretization strategy not implemented [{0}]"
         raise SpackEnvironmentError(msg.format(self.unify))

-    def _concretize_together_where_possible(self, tests=False):
+    def deconcretize(self, spec):
+        # spec has to be a root of the environment
+        index = self.concretized_user_specs.index(spec)
+        dag_hash = self.concretized_order.pop(index)
+        del self.concretized_user_specs[index]
+
+        # If this was the only user spec that concretized to this concrete spec, remove it
+        if dag_hash not in self.concretized_order:
+            del self.specs_by_hash[dag_hash]
+
+    def _get_specs_to_concretize(
+        self,
+    ) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
+        """Compute specs to concretize for unify:true and unify:when_possible.
+
+        This includes new user specs and any already concretized specs.
+
+        Returns:
+            Tuple of new user specs, user specs to keep, and the specs to concretize.
+
+        """
+        # Exit early if the set of concretized specs is the set of user specs
+        new_user_specs = set(self.user_specs) - set(self.concretized_user_specs)
+        kept_user_specs = set(self.user_specs) & set(self.concretized_user_specs)
+        if not new_user_specs:
+            return new_user_specs, kept_user_specs, []
+
+        concrete_specs_to_keep = [
+            concrete
+            for abstract, concrete in self.concretized_specs()
+            if abstract in kept_user_specs
+        ]
+
+        specs_to_concretize = list(new_user_specs) + concrete_specs_to_keep
+        return new_user_specs, kept_user_specs, specs_to_concretize
+
+    def _concretize_together_where_possible(
+        self, tests: bool = False
+    ) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
         # Avoid cyclic dependency
         import spack.solver.asp

         # Exit early if the set of concretized specs is the set of user specs
-        user_specs_did_not_change = not bool(
-            set(self.user_specs) - set(self.concretized_user_specs)
-        )
-        if user_specs_did_not_change:
+        new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
+        if not new_user_specs:
             return []

-        # Proceed with concretization
+        old_concrete_to_abstract = {
+            concrete: abstract for (abstract, concrete) in self.concretized_specs()
+        }
+
         self.concretized_user_specs = []
         self.concretized_order = []
         self.specs_by_hash = {}

         result_by_user_spec = {}
         solver = spack.solver.asp.Solver()
-        for result in solver.solve_in_rounds(self.user_specs, tests=tests):
+        for result in solver.solve_in_rounds(specs_to_concretize, tests=tests):
             result_by_user_spec.update(result.specs_by_input)

         result = []
         for abstract, concrete in sorted(result_by_user_spec.items()):
+            # If the "abstract" spec is a concrete spec from the previous concretization
+            # translate it back to an abstract spec. Otherwise, keep the abstract spec
+            abstract = old_concrete_to_abstract.get(abstract, abstract)
+            if abstract in new_user_specs:
+                result.append((abstract, concrete))
             self._add_concrete_spec(abstract, concrete)
-            result.append((abstract, concrete))
+
         return result

-    def _concretize_together(self, tests=False):
+    def _concretize_together(
+        self, tests: bool = False
+    ) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
         """Concretization strategy that concretizes all the specs
         in the same DAG.
         """
         # Exit early if the set of concretized specs is the set of user specs
-        user_specs_did_not_change = not bool(
-            set(self.user_specs) - set(self.concretized_user_specs)
-        )
-        if user_specs_did_not_change:
+        new_user_specs, kept_user_specs, specs_to_concretize = self._get_specs_to_concretize()
+        if not new_user_specs:
             return []

-        # Proceed with concretization
         self.concretized_user_specs = []
         self.concretized_order = []
         self.specs_by_hash = {}

         try:
-            concrete_specs = spack.concretize.concretize_specs_together(
-                *self.user_specs, tests=tests
+            concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together(
+                *specs_to_concretize, tests=tests
             )
         except spack.error.UnsatisfiableSpecError as e:
             # "Enhance" the error message for multiple root specs, suggest a less strict
             # form of concretization.
             if len(self.user_specs) > 1:
+                e.message += ". "
+                if kept_user_specs:
+                    e.message += (
+                        "Couldn't concretize without changing the existing environment. "
+                        "If you are ok with changing it, try `spack concretize --force`. "
+                    )
                 e.message += (
-                    ". Consider setting `concretizer:unify` to `when_possible` "
-                    "or `false` to relax the concretizer strictness."
+                    "You could consider setting `concretizer:unify` to `when_possible` "
+                    "or `false` to allow multiple versions of some packages."
                 )
             raise

-        concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
+        # set() | set() does not preserve ordering, even though sets are ordered
+        ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
+        concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)]
         for abstract, concrete in concretized_specs:
             self._add_concrete_spec(abstract, concrete)
-        return concretized_specs
+
+        # zip truncates the longer list, which is exactly what we want here
+        return list(zip(new_user_specs, concrete_specs))

     def _concretize_separately(self, tests=False):
         """Concretization strategy that concretizes separately one
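The set arithmetic behind _get_specs_to_concretize, sketched with strings standing in for Spec objects:

```python
user_specs = {"cmake", "zlib"}            # current roots of the environment
concretized_user_specs = {"zlib"}         # roots already concretized

new_user_specs = user_specs - concretized_user_specs   # {"cmake"}
kept_user_specs = user_specs & concretized_user_specs  # {"zlib"}
# specs_to_concretize = the new abstract roots plus the already-concrete
# spec kept for "zlib", so re-concretization does not silently change the
# existing part of the DAG.
```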
@@ -215,6 +215,31 @@ def print_message(logger: LogType, msg: str, verbose: bool = False):
         tty.info(msg, format="g")


+def overall_status(current_status: "TestStatus", substatuses: List["TestStatus"]) -> "TestStatus":
+    """Determine the overall status based on the current and associated sub status values.
+
+    Args:
+        current_status: current overall status, assumed to default to PASSED
+        substatuses: status of each test part or overall status of each test spec
+
+    Returns:
+        test status encompassing the main test and all subtests
+    """
+    if current_status in [TestStatus.SKIPPED, TestStatus.NO_TESTS, TestStatus.FAILED]:
+        return current_status
+
+    skipped = 0
+    for status in substatuses:
+        if status == TestStatus.FAILED:
+            return status
+        elif status == TestStatus.SKIPPED:
+            skipped += 1
+
+    if skipped and skipped == len(substatuses):
+        return TestStatus.SKIPPED
+
+    return current_status
+
+
 class PackageTest:
     """The class that manages stand-alone (post-install) package tests."""
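The precedence encoded by overall_status, exercised directly (grounded in the function body above):

```python
from spack.install_test import TestStatus, overall_status

assert overall_status(TestStatus.PASSED, [TestStatus.PASSED]) == TestStatus.PASSED
assert overall_status(TestStatus.PASSED, [TestStatus.FAILED]) == TestStatus.FAILED
assert (
    overall_status(TestStatus.PASSED, [TestStatus.SKIPPED, TestStatus.SKIPPED])
    == TestStatus.SKIPPED
)
# A non-PASSED current status short-circuits:
assert overall_status(TestStatus.FAILED, [TestStatus.PASSED]) == TestStatus.FAILED
```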
@@ -308,14 +333,12 @@ def status(self, name: str, status: "TestStatus", msg: Optional[str] = None):
         # to start with the same name) may not have PASSED. This extra
         # check is used to ensure the containing test part is not claiming
         # to have passed when at least one subpart failed.
-        if status == TestStatus.PASSED:
-            for pname, substatus in self.test_parts.items():
-                if pname != part_name and pname.startswith(part_name):
-                    if substatus == TestStatus.FAILED:
-                        print(f"{substatus}: {part_name}{extra}")
-                        self.test_parts[part_name] = substatus
-                        self.counts[substatus] += 1
-                        return
+        substatuses = []
+        for pname, substatus in self.test_parts.items():
+            if pname != part_name and pname.startswith(part_name):
+                substatuses.append(substatus)
+        if substatuses:
+            status = overall_status(status, substatuses)

         print(f"{status}: {part_name}{extra}")
         self.test_parts[part_name] = status
@@ -420,6 +443,26 @@ def summarize(self):
         lines.append(f"{totals:=^80}")
         return lines

+    def write_tested_status(self):
+        """Write the overall status to the tested file.
+
+        If there any test part failures, then the tests failed. If all test
+        parts are skipped, then the tests were skipped. If any tests passed
+        then the tests passed; otherwise, there were not tests executed.
+        """
+        status = TestStatus.NO_TESTS
+        if self.counts[TestStatus.FAILED] > 0:
+            status = TestStatus.FAILED
+        else:
+            skipped = self.counts[TestStatus.SKIPPED]
+            if skipped and self.parts() == skipped:
+                status = TestStatus.SKIPPED
+            elif self.counts[TestStatus.PASSED] > 0:
+                status = TestStatus.PASSED
+
+        with open(self.tested_file, "w") as f:
+            f.write(f"{status.value}\n")
+

 @contextlib.contextmanager
 def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
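The same precedence, restated as a standalone sketch of write_tested_status (counts are illustrative):

```python
counts = {"FAILED": 0, "SKIPPED": 2, "PASSED": 0}  # per-part tallies
parts = sum(counts.values())

if counts["FAILED"]:
    status = "FAILED"
elif counts["SKIPPED"] and counts["SKIPPED"] == parts:
    status = "SKIPPED"          # all parts skipped
elif counts["PASSED"]:
    status = "PASSED"
else:
    status = "NO_TESTS"
assert status == "SKIPPED"
```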
@@ -654,8 +697,9 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
             try:
                 tests = test_functions(spec.package_class)
             except spack.repo.UnknownPackageError:
-                # some virtuals don't have a package
-                tests = []
+                # Some virtuals don't have a package so we don't want to report
+                # them as not having tests when that isn't appropriate.
+                continue

             if len(tests) == 0:
                 tester.status(spec.name, TestStatus.NO_TESTS)
@@ -682,7 +726,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool

     finally:
         if tester.ran_tests():
-            fs.touch(tester.tested_file)
+            tester.write_tested_status()

         # log one more test message to provide a completion timestamp
         # for CDash reporting
@@ -889,20 +933,15 @@ def __call__(self, *args, **kwargs):
                 if remove_directory:
                     shutil.rmtree(test_dir)

-                tested = os.path.exists(self.tested_file_for_spec(spec))
-                if tested:
-                    status = TestStatus.PASSED
-                else:
-                    self.ensure_stage()
-                    if spec.external and not externals:
-                        status = TestStatus.SKIPPED
-                    elif not spec.installed:
-                        status = TestStatus.SKIPPED
-                    else:
-                        status = TestStatus.NO_TESTS
+                status = self.test_status(spec, externals)
                 self.counts[status] += 1

                 self.write_test_result(spec, status)
+
+            except SkipTest:
+                status = TestStatus.SKIPPED
+                self.counts[status] += 1
+                self.write_test_result(spec, TestStatus.SKIPPED)
+
             except BaseException as exc:
                 status = TestStatus.FAILED
                 self.counts[status] += 1
@@ -939,6 +978,31 @@ def __call__(self, *args, **kwargs):
         if failures:
             raise TestSuiteFailure(failures)

+    def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
+        """Determine the overall test results status for the spec.
+
+        Args:
+            spec: instance of the spec under test
+            externals: ``True`` if externals are to be tested, else ``False``
+
+        Returns:
+            the spec's test status if available or ``None``
+        """
+        tests_status_file = self.tested_file_for_spec(spec)
+        if not os.path.exists(tests_status_file):
+            self.ensure_stage()
+            if spec.external and not externals:
+                status = TestStatus.SKIPPED
+            elif not spec.installed:
+                status = TestStatus.SKIPPED
+            else:
+                status = TestStatus.NO_TESTS
+            return status
+
+        with open(tests_status_file, "r") as f:
+            value = (f.read()).strip("\n")
+            return TestStatus(int(value)) if value else TestStatus.NO_TESTS
+
     def ensure_stage(self):
         """Ensure the test suite stage directory exists."""
         if not os.path.exists(self.stage):
@@ -170,17 +170,10 @@ def merge_config_rules(configuration, spec):
     Returns:
         dict: actions to be taken on the spec passed as an argument
     """
-
-    # Get the top-level configuration for the module type we are using
-    module_specific_configuration = copy.deepcopy(configuration)
-
-    # Construct a dictionary with the actions we need to perform on the spec
-    # passed as a parameter
-
     # The keyword 'all' is always evaluated first, all the others are
     # evaluated in order of appearance in the module file
-    spec_configuration = module_specific_configuration.pop("all", {})
-    for constraint, action in module_specific_configuration.items():
+    spec_configuration = copy.deepcopy(configuration.get("all", {}))
+    for constraint, action in configuration.items():
         if spec.satisfies(constraint):
             if hasattr(constraint, "override") and constraint.override:
                 spec_configuration = {}
@@ -200,14 +193,14 @@ def merge_config_rules(configuration, spec):
     # configuration

     # Hash length in module files
-    hash_length = module_specific_configuration.get("hash_length", 7)
+    hash_length = configuration.get("hash_length", 7)
     spec_configuration["hash_length"] = hash_length

-    verbose = module_specific_configuration.get("verbose", False)
+    verbose = configuration.get("verbose", False)
     spec_configuration["verbose"] = verbose

     # module defaults per-package
-    defaults = module_specific_configuration.get("defaults", [])
+    defaults = configuration.get("defaults", [])
     spec_configuration["defaults"] = defaults

     return spec_configuration
@@ -400,7 +393,7 @@ class BaseConfiguration(object):
     querying easier. It needs to be sub-classed for specific module types.
     """

-    default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}
+    default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}

     def __init__(self, spec, module_set_name, explicit=None):
         # Module where type(self) is defined
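The projection change swaps a "-" for a "/" after the package name, so module files gain one directory level; a sketch with invented field values:

```python
fields = dict(name="zlib", version="1.2.13")
old = "{name}-{version}".format(**fields)   # "zlib-1.2.13"  (flat file name)
new = "{name}/{version}".format(**fields)   # "zlib/1.2.13"  (per-package dir)
```

The compiler part of the projection is unchanged.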
@@ -7,7 +7,7 @@
 import itertools
 import os.path
 import posixpath
-from typing import Any, Dict
+from typing import Any, Dict, List

 import llnl.util.lang as lang

@@ -56,7 +56,7 @@ def make_context(spec, module_set_name, explicit):
     return LmodContext(conf)


-def guess_core_compilers(name, store=False):
+def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
     """Guesses the list of core compilers installed in the system.

     Args:
@@ -64,21 +64,19 @@ def guess_core_compilers(name, store=False):
             modules.yaml configuration file

     Returns:
-        List of core compilers, if found, or None
+        List of found core compilers
     """
     core_compilers = []
-    for compiler_config in spack.compilers.all_compilers_config():
+    for compiler in spack.compilers.all_compilers():
         try:
-            compiler = compiler_config["compiler"]
             # A compiler is considered to be a core compiler if any of the
             # C, C++ or Fortran compilers reside in a system directory
             is_system_compiler = any(
-                os.path.dirname(x) in spack.util.environment.SYSTEM_DIRS
-                for x in compiler["paths"].values()
-                if x is not None
+                os.path.dirname(getattr(compiler, x, "")) in spack.util.environment.SYSTEM_DIRS
+                for x in ("cc", "cxx", "f77", "fc")
             )
             if is_system_compiler:
-                core_compilers.append(str(compiler["spec"]))
+                core_compilers.append(compiler.spec)
         except (KeyError, TypeError, AttributeError):
             continue

@@ -89,10 +87,10 @@ def guess_core_compilers(name, store=False):
         modules_cfg = spack.config.get(
             "modules:" + name, {}, scope=spack.config.default_modify_scope()
         )
-        modules_cfg.setdefault("lmod", {})["core_compilers"] = core_compilers
+        modules_cfg.setdefault("lmod", {})["core_compilers"] = [str(x) for x in core_compilers]
         spack.config.set("modules:" + name, modules_cfg, scope=spack.config.default_modify_scope())

-    return core_compilers or None
+    return core_compilers


 class LmodConfiguration(BaseConfiguration):
@@ -104,7 +102,7 @@ class LmodConfiguration(BaseConfiguration):
     default_projections = {"all": posixpath.join("{name}", "{version}")}

     @property
-    def core_compilers(self):
+    def core_compilers(self) -> List[spack.spec.CompilerSpec]:
         """Returns the list of "Core" compilers

         Raises:
@@ -112,14 +110,18 @@ def core_compilers(self):
             specified in the configuration file or the sequence
             is empty
         """
-        value = configuration(self.name).get("core_compilers") or guess_core_compilers(
-            self.name, store=True
-        )
-        if not value:
+        compilers = [
+            spack.spec.CompilerSpec(c) for c in configuration(self.name).get("core_compilers", [])
+        ]
+
+        if not compilers:
+            compilers = guess_core_compilers(self.name, store=True)
+
+        if not compilers:
             msg = 'the key "core_compilers" must be set in modules.yaml'
             raise CoreCompilersNotFoundError(msg)
-        return value
+
+        return compilers

     @property
     def core_specs(self):
@@ -132,6 +134,7 @@ def filter_hierarchy_specs(self):
         return configuration(self.name).get("filter_hierarchy_specs", {})

     @property
+    @lang.memoized
     def hierarchy_tokens(self):
         """Returns the list of tokens that are part of the modulefile
         hierarchy. 'compiler' is always present.
@@ -156,6 +159,7 @@ def hierarchy_tokens(self):
         return tokens

     @property
+    @lang.memoized
     def requires(self):
         """Returns a dictionary mapping all the requirements of this spec
         to the actual provider. 'compiler' is always present among the
@@ -222,6 +226,7 @@ def available(self):
         return available

     @property
+    @lang.memoized
     def missing(self):
         """Returns the list of tokens that are not available."""
         return [x for x in self.hierarchy_tokens if x not in self.available]
@@ -283,16 +288,18 @@ def token_to_path(self, name, value):
|
|||||||
|
|
||||||
# If we are dealing with a core compiler, return 'Core'
|
# If we are dealing with a core compiler, return 'Core'
|
||||||
core_compilers = self.conf.core_compilers
|
core_compilers = self.conf.core_compilers
|
||||||
if name == "compiler" and str(value) in core_compilers:
|
if name == "compiler" and any(
|
||||||
|
spack.spec.CompilerSpec(value).satisfies(c) for c in core_compilers
|
||||||
|
):
|
||||||
return "Core"
|
return "Core"
|
||||||
|
|
||||||
# CompilerSpec does not have an hash, as we are not allowed to
|
# CompilerSpec does not have a hash, as we are not allowed to
|
||||||
# use different flavors of the same compiler
|
# use different flavors of the same compiler
|
||||||
if name == "compiler":
|
if name == "compiler":
|
||||||
return path_part_fmt.format(token=value)
|
return path_part_fmt.format(token=value)
|
||||||
|
|
||||||
# In case the hierarchy token refers to a virtual provider
|
# In case the hierarchy token refers to a virtual provider
|
||||||
# we need to append an hash to the version to distinguish
|
# we need to append a hash to the version to distinguish
|
||||||
# among flavors of the same library (e.g. openblas~openmp vs.
|
# among flavors of the same library (e.g. openblas~openmp vs.
|
||||||
# openblas+openmp)
|
# openblas+openmp)
|
||||||
path = path_part_fmt.format(token=value)
|
path = path_part_fmt.format(token=value)
|
||||||
@@ -313,6 +320,7 @@ def available_path_parts(self):
|
|||||||
return parts
|
return parts
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@lang.memoized
|
||||||
def unlocked_paths(self):
|
def unlocked_paths(self):
|
||||||
"""Returns a dictionary mapping conditions to a list of unlocked
|
"""Returns a dictionary mapping conditions to a list of unlocked
|
||||||
paths.
|
paths.
|
||||||
@@ -424,6 +432,7 @@ def missing(self):
|
|||||||
return self.conf.missing
|
return self.conf.missing
|
||||||
|
|
||||||
@tengine.context_property
|
@tengine.context_property
|
||||||
|
@lang.memoized
|
||||||
def unlocked_paths(self):
|
def unlocked_paths(self):
|
||||||
"""Returns the list of paths that are unlocked unconditionally."""
|
"""Returns the list of paths that are unlocked unconditionally."""
|
||||||
layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
|
layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
|
||||||
|
|||||||
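
Note: the `@lang.memoized` decorators stacked under `@property` above cache each hierarchy computation, so repeated attribute reads during module generation do not recompute tokens. A minimal sketch of the pattern, with an illustrative stand-in for `llnl.util.lang.memoized` (the real implementation differs in detail):

    import functools

    def memoized(func):
        # Cache results keyed on the positional arguments (including self).
        cache = {}

        @functools.wraps(func)
        def wrapper(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

        return wrapper

    class Hierarchy:
        @property
        @memoized  # getter body runs once per instance; later reads hit the cache
        def hierarchy_tokens(self):
            print("computing...")
            return ["compiler", "mpi"]

    h = Hierarchy()
    h.hierarchy_tokens  # prints "computing..."
    h.hierarchy_tokens  # served from the cache

Decorator order matters here: `@property` must be outermost so attribute access invokes the memoized getter.
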
@@ -108,6 +108,5 @@
 # These are just here for editor support; they will be replaced when the build env
 # is set up.
 make = MakeExecutable("make", jobs=1)
-gmake = MakeExecutable("gmake", jobs=1)
 ninja = MakeExecutable("ninja", jobs=1)
 configure = Executable(join_path(".", "configure"))

@@ -2017,7 +2017,8 @@ def test_title(purpose, test_name):
             # stack instead of from traceback.
             # The traceback is truncated here, so we can't use it to
             # traverse the stack.
-            m = "\n".join(spack.build_environment.get_package_context(tb))
+            context = spack.build_environment.get_package_context(tb)
+            m = "\n".join(context) if context else ""
 
         exc = e  # e is deleted after this block
 
@@ -37,7 +37,9 @@
 
 
 def slingshot_network():
-    return os.path.exists("/opt/cray/pe") and os.path.exists("/lib64/libcxi.so")
+    return os.path.exists("/opt/cray/pe") and (
+        os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
+    )
 
 
 def _target_name_from_craype_target_name(name):

@@ -6,7 +6,6 @@
 import itertools
 import os
 import re
-import shutil
 from collections import OrderedDict
 
 import macholib.mach_o
@@ -356,13 +355,7 @@ def _set_elf_rpaths(target, rpaths):
     # Join the paths using ':' as a separator
     rpaths_str = ":".join(rpaths)
 
-    # If we're relocating patchelf itself, make a copy and use it
-    bak_path = None
-    if target.endswith("/bin/patchelf"):
-        bak_path = target + ".bak"
-        shutil.copy(target, bak_path)
-
-    patchelf, output = executable.Executable(bak_path or _patchelf()), None
+    patchelf, output = executable.Executable(_patchelf()), None
     try:
         # TODO: revisit the use of --force-rpath as it might be conditional
         # TODO: if we want to support setting RUNPATH from binary packages
@@ -371,9 +364,6 @@ def _set_elf_rpaths(target, rpaths):
     except executable.ProcessError as e:
         msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
         tty.warn(msg.format(target, e))
-    finally:
-        if bak_path and os.path.exists(bak_path):
-            os.remove(bak_path)
     return output
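
Note: the deleted block guarded one corner case: asking patchelf to rewrite its own binary, which it worked around by patching via a throwaway `.bak` copy. The simplification presumably relies on `_patchelf()` supplying a patchelf executable distinct from the file being patched. A hedged sketch of the retired copy-before-self-patch pattern, generalized with hypothetical names:

    import os
    import shutil
    import subprocess

    def set_rpath(target: str, rpaths: list, patchelf_bin: str) -> None:
        """Sketch only: if the tool would modify its own binary, run a copy of it."""
        run_bin = patchelf_bin
        if os.path.realpath(target) == os.path.realpath(patchelf_bin):
            run_bin = patchelf_bin + ".bak"
            shutil.copy(patchelf_bin, run_bin)  # patch the original via the copy
        try:
            subprocess.run(
                [run_bin, "--force-rpath", "--set-rpath", ":".join(rpaths), target],
                check=True,
            )
        finally:
            if run_bin != patchelf_bin and os.path.exists(run_bin):
                os.remove(run_bin)
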
@@ -1239,7 +1239,7 @@ def get_pkg_class(self, pkg_name):
         try:
             module = importlib.import_module(fullname)
         except ImportError:
-            raise UnknownPackageError(pkg_name)
+            raise UnknownPackageError(fullname)
         except Exception as e:
             msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
             raise RepoError(msg) from e

@@ -614,6 +614,23 @@ def multiple_values_error(self, attribute, pkg):
     def no_value_error(self, attribute, pkg):
         return f'Cannot select a single "{attribute}" for package "{pkg}"'
 
+    def _get_cause_tree(self, cause, conditions, condition_causes, literals, indent=" "):
+        parents = [c for e, c in condition_causes if e == cause]
+        local = "required because %s " % conditions[cause]
+
+        return [indent + local] + [
+            c
+            for parent in parents
+            for c in self._get_cause_tree(
+                parent, conditions, condition_causes, literals, indent=indent + "  "
+            )
+        ]
+
+    def get_cause_tree(self, cause):
+        conditions = dict(extract_args(self.model, "condition"))
+        condition_causes = list(extract_args(self.model, "condition_cause"))
+        return self._get_cause_tree(cause, conditions, condition_causes, [])
+
     def handle_error(self, msg, *args):
         """Handle an error state derived by the solver."""
         if msg == "multiple_values_error":
@@ -622,14 +639,28 @@ def handle_error(self, msg, *args):
         if msg == "no_value_error":
             return self.no_value_error(*args)
 
+        try:
+            idx = args.index("startcauses")
+        except ValueError:
+            msg_args = args
+            cause_args = []
+        else:
+            msg_args = args[:idx]
+            cause_args = args[idx + 1 :]
+
+        msg = msg.format(*msg_args)
+
         # For variant formatting, we sometimes have to construct specs
         # to format values properly. Find/replace all occurances of
         # Spec(...) with the string representation of the spec mentioned
-        msg = msg.format(*args)
         specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
         for spec_str in specs_to_construct:
             msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))
 
+        for cause in set(cause_args):
+            for c in self.get_cause_tree(cause):
+                msg += f"\n{c}"
+
         return msg
 
     def message(self, errors) -> str:
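
Note: solver error facts may now carry a `startcauses` sentinel followed by condition IDs; arguments before the sentinel format the message, those after it seed a recursive "required because ..." tree. A self-contained toy of that flow (the IDs and strings are made up for illustration, not real model output):

    # Toy stand-ins for the clingo model: condition text by ID, plus
    # (effect, cause) edges from condition_cause facts.
    conditions = {1: "hdf5 depends on mpi", 2: "mpich provides mpi when ..."}
    condition_causes = [(2, 1)]  # condition 2 holds because condition 1 holds

    def cause_tree(cause, indent="  "):
        parents = [c for e, c in condition_causes if e == cause]
        lines = [indent + "required because %s" % conditions[cause]]
        for parent in parents:
            lines += cause_tree(parent, indent + "  ")
        return lines

    args = ("mpich", "startcauses", 2)
    idx = args.index("startcauses")
    msg_args, cause_args = args[:idx], args[idx + 1:]
    msg = "Cannot satisfy {0}".format(*msg_args)
    for cause in set(cause_args):
        msg += "\n" + "\n".join(cause_tree(cause))
    print(msg)
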
@@ -775,6 +806,8 @@ def visit(node):
         self.control.load(os.path.join(parent_dir, "concretize.lp"))
         self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
         self.control.load(os.path.join(parent_dir, "display.lp"))
+        if spack.error.debug:
+            self.control.load(os.path.join(parent_dir, "causation.lp"))
         timer.stop("load")
 
         # Grounding is the first step in the solve -- it turns our facts
@@ -835,7 +868,13 @@ def on_model(model):
 
         # print any unknown functions in the model
         for sym in best_model:
-            if sym.name not in ("attr", "error", "opt_criterion"):
+            if sym.name not in (
+                "attr",
+                "error",
+                "opt_criterion",
+                "condition",
+                "condition_cause",
+            ):
                 tty.debug(
                     "UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
                 )
@@ -861,9 +900,9 @@ class SpackSolverSetup(object):
     def __init__(self, tests=False):
         self.gen = None  # set by setup()
 
-        self.declared_versions = {}
-        self.possible_versions = {}
-        self.deprecated_versions = {}
+        self.declared_versions = collections.defaultdict(list)
+        self.possible_versions = collections.defaultdict(set)
+        self.deprecated_versions = collections.defaultdict(set)
 
         self.possible_virtuals = None
         self.possible_compilers = []
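
Note: moving these maps to `collections.defaultdict` in `__init__` lets later code append or add for a package without first checking for the key — the old plain dicts were re-created inside `build_version_dict` (that re-initialization is removed further down). A quick illustration:

    import collections

    declared_versions = collections.defaultdict(list)
    possible_versions = collections.defaultdict(set)

    # No key-existence checks needed: first access creates the container.
    declared_versions["zlib"].append("1.2.13")
    possible_versions["zlib"].add("1.2.13")

    # The plain-dict equivalent needs setdefault (or an explicit check):
    plain = {}
    plain.setdefault("zlib", []).append("1.2.13")
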
@@ -1266,7 +1305,11 @@ def package_provider_rules(self, pkg):
             for when in whens:
                 msg = "%s provides %s when %s" % (pkg.name, provided, when)
                 condition_id = self.condition(when, provided, pkg.name, msg)
-                self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
+                self.gen.fact(
+                    fn.imposed_constraint(
+                        condition_id, "virtual_condition_holds", pkg.name, provided.name
+                    )
+                )
             self.gen.newline()
 
     def package_dependencies_rules(self, pkg):
@@ -1287,16 +1330,25 @@ def package_dependencies_rules(self, pkg):
             if not deptypes:
                 continue
 
-            msg = "%s depends on %s" % (pkg.name, dep.spec.name)
+            msg = "%s depends on %s" % (pkg.name, dep.spec)
             if cond != spack.spec.Spec():
                 msg += " when %s" % cond
 
             condition_id = self.condition(cond, dep.spec, pkg.name, msg)
-            self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))
+            self.gen.fact(fn.condition_requirement(condition_id, "spack_installed", pkg.name))
 
             for t in sorted(deptypes):
                 # there is a declared dependency of type t
-                self.gen.fact(fn.dependency_type(condition_id, t))
+                self.gen.fact(
+                    fn.imposed_constraint(
+                        condition_id, "dependency_holds", pkg.name, dep.spec.name, t
+                    )
+                )
+            self.gen.fact(
+                fn.imposed_constraint(
+                    condition_id, "virtual_node" if dep.spec.virtual else "node", dep.spec.name
+                )
+            )
 
             self.gen.newline()
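
Note: the refactor replaces bespoke solver facts (`provider_condition`, `dependency_condition`, `dependency_type`) with the generic `condition_requirement`/`imposed_constraint` pair: a condition holds when all of its requirements are present, and a holding condition asserts its imposed constraints as `attr` facts. A toy Python evaluation of that encoding, over hypothetical fact tables (not the real ASP semantics):

    # Toy facts keyed by condition id.
    condition_requirements = {7: [("spack_installed", "hdf5")]}
    imposed_constraints = {
        7: [("dependency_holds", "hdf5", "mpi", "link"), ("virtual_node", "mpi")]
    }

    attrs = {("spack_installed", "hdf5")}  # attrs currently true in the model

    def fire(cond_id):
        # a condition holds when all of its requirements are present
        if all(req in attrs for req in condition_requirements.get(cond_id, [])):
            attrs.update(imposed_constraints.get(cond_id, []))

    fire(7)
    assert ("virtual_node", "mpi") in attrs
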
@@ -1450,7 +1502,11 @@ def external_packages(self):
             for local_idx, spec in enumerate(external_specs):
                 msg = "%s available as external when satisfying %s" % (spec.name, spec)
                 condition_id = self.condition(spec, msg=msg)
-                self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
+                self.gen.fact(
+                    fn.imposed_constraint(
+                        condition_id, "external_conditions_hold", pkg_name, local_idx
+                    )
+                )
                 self.possible_versions[spec.name].add(spec.version)
                 self.gen.newline()
 
@@ -1669,9 +1725,34 @@ class Body(object):
                 if concrete_build_deps or dtype != "build":
                     clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))
 
-                    # Ensure Spack will not coconcretize this with another provider
-                    # for the same virtual
-                    for virtual in dep.package.virtuals_provided:
+                    # TODO: We have to look up info from package.py here, but we'd
+                    # TODO: like to avoid this entirely. We should not need to look
+                    # TODO: up potentially wrong info if we have virtual edge info.
+                    try:
+                        try:
+                            pkg = dep.package
+                        except spack.repo.UnknownNamespaceError:
+                            # Try to look up the package of the same name and use its
+                            # providers. This is as good as we can do without edge info.
+                            pkg_class = spack.repo.path.get_pkg_class(dep.name)
+                            spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
+                            pkg = pkg_class(spec)
+
+                        virtuals = pkg.virtuals_provided
+
+                    except spack.repo.UnknownPackageError:
+                        # Skip virtual node constriants for renamed/deleted packages,
+                        # so their binaries can still be installed.
+                        # NOTE: with current specs (which lack edge attributes) this
+                        # can allow concretizations with two providers, but it's unlikely.
+                        continue
+
+                    # Don't concretize with two providers of the same virtual.
+                    # See above for exception for unknown packages.
+                    # TODO: we will eventually record provider information on edges,
+                    # TODO: which avoids the need for the package lookup above.
+                    for virtual in virtuals:
                         clauses.append(fn.attr("virtual_node", virtual.name))
                         clauses.append(fn.provider(dep.name, virtual.name))
 
@@ -1697,10 +1778,6 @@ class Body(object):
 
     def build_version_dict(self, possible_pkgs):
         """Declare any versions in specs not declared in packages."""
-        self.declared_versions = collections.defaultdict(list)
-        self.possible_versions = collections.defaultdict(set)
-        self.deprecated_versions = collections.defaultdict(set)
-
         packages_yaml = spack.config.get("packages")
         packages_yaml = _normalize_packages_yaml(packages_yaml)
         for pkg_name in possible_pkgs:
@@ -1734,13 +1811,47 @@ def key_fn(item):
             # All the preferred version from packages.yaml, versions in external
             # specs will be computed later
             version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
-            for idx, v in enumerate(version_preferences):
-                # v can be a string so force it into an actual version for comparisons
-                ver = vn.Version(v)
+            version_defs = []
+            pkg_class = spack.repo.path.get_pkg_class(pkg_name)
+            for vstr in version_preferences:
+                v = vn.ver(vstr)
+                if isinstance(v, vn.GitVersion):
+                    version_defs.append(v)
+                else:
+                    satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
+                    # Amongst all defined versions satisfying this specific
+                    # preference, the highest-numbered version is the
+                    # most-preferred: therefore sort satisfying versions
+                    # from greatest to least
+                    version_defs.extend(sorted(satisfying_versions, reverse=True))
+
+            for weight, vdef in enumerate(llnl.util.lang.dedupe(version_defs)):
                 self.declared_versions[pkg_name].append(
-                    DeclaredVersion(version=ver, idx=idx, origin=Provenance.PACKAGES_YAML)
+                    DeclaredVersion(version=vdef, idx=weight, origin=Provenance.PACKAGES_YAML)
                 )
-                self.possible_versions[pkg_name].add(ver)
+                self.possible_versions[pkg_name].add(vdef)
+
+    def _check_for_defined_matching_versions(self, pkg_class, v):
+        """Given a version specification (which may be a concrete version,
+        range, etc.), determine if any package.py version declarations
+        or externals define a version which satisfies it.
+
+        This is primarily for determining whether a version request (e.g.
+        version preferences, which should not themselves define versions)
+        refers to a defined version.
+
+        This function raises an exception if no satisfying versions are
+        found.
+        """
+        pkg_name = pkg_class.name
+        satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
+        satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
+        if not satisfying_versions:
+            raise spack.config.ConfigError(
+                "Preference for version {0} does not match any version"
+                " defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
+            )
+        return satisfying_versions
 
     def add_concrete_versions_from_specs(self, specs, origin):
         """Add concrete versions to possible versions from lists of CLI/dev specs."""
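
Note: preferences no longer mint new versions. Each `packages.yaml` preference must match a version already defined in `package.py` or an external, and the matches are weighted in preference order, highest satisfying version first. A toy sketch of that weighting, with crude stand-ins for `vn.ver` satisfaction and `llnl.util.lang.dedupe`:

    defined = ["1.2.13", "1.2.12", "1.1.0"]  # versions defined in package.py
    preferences = ["1.2", "1.1.0"]           # packages.yaml preferences, in order

    def satisfies(version, preference):
        # crude stand-in for Spack's version-range satisfaction
        return version == preference or version.startswith(preference + ".")

    version_defs = []
    for pref in preferences:
        matches = sorted((v for v in defined if satisfies(v, pref)), reverse=True)
        if not matches:
            raise ValueError(f"Preference {pref} matches no defined version")
        version_defs.extend(matches)

    # first occurrence wins; enumerate() position becomes the solver weight
    seen, weighted = set(), []
    for weight, v in enumerate(v for v in version_defs if not (v in seen or seen.add(v))):
        weighted.append((v, weight))
    print(weighted)  # [('1.2.13', 0), ('1.2.12', 1), ('1.1.0', 2)]
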
@@ -2173,14 +2284,6 @@ def setup(self, driver, specs, reuse=None):
         # get possible compilers
         self.possible_compilers = self.generate_possible_compilers(specs)
 
-        # traverse all specs and packages to build dict of possible versions
-        self.build_version_dict(possible)
-        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
-        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
-
-        req_version_specs = _get_versioned_specs_from_pkg_requirements()
-        self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
-
         self.gen.h1("Concrete input spec definitions")
         self.define_concrete_input_specs(specs, possible)
 
@@ -2208,6 +2311,14 @@ def setup(self, driver, specs, reuse=None):
         self.provider_requirements()
         self.external_packages()
 
+        # traverse all specs and packages to build dict of possible versions
+        self.build_version_dict(possible)
+        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
+        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
+
+        req_version_specs = self._get_versioned_specs_from_pkg_requirements()
+        self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
+
         self.gen.h1("Package Constraints")
         for pkg in sorted(self.pkgs):
             self.gen.h2("Package rules: %s" % pkg)
@@ -2239,70 +2350,106 @@ def setup(self, driver, specs, reuse=None):
         self.define_target_constraints()
 
     def literal_specs(self, specs):
-        for idx, spec in enumerate(specs):
+        for spec in specs:
             self.gen.h2("Spec: %s" % str(spec))
-            self.gen.fact(fn.literal(idx))
 
-            self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
+            # cannot use self.condition because it requires condition requirements
+            condition_id = next(self._condition_id_counter)
+            self.gen.fact(fn.condition(condition_id, "%s is provided as input spec" % spec))
+            self.gen.fact(fn.literal(condition_id))
+
+            self.gen.fact(fn.condition_requirement(condition_id, "literal_solved", condition_id))
+
+            self.gen.fact(
+                fn.imposed_constraint(
+                    condition_id, "virtual_root" if spec.virtual else "root", spec.name
+                )
+            )
+
             for clause in self.spec_clauses(spec):
-                self.gen.fact(fn.literal(idx, *clause.args))
+                self.gen.fact(fn.imposed_constraint(condition_id, *clause.args))
                 if clause.args[0] == "variant_set":
                     self.gen.fact(
-                        fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
+                        fn.imposed_constraint(
+                            condition_id, "variant_default_value_from_cli", *clause.args[1:]
+                        )
                     )
 
         if self.concretize_everything:
             self.gen.fact(fn.concretize_everything())
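
Note: input specs are now ordinary conditions. Each literal condition requires its own `literal_solved` atom — which clingo is free to choose, see the concretize.lp hunks below — and imposes the spec's clauses as constraints. A toy Python sketch of the fact shapes being generated (hypothetical data, mirroring `literal_specs` above):

    import itertools

    condition_id_counter = itertools.count()
    facts = []

    def literal_condition(spec_name, clauses):
        """Encode one input spec as a condition (toy version of literal_specs)."""
        cid = next(condition_id_counter)
        facts.append(("condition", cid, f"{spec_name} is provided as input spec"))
        facts.append(("literal", cid))
        # the condition holds exactly when the solver chooses to solve this literal
        facts.append(("condition_requirement", cid, "literal_solved", cid))
        facts.append(("imposed_constraint", cid, "root", spec_name))
        for clause in clauses:
            facts.append(("imposed_constraint", cid) + clause)

    literal_condition("hdf5", [("node_version_satisfies", "hdf5", "1.12:")])
    for f in facts:
        print(f)
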
+    def _get_versioned_specs_from_pkg_requirements(self):
+        """If package requirements mention versions that are not mentioned
+        elsewhere, then we need to collect those to mark them as possible
+        versions.
+        """
+        req_version_specs = list()
+        config = spack.config.get("packages")
+        for pkg_name, d in config.items():
+            if pkg_name == "all":
+                continue
+            if "require" in d:
+                req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
+        return req_version_specs
+
+    def _specs_from_requires(self, pkg_name, section):
+        """Collect specs from requirements which define versions (i.e. those that
+        have a concrete version). Requirements can define *new* versions if
+        they are included as part of an equivalence (hash=number) but not
+        otherwise.
+        """
+        if isinstance(section, str):
+            spec = spack.spec.Spec(section)
+            if not spec.name:
+                spec.name = pkg_name
+            extracted_specs = [spec]
+        else:
+            spec_strs = []
+            for spec_group in section:
+                if isinstance(spec_group, str):
+                    spec_strs.append(spec_group)
+                else:
+                    # Otherwise it is an object. The object can contain a single
+                    # "spec" constraint, or a list of them with "any_of" or
+                    # "one_of" policy.
+                    if "spec" in spec_group:
+                        new_constraints = [spec_group["spec"]]
+                    else:
+                        key = "one_of" if "one_of" in spec_group else "any_of"
+                        new_constraints = spec_group[key]
+                    spec_strs.extend(new_constraints)
+
+            extracted_specs = []
+            for spec_str in spec_strs:
+                spec = spack.spec.Spec(spec_str)
+                if not spec.name:
+                    spec.name = pkg_name
+                extracted_specs.append(spec)
+
+        version_specs = []
+        for spec in extracted_specs:
+            if spec.versions.concrete:
+                # Note: this includes git versions
+                version_specs.append(spec)
+                continue
+
+            # Prefer spec's name if it exists, in case the spec is
+            # requiring a specific implementation inside of a virtual section
+            # e.g. packages:mpi:require:openmpi@4.0.1
+            pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
+            satisfying_versions = self._check_for_defined_matching_versions(
+                pkg_class, spec.versions
+            )
+
+            # Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
+            # will end up here
+            ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
+            vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
+            version_specs.extend(vspecs)
+
+        for spec in version_specs:
+            spec.attach_git_version_lookup()
+        return version_specs
 
-def _get_versioned_specs_from_pkg_requirements():
-    """If package requirements mention versions that are not mentioned
-    elsewhere, then we need to collect those to mark them as possible
-    versions.
-    """
-    req_version_specs = list()
-    config = spack.config.get("packages")
-    for pkg_name, d in config.items():
-        if pkg_name == "all":
-            continue
-        if "require" in d:
-            req_version_specs.extend(_specs_from_requires(pkg_name, d["require"]))
-    return req_version_specs
-
-
-def _specs_from_requires(pkg_name, section):
-    if isinstance(section, str):
-        spec = spack.spec.Spec(section)
-        if not spec.name:
-            spec.name = pkg_name
-        extracted_specs = [spec]
-    else:
-        spec_strs = []
-        for spec_group in section:
-            if isinstance(spec_group, str):
-                spec_strs.append(spec_group)
-            else:
-                # Otherwise it is an object. The object can contain a single
-                # "spec" constraint, or a list of them with "any_of" or
-                # "one_of" policy.
-                if "spec" in spec_group:
-                    new_constraints = [spec_group["spec"]]
-                else:
-                    key = "one_of" if "one_of" in spec_group else "any_of"
-                    new_constraints = spec_group[key]
-                spec_strs.extend(new_constraints)
-
-        extracted_specs = []
-        for spec_str in spec_strs:
-            spec = spack.spec.Spec(spec_str)
-            if not spec.name:
-                spec.name = pkg_name
-            extracted_specs.append(spec)
-
-    version_specs = [x for x in extracted_specs if x.versions.concrete]
-    for spec in version_specs:
-        spec.attach_git_version_lookup()
-    return version_specs
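
Note: a `require:` section comes in several YAML shapes, and `_specs_from_requires` must flatten them all. A sketch of the accepted inputs expressed as the Python objects the YAML parser would produce (values are hypothetical):

    # Shapes a packages.yaml `require:` section may take:
    require_string = "@1.2.13"  # single constraint string
    require_list = [
        "openmpi@4.0.1",               # plain string entry
        {"spec": "@4.1"},              # object with a single "spec"
        {"one_of": ["@4.1", "@4.0"]},  # object with a one_of/any_of policy
    ]

    def spec_strings(section):
        """Flatten a require: section to constraint strings (sketch of the parsing)."""
        if isinstance(section, str):
            return [section]
        out = []
        for group in section:
            if isinstance(group, str):
                out.append(group)
            elif "spec" in group:
                out.append(group["spec"])
            else:
                out.extend(group["one_of" if "one_of" in group else "any_of"])
        return out

    print(spec_strings(require_list))  # ['openmpi@4.0.1', '@4.1', '@4.1', '@4.0']
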
 class SpecBuilder(object):
@@ -2319,6 +2466,8 @@ class SpecBuilder(object):
                 r"^root$",
                 r"^virtual_node$",
                 r"^virtual_root$",
+                r"^.*holds?$",
+                r"^literal.*$",
             ]
         )
     )
@@ -2758,12 +2907,13 @@ class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
     """
 
     def __init__(self, provided, conflicts):
-        indented = ["    %s\n" % conflict for conflict in conflicts]
-        error_msg = "".join(indented)
-        msg = "Spack concretizer internal error. Please submit a bug report"
-        msg += "\n    Please include the command, environment if applicable,"
-        msg += "\n    and the following error message."
-        msg = "\n    %s is unsatisfiable, errors are:\n%s" % (provided, error_msg)
+        msg = (
+            "Spack concretizer internal error. Please submit a bug report and include the "
+            "command, environment if applicable and the following error message."
+            f"\n    {provided} is unsatisfiable, errors are:"
+        )
+        msg += "".join([f"\n    {conflict}" for conflict in conflicts])
 
         super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
 lib/spack/spack/solver/causation.lp (new file, 72 lines)
@@ -0,0 +1,72 @@
+% Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+% Spack Project Developers. See the top-level COPYRIGHT file for details.
+%
+% SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+% associated conditions by cause -> effect
+condition_cause(Effect, Cause) :-
+  condition_holds(Effect), condition_holds(Cause),
+  attr(Name, A1),
+  condition_requirement(Effect, Name, A1),
+  imposed_constraint(Cause, Name, A1).
+condition_cause(Effect, Cause) :-
+  condition_holds(Effect), condition_holds(Cause),
+  attr(Name, A1, A2),
+  condition_requirement(Effect, Name, A1, A2),
+  imposed_constraint(Cause, Name, A1, A2).
+condition_cause(Effect, Cause) :-
+  condition_holds(Effect), condition_holds(Cause),
+  attr(Name, A1, A2, A3),
+  condition_requirement(Effect, Name, A1, A2, A3),
+  imposed_constraint(Cause, Name, A1, A2, A3).
+condition_cause(Effect, Cause) :-
+  condition_holds(Effect), condition_holds(Cause),
+  attr(Name, A1, A2, A3, A4),
+  condition_requirement(Effect, Name, A1, A2, A3, A4),
+  imposed_constraint(Cause, Name, A1, A2, A3, A4).
+
+% At most one variant for single valued variants
+error(0, "'{0}' required multiple values for single-valued variant '{1}'\n    Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, Cause2)
+  :- attr("node", Package),
+     variant(Package, Variant),
+     variant_single_value(Package, Variant),
+     build(Package),
+     attr("variant_value", Package, Variant, Value1),
+     imposed_constraint(Cause1, "variant_set", Package, Variant, Value1),
+     condition_holds(Cause1),
+     attr("variant_value", Package, Variant, Value2),
+     imposed_constraint(Cause2, "variant_set", Package, Variant, Value2),
+     condition_holds(Cause2),
+     Value1 < Value2. % see[1] in concretize.lp
+
+% We cannot have a version that violates another version constraint
+error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Package, Constraint, startcauses, VersionCause, ConstraintCause)
+  :- attr("node", Package),
+     attr("version", Package, Version),
+     imposed_constraint(VersionCause, "node_version_satisfies", Package, Version),
+     condition_holds(VersionCause),
+     attr("node_version_satisfies", Package, Constraint),
+     imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
+     condition_holds(ConstraintCause),
+     not version_satisfies(Package, Constraint, Version).
+
+% A virtual package may or may not have a version, but never has more than one
+% Error to catch how it happens
+error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Virtual, Constraint, startcauses, VersionCause, ConstraintCause)
+  :- attr("virtual_node", Virtual),
+     attr("version", Virtual, Version),
+     imposed_constraint(VersionCause, "node_version_satisfies", Virtual, Version),
+     condition_holds(VersionCause),
+     attr("node_version_satisfies", Virtual, Constraint),
+     imposed_constraint(ConstraintCause, "node_version_satisfies", Virtual, Constraint),
+     condition_holds(ConstraintCause),
+     not version_satisfies(Virtual, Constraint, Version).
+
+% More specific error message if the version cannot satisfy some constraint
+% Otherwise covered by `no_version_error` and `versions_conflict_error`.
+error(0, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause)
+  :- attr("node_version_satisfies", Package, Constraint),
+     imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
+     condition_holds(ConstraintCause),
+     attr("version", Package, Version),
+     not version_satisfies(Package, Constraint, Version).
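
Note: the `condition_cause` rules link conditions pairwise — `Cause` caused `Effect` whenever some attribute that `Effect` requires is one that `Cause` imposes. There is one rule per attribute arity because ASP predicates are fixed-arity. A toy Python join over hypothetical fact tables, mimicking one of those rules:

    # Hypothetical model fragments keyed by condition id.
    condition_requirement = {("B", ("node", "mpich"))}  # effect B requires this attr
    imposed_constraint = {("A", ("node", "mpich"))}     # cause A imposes the same attr
    condition_holds = {"A", "B"}
    attrs = {("node", "mpich")}                         # attrs true in the model

    condition_cause = {
        (effect, cause)
        for (effect, req) in condition_requirement
        for (cause, imp) in imposed_constraint
        if req == imp and req in attrs
        and effect in condition_holds and cause in condition_holds
    }
    print(condition_cause)  # {('B', 'A')}
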
@@ -12,8 +12,8 @@
 %-----------------------------------------------------------------------------
 
 % Give clingo the choice to solve an input spec or not
-{ literal_solved(ID) } :- literal(ID).
-literal_not_solved(ID) :- not literal_solved(ID), literal(ID).
+{ attr("literal_solved", ID) } :- literal(ID).
+literal_not_solved(ID) :- not attr("literal_solved", ID), literal(ID).
 
 % If concretize_everything() is a fact, then we cannot have unsolved specs
 :- literal_not_solved(ID), concretize_everything.
@@ -21,24 +21,14 @@ literal_not_solved(ID) :- not literal_solved(ID), literal(ID).
 % Make a problem with "zero literals solved" unsat. This is to trigger
 % looking for solutions to the ASP problem with "errors", which results
 % in better reporting for users. See #30669 for details.
-1 { literal_solved(ID) : literal(ID) }.
+1 { attr("literal_solved", ID) : literal(ID) }.
 
 opt_criterion(300, "number of input specs not concretized").
 #minimize{ 0@300: #true }.
 #minimize { 1@300,ID : literal_not_solved(ID) }.
 
-% Map constraint on the literal ID to the correct PSID
-attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
-attr(Name, A1, A2) :- literal(LiteralID, Name, A1, A2), literal_solved(LiteralID).
-attr(Name, A1, A2, A3) :- literal(LiteralID, Name, A1, A2, A3), literal_solved(LiteralID).
-attr(Name, A1, A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), literal_solved(LiteralID).
-
 #defined concretize_everything/0.
 #defined literal/1.
-#defined literal/3.
-#defined literal/4.
-#defined literal/5.
-#defined literal/6.
 
 % Attributes for node packages which must have a single value
 attr_single_value("version").
@@ -58,6 +48,13 @@ error(100, multiple_values_error, Attribute, Package)
     attr_single_value(Attribute),
     2 { attr(Attribute, Package, Version) }.
 
+%-----------------------------------------------------------------------------
+% Define functions for error handling
+%-----------------------------------------------------------------------------
+
+#defined error/9.
+#defined condition_cause/2.
+
 %-----------------------------------------------------------------------------
 % Version semantics
 %-----------------------------------------------------------------------------
@@ -96,7 +93,18 @@ version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package
 { attr("version", Package, Version) : version_declared(Package, Version) }
   :- attr("node", Package).
 
+% Error to ensure structure of the program is not violated
+error(2, "No version from '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
+  :- attr("node", Package),
+     attr("version", Package, Version1),
+     attr("version", Package, Version2),
+     Version1 < Version2. % see[1]
+
+error(2, "No versions available for package '{0}'", Package)
+  :- attr("node", Package), not attr("version", Package, _).
+
 % A virtual package may or may not have a version, but never has more than one
+% fallback error for structure in case there's another way for it to happen
 error(100, "Cannot select a single version for virtual '{0}'", Virtual)
   :- attr("virtual_node", Virtual),
      2 { attr("version", Virtual, Version) }.
@@ -150,8 +158,7 @@ possible_version_weight(Package, Weight)
   :- attr("node_version_satisfies", Package, Constraint),
     version_satisfies(Package, Constraint, _).
 
-% More specific error message if the version cannot satisfy some constraint
-% Otherwise covered by `no_version_error` and `versions_conflict_error`.
+% Error for structure of program
 error(10, "Cannot satisfy '{0}@{1}'", Package, Constraint)
   :- attr("node_version_satisfies", Package, Constraint),
      attr("version", Package, Version),
@@ -182,9 +189,8 @@ condition_holds(ID) :-
   attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3);
   attr(Name, A1, A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4).
 
-% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
-% is derived. This allows imposed constraints to be canceled in special cases.
-impose(ID) :- condition_holds(ID), not do_not_impose(ID).
+% condition_holds(ID) implies all imposed_constraints.
+impose(ID) :- condition_holds(ID).
 
 % conditions that hold impose constraints on other specs
 attr(Name, A1) :- impose(ID), imposed_constraint(ID, Name, A1).
@@ -229,33 +235,19 @@ depends_on(Package, Dependency) :- attr("depends_on", Package, Dependency, _).
 % a dependency holds if its condition holds and if it is not external or
 % concrete. We chop off dependencies for externals, and dependencies of
 % concrete specs don't need to be resolved -- they arise from the concrete
-% specs themselves.
-dependency_holds(Package, Dependency, Type) :-
-  dependency_condition(ID, Package, Dependency),
-  dependency_type(ID, Type),
-  build(Package),
-  not external(Package),
-  condition_holds(ID).
-
-% We cut off dependencies of externals (as we don't really know them).
-% Don't impose constraints on dependencies that don't exist.
-do_not_impose(ID) :-
-  not dependency_holds(Package, Dependency, _),
-  dependency_condition(ID, Package, Dependency).
+% specs themselves. This attr is used in constraints from dependency conditions
+attr("spack_installed", Package) :- build(Package), not external(Package).
 
 % declared dependencies are real if they're not virtual AND
 % the package is not an external.
 % They're only triggered if the associated dependnecy condition holds.
 attr("depends_on", Package, Dependency, Type)
-  :- dependency_holds(Package, Dependency, Type),
+  :- attr("dependency_holds", Package, Dependency, Type),
     not virtual(Dependency).
 
 % every root must be a node
 attr("node", Package) :- attr("root", Package).
 
-% dependencies imply new nodes
-attr("node", Dependency) :- attr("node", Package), depends_on(Package, Dependency).
-
 % all nodes in the graph must be reachable from some root
 % this ensures a user can't say `zlib ^libiconv` (neither of which have any
 % dependencies) and get a two-node unconnected graph
@@ -296,14 +288,17 @@ error(1, Msg) :- attr("node", Package),
 % if a package depends on a virtual, it's not external and we have a
 % provider for that virtual then it depends on the provider
 attr("depends_on", Package, Provider, Type)
-  :- dependency_holds(Package, Virtual, Type),
+  :- attr("dependency_holds", Package, Virtual, Type),
     provider(Provider, Virtual),
    not external(Package).
 
-% dependencies on virtuals also imply that the virtual is a virtual node
-attr("virtual_node", Virtual)
-  :- dependency_holds(Package, Virtual, Type),
-     virtual(Virtual), not external(Package).
+% If a package depends on a provider, the provider must be a node
+% nodes that are not indirected by a virtual are instantiated
+% directly from the imposed constraints of the dependency condition
+attr("node", Provider)
+  :- attr("dependency_holds", Package, Virtual, Type),
+     provider(Provider, Virtual),
+     not external(Package).
 
 % If there's a virtual node, we must select one and only one provider.
 % The provider must be selected among the possible providers.
@@ -330,17 +325,11 @@ attr("root", Package) :- attr("virtual_root", Virtual), provider(Package, Virtua
 % for environments that are concretized together (e.g. where we
 % asks to install "mpich" and "hdf5+mpi" and we want "mpich" to
 % be the mpi provider)
-provider(Package, Virtual) :- attr("node", Package), virtual_condition_holds(Package, Virtual).
+provider(Package, Virtual) :- attr("node", Package), attr("virtual_condition_holds", Package, Virtual).
 
-% The provider provides the virtual if some provider condition holds.
-virtual_condition_holds(Provider, Virtual) :-
-  provider_condition(ID, Provider, Virtual),
-  condition_holds(ID),
-  virtual(Virtual).
-
 % A package cannot be the actual provider for a virtual if it does not
 % fulfill the conditions to provide that virtual
-:- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
+:- provider(Package, Virtual), not attr("virtual_condition_holds", Package, Virtual),
    internal_error("Virtual when provides not respected").
 
 #defined possible_provider/2.
@@ -382,14 +371,8 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen
 
 % do not warn if generated program contains none of these.
 #defined possible_provider/2.
-#defined provider_condition/3.
-#defined required_provider_condition/3.
-#defined required_provider_condition/4.
-#defined required_provider_condition/5.
-#defined required_provider_condition/6.
 #defined declared_dependency/3.
 #defined virtual/1.
-#defined virtual_condition_holds/2.
 #defined external/1.
 #defined external_spec/2.
 #defined external_version_declared/4.
@@ -437,25 +420,15 @@ external(Package) :- attr("external_spec_selected", Package, _).
 
 % determine if an external spec has been selected
 attr("external_spec_selected", Package, LocalIndex) :-
-    external_conditions_hold(Package, LocalIndex),
+    attr("external_conditions_hold", Package, LocalIndex),
    attr("node", Package),
    not attr("hash", Package, _).
 
-external_conditions_hold(Package, LocalIndex) :-
-    possible_external(ID, Package, LocalIndex), condition_holds(ID).
-
 % it cannot happen that a spec is external, but none of the external specs
 % conditions hold.
 error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
  :- external(Package),
-     not external_conditions_hold(Package, _).
+     not attr("external_conditions_hold", Package, _).
 
-#defined possible_external/3.
-#defined external_spec_index/3.
-#defined external_spec_condition/3.
-#defined external_spec_condition/4.
-#defined external_spec_condition/5.
-#defined external_spec_condition/6.
-
 %-----------------------------------------------------------------------------
 % Config required semantics
@@ -594,7 +567,6 @@ attr("variant_value", Package, Variant, Value) :-
     variant(Package, Variant),
     build(Package).
 
-
 error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
   :- attr("node", Package),
      variant(Package, Variant),
@@ -665,7 +637,7 @@ variant_default_not_used(Package, Variant, Value)
 external_with_variant_set(Package, Variant, Value)
   :- attr("variant_value", Package, Variant, Value),
      condition_requirement(ID, "variant_value", Package, Variant, Value),
-     possible_external(ID, Package, _),
+     imposed_constraint(ID, "external_conditions_hold", Package, _),
     external(Package),
    attr("node", Package).

@@ -23,5 +23,12 @@
 #show error/4.
 #show error/5.
 #show error/6.
+#show error/7.
+#show error/8.
+#show error/9.
+
+% show cause -> effect data for errors
+#show condition_cause/2.
+#show condition/2.
 
 % debug
@@ -679,6 +679,16 @@ def from_dict(d):
         d = d["compiler"]
         return CompilerSpec(d["name"], vn.VersionList.from_dict(d))
 
+    @property
+    def display_str(self):
+        """Equivalent to {compiler.name}{@compiler.version} for Specs, without extra
+        @= for readability."""
+        if self.concrete:
+            return f"{self.name}@{self.version}"
+        elif self.versions != vn.any_version:
+            return f"{self.name}@{self.versions}"
+        return self.name
+
     def __str__(self):
         out = self.name
         if self.versions and self.versions != vn.any_version:
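
Note: `__str__` on a concrete `CompilerSpec` prints the exact-version sigil (`@=`), which is noisy in user-facing output; `display_str` drops it. A hedged usage sketch — the expected outputs are inferred from the test changes below, which now look for `clang@11.0.0` rather than `clang@=11.0.0`:

    # Hypothetical session; exact values are assumptions for illustration.
    import spack.spec

    cs = spack.spec.CompilerSpec("clang@=11.0.0")  # concrete: exactly 11.0.0
    str(cs)          # 'clang@=11.0.0' -- keeps the @= "exactly this version" marker
    cs.display_str   # 'clang@11.0.0'  -- reader-friendly form for command output
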
@@ -1730,14 +1740,14 @@ def traverse_edges(self, **kwargs):
     def short_spec(self):
         """Returns a version of the spec with the dependencies hashed
         instead of completely enumerated."""
-        spec_format = "{name}{@version}{%compiler}"
+        spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
         spec_format += "{variants}{arch=architecture}{/hash:7}"
         return self.format(spec_format)
 
     @property
     def cshort_spec(self):
         """Returns an auto-colorized version of ``self.short_spec``."""
-        spec_format = "{name}{@version}{%compiler}"
+        spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
         spec_format += "{variants}{arch=architecture}{/hash:7}"
         return self.cformat(spec_format)
 
@@ -2789,11 +2799,11 @@ def inject_patches_variant(root):
         # Also record all patches required on dependencies by
         # depends_on(..., patch=...)
         for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
-            pkg_deps = dspec.parent.package_class.dependencies
-            if dspec.spec.name not in pkg_deps:
+            if dspec.spec.concrete:
                 continue
 
-            if dspec.spec.concrete:
+            pkg_deps = dspec.parent.package_class.dependencies
+            if dspec.spec.name not in pkg_deps:
                 continue
 
             patches = []
@@ -4323,7 +4333,7 @@ def write_attribute(spec, attribute, color):
 
         if callable(current):
             raise SpecFormatStringError("Attempted to format callable object")
-        if not current:
+        if current is None:
             # We're not printing anything
             return
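
Note: `if not current` also bails out on legitimate falsy values (empty string, `0`, an empty collection), silently truncating formatted output; only `None` should mean "nothing to print". A minimal illustration of the difference:

    def render(value):
        if value is None:  # the old check was `if not value:`
            return ""
        return str(value)

    render(0)     # '0'  -- printed; the old falsy check would have dropped it
    render("")    # ''   -- empty but intentional
    render(None)  # ''   -- genuinely absent attribute
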
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import itertools
 import textwrap
-from typing import List
+from typing import List, Optional, Tuple

 import llnl.util.lang

@@ -66,17 +66,17 @@ def to_dict(self):
     return dict(d)


-def make_environment(dirs=None):
-    """Returns an configured environment for template rendering."""
+@llnl.util.lang.memoized
+def make_environment(dirs: Optional[Tuple[str, ...]] = None):
+    """Returns a configured environment for template rendering."""
+    # Import at this scope to avoid slowing Spack startup down
+    import jinja2
+
     if dirs is None:
         # Default directories where to search for templates
         builtins = spack.config.get("config:template_dirs", ["$spack/share/spack/templates"])
         extensions = spack.extensions.get_template_dirs()
-        dirs = [canonicalize_path(d) for d in itertools.chain(builtins, extensions)]
-
-    # avoid importing this at the top level as it's used infrequently and
-    # slows down startup a bit.
-    import jinja2
+        dirs = tuple(canonicalize_path(d) for d in itertools.chain(builtins, extensions))

     # Loader for the templates
     loader = jinja2.FileSystemLoader(dirs)
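`llnl.util.lang.memoized` caches results keyed by argument value, which is why `dirs` switches from a list to a tuple: cache keys must be hashable. A rough self-contained sketch of the same pattern, using `functools.lru_cache` as a stand-in for Spack's memoizer; the path is a placeholder:

    import functools
    from typing import Optional, Tuple

    @functools.lru_cache(maxsize=None)  # stand-in for llnl.util.lang.memoized
    def make_environment(dirs: Optional[Tuple[str, ...]] = None) -> Tuple[str, ...]:
        # A list argument would raise "TypeError: unhashable type: 'list'",
        # hence the tuple(...) in the diff above.
        if dirs is None:
            dirs = ("/placeholder/templates",)  # placeholder search path
        return dirs

    assert make_environment() is make_environment()  # second call is served from the cache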
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import pickle
+import sys

 import pytest

@@ -39,7 +40,10 @@ def test_dump(tmpdir):
     with tmpdir.as_cwd():
         build_env("--dump", _out_file, "zlib")
         with open(_out_file) as f:
-            assert any(line.startswith("PATH=") for line in f.readlines())
+            if sys.platform == "win32":
+                assert any(line.startswith('set "PATH=') for line in f.readlines())
+            else:
+                assert any(line.startswith("PATH=") for line in f.readlines())


 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
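The platform split above reflects how the dumped environment is written: sh-style dumps emit `PATH=...` lines, while Windows batch output quotes the assignment. A small sketch of the check with the prefix selection factored out; the helper names are ours, not Spack's:

    import sys

    def path_prefix() -> str:
        # Batch scripts emit: set "PATH=...  while sh-style dumps emit: PATH=...
        return 'set "PATH=' if sys.platform == "win32" else "PATH="

    def has_path_line(lines) -> bool:
        return any(line.startswith(path_prefix()) for line in lines)

    print(has_path_line(["PATH=/usr/bin"]))  # True on POSIX platforms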
@@ -204,8 +204,8 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
     os.environ["PATH"] = str(clangdir)
     output = compiler("find", "--scope=site")

-    assert "clang@=11.0.0" in output
-    assert "gcc@=8.4.0" in output
+    assert "clang@11.0.0" in output
+    assert "gcc@8.4.0" in output

     config = spack.compilers.get_compiler_config("site", False)
     clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
@@ -246,8 +246,8 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir
     os.environ["PATH"] = str(clangdir)
     output = compiler("find", "--scope=site")

-    assert "clang@=11.0.0" in output
-    assert "gcc@=8.4.0" in output
+    assert "clang@11.0.0" in output
+    assert "gcc@8.4.0" in output

     config = spack.compilers.get_compiler_config("site", False)
     clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
@@ -390,6 +390,19 @@ def test_remove_after_concretize():
     assert not any(s.name == "mpileaks" for s in env_specs)


+def test_remove_before_concretize():
+    e = ev.create("test")
+    e.unify = True
+
+    e.add("mpileaks")
+    e.concretize()
+
+    e.remove("mpileaks")
+    e.concretize()
+
+    assert not list(e.concretized_specs())
+
+
 def test_remove_command():
     env("create", "test")
     assert "test" in env("list")
@@ -906,7 +919,7 @@ def test_env_config_precedence(environment_from_manifest):
       mpileaks:
         version: ["2.2"]
       libelf:
-        version: ["0.8.11"]
+        version: ["0.8.10"]
 """
     )

@@ -2404,7 +2417,11 @@ def test_concretize_user_specs_together():
     # Concretize a second time using 'mpich2' as the MPI provider
     e.remove("mpich")
     e.add("mpich2")
-    e.concretize()
+
+    # Concretizing without invalidating the concrete spec for mpileaks fails
+    with pytest.raises(spack.error.UnsatisfiableSpecError):
+        e.concretize()
+    e.concretize(force=True)

     assert all("mpich2" in spec for _, spec in e.concretized_specs())
     assert all("mpich" not in spec for _, spec in e.concretized_specs())
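With unified concretization, removing `mpich` alone no longer frees the already-concretized `mpileaks` from its old MPI provider, so a plain re-concretize now raises and `force=True` discards the stale concrete specs first. A rough sketch of the resulting control flow, using only the API calls that appear in the test above:

    import spack.error

    def reconcretize(e):
        # Re-running e.concretize() keeps previously concretized roots; when
        # they contradict the new user specs, the solver reports failure.
        try:
            e.concretize()
        except spack.error.UnsatisfiableSpecError:
            e.concretize(force=True)  # invalidate old concrete specs, re-solve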
@@ -2435,7 +2452,7 @@ def test_duplicate_packages_raise_when_concretizing_together():
     e.add("mpich")

     with pytest.raises(
-        spack.error.UnsatisfiableSpecError, match=r"relax the concretizer strictness"
+        spack.error.UnsatisfiableSpecError, match=r"You could consider setting `concretizer:unify`"
     ):
         e.concretize()

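The new message steers users toward the `concretizer:unify` setting instead of the old "strictness" wording. An illustrative way to flip that knob programmatically, assuming a mutable configuration scope as in the tests:

    import spack.config

    # Relax unification so duplicate packages across roots are tolerated
    spack.config.set("concretizer:unify", "when_possible")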
@@ -2671,7 +2688,7 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m

     spec = e.specs_by_hash[e.concretized_order[0]]
     view_prefix = e.default_view.get_projection_for_spec(spec)
-    modules_glob = "%s/modules/**/*" % e.path
+    modules_glob = "%s/modules/**/*/*" % e.path
     modules = glob.glob(modules_glob)
     assert len(modules) == 1
     module = modules[0]
@@ -2707,12 +2724,12 @@ def test_multiple_modules_post_env_hook(environment_from_manifest, install_mocke

     spec = e.specs_by_hash[e.concretized_order[0]]
     view_prefix = e.default_view.get_projection_for_spec(spec)
-    modules_glob = "%s/modules/**/*" % e.path
+    modules_glob = "%s/modules/**/*/*" % e.path
     modules = glob.glob(modules_glob)
     assert len(modules) == 1
     module = modules[0]

-    full_modules_glob = "%s/full_modules/**/*" % e.path
+    full_modules_glob = "%s/full_modules/**/*/*" % e.path
     full_modules = glob.glob(full_modules_glob)
     assert len(full_modules) == 1
     full_module = full_modules[0]
@@ -3295,3 +3312,22 @@ def test_environment_created_in_users_location(mutable_config, tmpdir):
     assert dir_name in out
     assert env_dir in ev.root(dir_name)
     assert os.path.isdir(os.path.join(env_dir, dir_name))
+
+
+def test_environment_created_from_lockfile_has_view(mock_packages, tmpdir):
+    """When an env is created from a lockfile, a view should be generated for it"""
+    env_a = str(tmpdir.join("a"))
+    env_b = str(tmpdir.join("b"))
+
+    # Create an environment and install a package in it
+    env("create", "-d", env_a)
+    with ev.Environment(env_a):
+        add("libelf")
+        install("--fake")
+
+    # Create another environment from the lockfile of the first environment
+    env("create", "-d", env_b, os.path.join(env_a, "spack.lock"))
+
+    # Make sure the view was created
+    with ev.Environment(env_b) as e:
+        assert os.path.isdir(e.view_path_default)
@@ -357,3 +357,18 @@ def test_find_loaded(database, working_env):
     output = find("--loaded")
     expected = find()
     assert output == expected
+
+
+@pytest.mark.regression("37712")
+def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
+    """Tests that having an active environment with a root spec containing a compiler constrained
+    by a version range (i.e. @X.Y rather the single version than @=X.Y) doesn't result in an error
+    when invoking "spack find".
+    """
+    test_environment = ev.create_in_dir(tmp_path)
+    test_environment.add("zlib %gcc@12.1.0")
+    test_environment.write()
+
+    with test_environment:
+        output = find()
+    assert "zlib%gcc@12.1.0" in output
@@ -319,3 +319,17 @@ def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
     spack.cmd.common.arguments.sanitize_reporter_options(args)
     filename = spack.cmd.test.report_filename(args, suite)
     assert filename != "https://blahblah/submit.php?project=debugging"
+
+
+def test_test_output_multiple_specs(
+    mock_test_stage, mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
+):
+    """Ensure proper reporting for suite with skipped, failing, and passed tests."""
+    install("test-error", "simple-standalone-test@0.9", "simple-standalone-test@1.0")
+    out = spack_test("run", "test-error", "simple-standalone-test", fail_on_error=False)
+
+    # Note that a spec with passing *and* skipped tests is still considered
+    # to have passed at this level. If you want to see the spec-specific
+    # part result summaries, you'll have to look at the "test-out.txt" files
+    # for each spec.
+    assert "1 failed, 2 passed of 3 specs" in out
@@ -152,7 +152,9 @@ def test_preferred_versions(self):
         assert spec.version == Version("2.2")

     def test_preferred_versions_mixed_version_types(self):
-        update_packages("mixedversions", "version", ["2.0"])
+        if spack.config.get("config:concretizer") == "original":
+            pytest.skip("This behavior is not enforced for the old concretizer")
+        update_packages("mixedversions", "version", ["=2.0"])
         spec = concretize("mixedversions")
         assert spec.version == Version("2.0")

@@ -228,6 +230,29 @@ def test_preferred(self):
         spec.concretize()
         assert spec.version == Version("3.5.0")

+    def test_preferred_undefined_raises(self):
+        """Preference should not specify an undefined version"""
+        if spack.config.get("config:concretizer") == "original":
+            pytest.xfail("This behavior is not enforced for the old concretizer")
+
+        update_packages("python", "version", ["3.5.0.1"])
+        spec = Spec("python")
+        with pytest.raises(spack.config.ConfigError):
+            spec.concretize()
+
+    def test_preferred_truncated(self):
+        """Versions without "=" are treated as version ranges: if there is
+        a satisfying version defined in the package.py, we should use that
+        (don't define a new version).
+        """
+        if spack.config.get("config:concretizer") == "original":
+            pytest.skip("This behavior is not enforced for the old concretizer")
+
+        update_packages("python", "version", ["3.5"])
+        spec = Spec("python")
+        spec.concretize()
+        assert spec.satisfies("@3.5.1")
+
     def test_develop(self):
         """Test concretization with develop-like versions"""
         spec = Spec("develop-test")
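The distinction these tests pin down: `3.5` is a version range that any `3.5.*` release satisfies, while `=3.5` names exactly one version, and a preference may not invent a version the package never declared. A minimal stand-alone sketch of the prefix-range semantics, not Spack's actual implementation:

    def in_prefix_range(candidate: str, constraint: str) -> bool:
        # "@3.5" accepts 3.5, 3.5.0, 3.5.1, ...: components must share a prefix
        cparts = constraint.split(".")
        return candidate.split(".")[: len(cparts)] == cparts

    assert in_prefix_range("3.5.1", "3.5")      # why test_preferred_truncated passes
    assert not in_prefix_range("3.6.0", "3.5")
    # "=3.5", by contrast, matches the single version 3.5 and nothing else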
@@ -66,6 +66,28 @@ class V(Package):
 )


+_pkgt = (
+    "t",
+    """\
+class T(Package):
+    version('2.1')
+    version('2.0')
+
+    depends_on('u', when='@2.1:')
+""",
+)
+
+
+_pkgu = (
+    "u",
+    """\
+class U(Package):
+    version('1.1')
+    version('1.0')
+""",
+)
+
+
 @pytest.fixture
 def create_test_repo(tmpdir, mutable_config):
     repo_path = str(tmpdir)
@@ -79,7 +101,7 @@ def create_test_repo(tmpdir, mutable_config):
     )

     packages_dir = tmpdir.join("packages")
-    for pkg_name, pkg_str in [_pkgx, _pkgy, _pkgv]:
+    for pkg_name, pkg_str in [_pkgx, _pkgy, _pkgv, _pkgt, _pkgu]:
         pkg_dir = packages_dir.ensure(pkg_name, dir=True)
         pkg_file = pkg_dir.join("package.py")
         with open(str(pkg_file), "w") as f:
@@ -144,6 +166,45 @@ def test_requirement_isnt_optional(concretize_scope, test_repo):
     Spec("x@1.1").concretize()


+def test_require_undefined_version(concretize_scope, test_repo):
+    """If a requirement specifies a numbered version that isn't in
+    the associated package.py and isn't part of a Git hash
+    equivalence (hash=number), then Spack should raise an error
+    (it is assumed this is a typo, and raising the error here
+    avoids a likely error when Spack attempts to fetch the version).
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration requirements")
+
+    conf_str = """\
+packages:
+  x:
+    require: "@1.2"
+"""
+    update_packages_config(conf_str)
+    with pytest.raises(spack.config.ConfigError):
+        Spec("x").concretize()
+
+
+def test_require_truncated(concretize_scope, test_repo):
+    """A requirement specifies a version range, with satisfying
+    versions defined in the package.py. Make sure we choose one
+    of the defined versions (vs. allowing the requirement to
+    define a new version).
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration requirements")
+
+    conf_str = """\
+packages:
+  x:
+    require: "@1"
+"""
+    update_packages_config(conf_str)
+    xspec = Spec("x").concretized()
+    assert xspec.satisfies("@1.1")
+
+
 def test_git_user_supplied_reference_satisfaction(
     concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
@@ -220,6 +281,40 @@ def test_requirement_adds_new_version(
     assert s1.version.ref == a_commit_hash


+def test_requirement_adds_version_satisfies(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    """Make sure that new versions added by requirements are factored into
+    conditions. In this case create a new version that satisfies a
+    depends_on condition and make sure it is triggered (i.e. the
+    dependency is added).
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration" " requirements")
+
+    repo_path, filename, commits = mock_git_version_info
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    # Sanity check: early version of T does not include U
+    s0 = Spec("t@2.0").concretized()
+    assert not ("u" in s0)
+
+    conf_str = """\
+packages:
+  t:
+    require: "@{0}=2.2"
+""".format(
+        commits[0]
+    )
+    update_packages_config(conf_str)
+
+    s1 = Spec("t").concretized()
+    assert "u" in s1
+    assert s1.satisfies("@2.2")
+
+
 def test_requirement_adds_git_hash_version(
     concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
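The `@<hash>=2.2` requirement grammar used above pins a Git commit while declaring it equivalent to the numbered version 2.2, so range conditions such as `depends_on('u', when='@2.1:')` still fire for it. An illustrative way such a config string is assembled; the hash below is a placeholder, not a real commit:

    commit = "0123456789abcdef0123456789abcdef01234567"  # placeholder hash
    conf_str = """\
    packages:
      t:
        require: "@{0}=2.2"
    """.format(commit)
    # update_packages_config(conf_str) would then inject it into packages.yaml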
@@ -272,8 +367,11 @@ def test_requirement_adds_multiple_new_versions(
 def test_preference_adds_new_version(
     concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
+    """Normally a preference cannot define a new version, but that constraint
+    is ignored if the version is a Git hash-based version.
+    """
     if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration requirements")
+        pytest.skip("Original concretizer does not enforce this constraint for preferences")

     repo_path, filename, commits = mock_git_version_info
     monkeypatch.setattr(
@@ -296,6 +394,29 @@ def test_preference_adds_new_version(
     assert not s3.satisfies("@2.3")


+def test_external_adds_new_version_that_is_preferred(concretize_scope, test_repo):
+    """Test that we can use a version, not declared in package recipe, as the
+    preferred version if that version appears in an external spec.
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not enforce this constraint for preferences")
+
+    conf_str = """\
+packages:
+  y:
+    version: ["2.7"]
+    externals:
+    - spec: y@2.7 # Not defined in y
+      prefix: /fake/nonexistent/path/
+    buildable: false
+"""
+    update_packages_config(conf_str)
+
+    spec = Spec("x").concretized()
+    assert spec["y"].satisfies("@2.7")
+    assert spack.version.Version("2.7") not in spec["y"].package.versions
+
+
 def test_requirement_is_successfully_applied(concretize_scope, test_repo):
     """If a simple requirement can be satisfied, make sure the
     concretization succeeds and the requirement spec is applied.
@@ -4,7 +4,7 @@ lmod:
   hash_length: 0

   core_compilers:
-    - 'clang@3.3'
+    - 'clang@12.0.0'

   core_specs:
     - 'mpich@3.0.1'
@@ -0,0 +1,5 @@
+enable:
+  - lmod
+lmod:
+  core_compilers:
+  - 'clang@12.0.0'
@@ -0,0 +1,5 @@
+enable:
+  - lmod
+lmod:
+  core_compilers:
+  - 'clang@=12.0.0'
@@ -31,194 +31,164 @@ class Amdfftw(FftwBase):
     Example : spack install amdfftw precision=float
     """

-    _name = 'amdfftw'
+    _name = "amdfftw"
     homepage = "https://developer.amd.com/amd-aocl/fftw/"
     url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
     git = "https://github.com/amd/amd-fftw.git"

-    maintainers = ['amd-toolchain-support']
+    maintainers("amd-toolchain-support")

-    version('3.1', sha256='3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132')
-    version('3.0.1', sha256='87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c')
-    version('3.0', sha256='a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd')
-    version('2.2', sha256='de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8')
+    version("3.1", sha256="3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132")
+    version("3.0.1", sha256="87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c")
+    version("3.0", sha256="a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd")
+    version("2.2", sha256="de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8")

-    variant('shared', default=True,
-            description='Builds a shared version of the library')
-    variant('openmp', default=True,
-            description='Enable OpenMP support')
-    variant('threads', default=False,
-            description='Enable SMP threads support')
-    variant('debug', default=False,
-            description='Builds a debug version of the library')
+    variant("shared", default=True, description="Builds a shared version of the library")
+    variant("openmp", default=True, description="Enable OpenMP support")
+    variant("threads", default=False, description="Enable SMP threads support")
+    variant("debug", default=False, description="Builds a debug version of the library")
     variant(
-        'amd-fast-planner',
+        "amd-fast-planner",
         default=False,
-        description='Option to reduce the planning time without much'
-        'tradeoff in the performance. It is supported for'
-        'Float and double precisions only.')
-    variant(
-        'amd-top-n-planner',
-        default=False,
-        description='Build with amd-top-n-planner support')
-    variant(
-        'amd-mpi-vader-limit',
-        default=False,
-        description='Build with amd-mpi-vader-limit support')
-    variant(
-        'static',
-        default=False,
-        description='Build with static suppport')
-    variant(
-        'amd-trans',
-        default=False,
-        description='Build with amd-trans suppport')
-    variant(
-        'amd-app-opt',
-        default=False,
-        description='Build with amd-app-opt suppport')
+        description="Option to reduce the planning time without much"
+        "tradeoff in the performance. It is supported for"
+        "Float and double precisions only.",
+    )
+    variant("amd-top-n-planner", default=False, description="Build with amd-top-n-planner support")
+    variant(
+        "amd-mpi-vader-limit", default=False, description="Build with amd-mpi-vader-limit support"
+    )
+    variant("static", default=False, description="Build with static suppport")
+    variant("amd-trans", default=False, description="Build with amd-trans suppport")
+    variant("amd-app-opt", default=False, description="Build with amd-app-opt suppport")

-    depends_on('texinfo')
+    depends_on("texinfo")

-    provides('fftw-api@3', when='@2:')
+    provides("fftw-api@3", when="@2:")

     conflicts(
-        'precision=quad',
-        when='@2.2 %aocc',
-        msg='Quad precision is not supported by AOCC clang version 2.2')
-    conflicts(
-        '+debug',
-        when='@2.2 %aocc',
-        msg='debug mode is not supported by AOCC clang version 2.2')
-    conflicts(
-        '%gcc@:7.2',
-        when='@2.2:',
-        msg='GCC version above 7.2 is required for AMDFFTW')
-    conflicts(
-        '+amd-fast-planner ',
-        when='+mpi',
-        msg='mpi thread is not supported with amd-fast-planner')
-    conflicts(
-        '+amd-fast-planner',
-        when='@2.2',
-        msg='amd-fast-planner is supported from 3.0 onwards')
-    conflicts(
-        '+amd-fast-planner',
-        when='precision=quad',
-        msg='Quad precision is not supported with amd-fast-planner')
-    conflicts(
-        '+amd-fast-planner',
-        when='precision=long_double',
-        msg='long_double precision is not supported with amd-fast-planner')
-    conflicts(
-        '+amd-top-n-planner',
-        when='@:3.0.0',
-        msg='amd-top-n-planner is supported from 3.0.1 onwards')
-    conflicts(
-        '+amd-top-n-planner',
-        when='precision=long_double',
-        msg='long_double precision is not supported with amd-top-n-planner')
-    conflicts(
-        '+amd-top-n-planner',
-        when='precision=quad',
-        msg='Quad precision is not supported with amd-top-n-planner')
-    conflicts(
-        '+amd-top-n-planner',
-        when='+amd-fast-planner',
-        msg='amd-top-n-planner cannot be used with amd-fast-planner')
-    conflicts(
-        '+amd-top-n-planner',
-        when='+threads',
-        msg='amd-top-n-planner works only for single thread')
-    conflicts(
-        '+amd-top-n-planner',
-        when='+mpi',
-        msg='mpi thread is not supported with amd-top-n-planner')
-    conflicts(
-        '+amd-top-n-planner',
-        when='+openmp',
-        msg='openmp thread is not supported with amd-top-n-planner')
-    conflicts(
-        '+amd-mpi-vader-limit',
-        when='@:3.0.0',
-        msg='amd-mpi-vader-limit is supported from 3.0.1 onwards')
-    conflicts(
-        '+amd-mpi-vader-limit',
-        when='precision=quad',
-        msg='Quad precision is not supported with amd-mpi-vader-limit')
-    conflicts(
-        '+amd-trans',
-        when='+threads',
-        msg='amd-trans works only for single thread')
-    conflicts(
-        '+amd-trans',
-        when='+mpi',
-        msg='mpi thread is not supported with amd-trans')
-    conflicts(
-        '+amd-trans',
-        when='+openmp',
-        msg='openmp thread is not supported with amd-trans')
-    conflicts(
-        '+amd-trans',
-        when='precision=long_double',
-        msg='long_double precision is not supported with amd-trans')
-    conflicts(
-        '+amd-trans',
-        when='precision=quad',
-        msg='Quad precision is not supported with amd-trans')
-    conflicts(
-        '+amd-app-opt',
-        when='@:3.0.1',
-        msg='amd-app-opt is supported from 3.1 onwards')
-    conflicts(
-        '+amd-app-opt',
-        when='+mpi',
-        msg='mpi thread is not supported with amd-app-opt')
-    conflicts(
-        '+amd-app-opt',
-        when='precision=long_double',
-        msg='long_double precision is not supported with amd-app-opt')
-    conflicts(
-        '+amd-app-opt',
-        when='precision=quad',
-        msg='Quad precision is not supported with amd-app-opt')
+        "precision=quad",
+        when="@2.2 %aocc",
+        msg="Quad precision is not supported by AOCC clang version 2.2",
+    )
+    conflicts(
+        "+debug", when="@2.2 %aocc", msg="debug mode is not supported by AOCC clang version 2.2"
+    )
+    conflicts("%gcc@:7.2", when="@2.2:", msg="GCC version above 7.2 is required for AMDFFTW")
+    conflicts(
+        "+amd-fast-planner ", when="+mpi", msg="mpi thread is not supported with amd-fast-planner"
+    )
+    conflicts(
+        "+amd-fast-planner", when="@2.2", msg="amd-fast-planner is supported from 3.0 onwards"
+    )
+    conflicts(
+        "+amd-fast-planner",
+        when="precision=quad",
+        msg="Quad precision is not supported with amd-fast-planner",
+    )
+    conflicts(
+        "+amd-fast-planner",
+        when="precision=long_double",
+        msg="long_double precision is not supported with amd-fast-planner",
+    )
+    conflicts(
+        "+amd-top-n-planner",
+        when="@:3.0.0",
+        msg="amd-top-n-planner is supported from 3.0.1 onwards",
+    )
+    conflicts(
+        "+amd-top-n-planner",
+        when="precision=long_double",
+        msg="long_double precision is not supported with amd-top-n-planner",
+    )
+    conflicts(
+        "+amd-top-n-planner",
+        when="precision=quad",
+        msg="Quad precision is not supported with amd-top-n-planner",
+    )
+    conflicts(
+        "+amd-top-n-planner",
+        when="+amd-fast-planner",
+        msg="amd-top-n-planner cannot be used with amd-fast-planner",
+    )
+    conflicts(
+        "+amd-top-n-planner", when="+threads", msg="amd-top-n-planner works only for single thread"
+    )
+    conflicts(
+        "+amd-top-n-planner", when="+mpi", msg="mpi thread is not supported with amd-top-n-planner"
+    )
+    conflicts(
+        "+amd-top-n-planner",
+        when="+openmp",
+        msg="openmp thread is not supported with amd-top-n-planner",
+    )
+    conflicts(
+        "+amd-mpi-vader-limit",
+        when="@:3.0.0",
+        msg="amd-mpi-vader-limit is supported from 3.0.1 onwards",
+    )
+    conflicts(
+        "+amd-mpi-vader-limit",
+        when="precision=quad",
+        msg="Quad precision is not supported with amd-mpi-vader-limit",
+    )
+    conflicts("+amd-trans", when="+threads", msg="amd-trans works only for single thread")
+    conflicts("+amd-trans", when="+mpi", msg="mpi thread is not supported with amd-trans")
+    conflicts("+amd-trans", when="+openmp", msg="openmp thread is not supported with amd-trans")
+    conflicts(
+        "+amd-trans",
+        when="precision=long_double",
+        msg="long_double precision is not supported with amd-trans",
+    )
+    conflicts(
+        "+amd-trans", when="precision=quad", msg="Quad precision is not supported with amd-trans"
+    )
+    conflicts("+amd-app-opt", when="@:3.0.1", msg="amd-app-opt is supported from 3.1 onwards")
+    conflicts("+amd-app-opt", when="+mpi", msg="mpi thread is not supported with amd-app-opt")
+    conflicts(
+        "+amd-app-opt",
+        when="precision=long_double",
+        msg="long_double precision is not supported with amd-app-opt",
+    )
+    conflicts(
+        "+amd-app-opt",
+        when="precision=quad",
+        msg="Quad precision is not supported with amd-app-opt",
+    )

     def configure(self, spec, prefix):
         """Configure function"""
         # Base options
-        options = [
-            '--prefix={0}'.format(prefix),
-            '--enable-amd-opt'
-        ]
+        options = ["--prefix={0}".format(prefix), "--enable-amd-opt"]

         # Check if compiler is AOCC
-        if '%aocc' in spec:
-            options.append('CC={0}'.format(os.path.basename(spack_cc)))
-            options.append('FC={0}'.format(os.path.basename(spack_fc)))
-            options.append('F77={0}'.format(os.path.basename(spack_fc)))
+        if "%aocc" in spec:
+            options.append("CC={0}".format(os.path.basename(spack_cc)))
+            options.append("FC={0}".format(os.path.basename(spack_fc)))
+            options.append("F77={0}".format(os.path.basename(spack_fc)))

-        if '+debug' in spec:
-            options.append('--enable-debug')
+        if "+debug" in spec:
+            options.append("--enable-debug")

-        if '+mpi' in spec:
-            options.append('--enable-mpi')
-            options.append('--enable-amd-mpifft')
+        if "+mpi" in spec:
+            options.append("--enable-mpi")
+            options.append("--enable-amd-mpifft")
         else:
-            options.append('--disable-mpi')
-            options.append('--disable-amd-mpifft')
+            options.append("--disable-mpi")
+            options.append("--disable-amd-mpifft")

-        options.extend(self.enable_or_disable('shared'))
-        options.extend(self.enable_or_disable('openmp'))
-        options.extend(self.enable_or_disable('threads'))
-        options.extend(self.enable_or_disable('amd-fast-planner'))
-        options.extend(self.enable_or_disable('amd-top-n-planner'))
-        options.extend(self.enable_or_disable('amd-mpi-vader-limit'))
-        options.extend(self.enable_or_disable('static'))
-        options.extend(self.enable_or_disable('amd-trans'))
-        options.extend(self.enable_or_disable('amd-app-opt'))
+        options.extend(self.enable_or_disable("shared"))
+        options.extend(self.enable_or_disable("openmp"))
+        options.extend(self.enable_or_disable("threads"))
+        options.extend(self.enable_or_disable("amd-fast-planner"))
+        options.extend(self.enable_or_disable("amd-top-n-planner"))
+        options.extend(self.enable_or_disable("amd-mpi-vader-limit"))
+        options.extend(self.enable_or_disable("static"))
+        options.extend(self.enable_or_disable("amd-trans"))
+        options.extend(self.enable_or_disable("amd-app-opt"))

         if not self.compiler.f77 or not self.compiler.fc:
-            options.append('--disable-fortran')
+            options.append("--disable-fortran")

         # Cross compilation is supported in amd-fftw by making use of target
         # variable to set AMD_ARCH configure option.
@@ -226,17 +196,16 @@ class Amdfftw(FftwBase):
         # use target variable to set appropriate -march option in AMD_ARCH.
         arch = spec.architecture
         options.append(
-            'AMD_ARCH={0}'.format(
-                arch.target.optimization_flags(
-                    spec.compiler).split('=')[-1]))
+            "AMD_ARCH={0}".format(arch.target.optimization_flags(spec.compiler).split("=")[-1])
+        )

         # Specific SIMD support.
         # float and double precisions are supported
-        simd_features = ['sse2', 'avx', 'avx2']
+        simd_features = ["sse2", "avx", "avx2"]

         simd_options = []
         for feature in simd_features:
-            msg = '--enable-{0}' if feature in spec.target else '--disable-{0}'
+            msg = "--enable-{0}" if feature in spec.target else "--disable-{0}"
             simd_options.append(msg.format(feature))

         # When enabling configure option "--enable-amd-opt", do not use the
@@ -246,20 +215,19 @@ class Amdfftw(FftwBase):
         # Double is the default precision, for all the others we need
         # to enable the corresponding option.
         enable_precision = {
-            'float': ['--enable-float'],
-            'double': None,
-            'long_double': ['--enable-long-double'],
-            'quad': ['--enable-quad-precision']
+            "float": ["--enable-float"],
+            "double": None,
+            "long_double": ["--enable-long-double"],
+            "quad": ["--enable-quad-precision"],
         }

         # Different precisions must be configured and compiled one at a time
-        configure = Executable('../configure')
+        configure = Executable("../configure")
         for precision in self.selected_precisions:

             opts = (enable_precision[precision] or []) + options[:]

             # SIMD optimizations are available only for float and double
-            if precision in ('float', 'double'):
+            if precision in ("float", "double"):
                 opts += simd_options

             with working_dir(precision, create=True):
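Most of the rewritten `configure()` leans on `enable_or_disable`, which maps a boolean variant to the matching `--enable-X`/`--disable-X` flag. A rough stand-in for what each call contributes, simplified relative to the real helper, which also handles multi-valued variants:

    def enable_or_disable(variants: dict, name: str) -> list:
        # True -> ["--enable-<name>"], False/absent -> ["--disable-<name>"]
        state = "enable" if variants.get(name) else "disable"
        return ["--{0}-{1}".format(state, name)]

    variants = {"shared": True, "openmp": True, "threads": False}  # example values
    options = ["--prefix=/placeholder", "--enable-amd-opt"]
    for name in ("shared", "openmp", "threads"):
        options.extend(enable_or_disable(variants, name))
    print(options)  # [..., '--enable-shared', '--enable-openmp', '--disable-threads']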
@@ -16,21 +16,21 @@ from spack.package import *

 class Llvm(CMakePackage, CudaPackage):
     """The LLVM Project is a collection of modular and reusable compiler and
     toolchain technologies. Despite its name, LLVM has little to do
     with traditional virtual machines, though it does provide helpful
     libraries that can be used to build them. The name "LLVM" itself
     is not an acronym; it is the full name of the project.
     """

     homepage = "https://llvm.org/"
     url = "https://github.com/llvm/llvm-project/archive/llvmorg-7.1.0.tar.gz"
     list_url = "https://releases.llvm.org/download.html"
     git = "https://github.com/llvm/llvm-project"
-    maintainers = ['trws', 'haampie']
+    maintainers("trws", "haampie")

-    tags = ['e4s']
+    tags = ["e4s"]

-    generator = 'Ninja'
+    generator = "Ninja"

     family = "compiler"  # Used by lmod

@@ -80,13 +80,12 @@ class Llvm(CMakePackage, CudaPackage):
     # to save space, build with `build_type=Release`.

     variant(
-        "clang",
-        default=True,
-        description="Build the LLVM C/C++/Objective-C compiler frontend",
+        "clang", default=True, description="Build the LLVM C/C++/Objective-C compiler frontend"
     )
     variant(
         "flang",
-        default=False, when='@11: +clang',
+        default=False,
+        when="@11: +clang",
         description="Build the LLVM Fortran compiler frontend "
         "(experimental - parser only, needs GCC)",
     )
@@ -95,27 +94,23 @@ class Llvm(CMakePackage, CudaPackage):
         default=False,
         description="Include debugging code in OpenMP runtime libraries",
     )
-    variant("lldb", default=True, when='+clang', description="Build the LLVM debugger")
+    variant("lldb", default=True, when="+clang", description="Build the LLVM debugger")
     variant("lld", default=True, description="Build the LLVM linker")
-    variant("mlir", default=False, when='@10:', description="Build with MLIR support")
+    variant("mlir", default=False, when="@10:", description="Build with MLIR support")
     variant(
-        "internal_unwind",
-        default=True, when='+clang',
-        description="Build the libcxxabi libunwind",
+        "internal_unwind", default=True, when="+clang", description="Build the libcxxabi libunwind"
     )
     variant(
         "polly",
         default=True,
-        description="Build the LLVM polyhedral optimization plugin, "
-        "only builds for 3.7.0+",
+        description="Build the LLVM polyhedral optimization plugin, " "only builds for 3.7.0+",
     )
     variant(
-        "libcxx",
-        default=True, when='+clang',
-        description="Build the LLVM C++ standard library",
+        "libcxx", default=True, when="+clang", description="Build the LLVM C++ standard library"
     )
     variant(
-        "compiler-rt", when='+clang',
+        "compiler-rt",
+        when="+clang",
         default=True,
         description="Build LLVM compiler runtime, including sanitizers",
     )
@@ -124,11 +119,7 @@ class Llvm(CMakePackage, CudaPackage):
         default=(sys.platform != "darwin"),
         description="Add support for LTO with the gold linker plugin",
     )
-    variant(
-        "split_dwarf",
-        default=False,
-        description="Build with split dwarf information",
-    )
+    variant("split_dwarf", default=False, description="Build with split dwarf information")
     variant(
         "llvm_dylib",
         default=True,
@@ -136,18 +127,40 @@ class Llvm(CMakePackage, CudaPackage):
     )
     variant(
         "link_llvm_dylib",
-        default=False, when='+llvm_dylib',
+        default=False,
+        when="+llvm_dylib",
         description="Link LLVM tools against the LLVM shared library",
     )
     variant(
         "targets",
         default="none",
-        description=("What targets to build. Spack's target family is always added "
-                     "(e.g. X86 is automatically enabled when targeting znver2)."),
-        values=("all", "none", "aarch64", "amdgpu", "arm", "avr", "bpf", "cppbackend",
-                "hexagon", "lanai", "mips", "msp430", "nvptx", "powerpc", "riscv",
-                "sparc", "systemz", "webassembly", "x86", "xcore"),
-        multi=True
+        description=(
+            "What targets to build. Spack's target family is always added "
+            "(e.g. X86 is automatically enabled when targeting znver2)."
+        ),
+        values=(
+            "all",
+            "none",
+            "aarch64",
+            "amdgpu",
+            "arm",
+            "avr",
+            "bpf",
+            "cppbackend",
+            "hexagon",
+            "lanai",
+            "mips",
+            "msp430",
+            "nvptx",
+            "powerpc",
+            "riscv",
+            "sparc",
+            "systemz",
+            "webassembly",
+            "x86",
+            "xcore",
+        ),
+        multi=True,
     )
     variant(
         "build_type",
@@ -157,51 +170,52 @@ class Llvm(CMakePackage, CudaPackage):
     )
     variant(
         "omp_tsan",
-        default=False, when='@6:',
+        default=False,
+        when="@6:",
         description="Build with OpenMP capable thread sanitizer",
     )
     variant(
         "omp_as_runtime",
         default=True,
-        when='+clang @12:',
+        when="+clang @12:",
         description="Build OpenMP runtime via ENABLE_RUNTIME by just-built Clang",
     )
-    variant('code_signing', default=False,
-            when='+lldb platform=darwin',
-            description="Enable code-signing on macOS")
-    variant("python", default=False, description="Install python bindings")
-    variant('version_suffix', default='none', description="Add a symbol suffix")
     variant(
-        'shlib_symbol_version',
-        default='none',
+        "code_signing",
+        default=False,
+        when="+lldb platform=darwin",
+        description="Enable code-signing on macOS",
+    )
+    variant("python", default=False, description="Install python bindings")
+    variant("version_suffix", default="none", description="Add a symbol suffix")
+    variant(
+        "shlib_symbol_version",
+        default="none",
         description="Add shared library symbol version",
-        when='@13:'
+        when="@13:",
     )
     variant(
-        'z3',
-        default=False,
-        when='+clang @8:',
-        description='Use Z3 for the clang static analyzer'
+        "z3", default=False, when="+clang @8:", description="Use Z3 for the clang static analyzer"
     )

-    provides('libllvm@14', when='@14.0.0:14')
-    provides('libllvm@13', when='@13.0.0:13')
-    provides('libllvm@12', when='@12.0.0:12')
-    provides('libllvm@11', when='@11.0.0:11')
-    provides('libllvm@10', when='@10.0.0:10')
-    provides('libllvm@9', when='@9.0.0:9')
-    provides('libllvm@8', when='@8.0.0:8')
-    provides('libllvm@7', when='@7.0.0:7')
-    provides('libllvm@6', when='@6.0.0:6')
-    provides('libllvm@5', when='@5.0.0:5')
-    provides('libllvm@4', when='@4.0.0:4')
-    provides('libllvm@3', when='@3.0.0:3')
+    provides("libllvm@14", when="@14.0.0:14")
+    provides("libllvm@13", when="@13.0.0:13")
+    provides("libllvm@12", when="@12.0.0:12")
+    provides("libllvm@11", when="@11.0.0:11")
+    provides("libllvm@10", when="@10.0.0:10")
+    provides("libllvm@9", when="@9.0.0:9")
+    provides("libllvm@8", when="@8.0.0:8")
+    provides("libllvm@7", when="@7.0.0:7")
+    provides("libllvm@6", when="@6.0.0:6")
+    provides("libllvm@5", when="@5.0.0:5")
+    provides("libllvm@4", when="@4.0.0:4")
+    provides("libllvm@3", when="@3.0.0:3")

     extends("python", when="+python")

     # Build dependency
     depends_on("cmake@3.4.3:", type="build")
-    depends_on('cmake@3.13.4:', type='build', when='@12:')
+    depends_on("cmake@3.13.4:", type="build", when="@12:")
     depends_on("ninja", type="build")
     depends_on("python@2.7:2.8", when="@:4 ~python", type="build")
     depends_on("python", when="@5: ~python", type="build")
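Several of the variants above are conditional: the `when=` argument makes the variant exist only where the condition holds, replacing the older pattern of unconditional variants guarded by separate `conflicts()` declarations. Illustrative spec strings follow; constructing a `Spec` is cheap, while actually concretizing these would need a full Spack instance:

    import spack.spec

    # 'flang' is declared with when="@11: +clang", so it is a valid request here...
    ok = spack.spec.Spec("llvm@13 +clang +flang")

    # ...but asking for it on llvm@9 should fail at concretization time,
    # because the variant does not exist for that version
    bad = spack.spec.Spec("llvm@9 +flang")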
@@ -242,7 +256,7 @@ class Llvm(CMakePackage, CudaPackage):
     # clang/lib: a lambda parameter cannot shadow an explicitly captured entity
     conflicts("%clang@8:", when="@:4")
     # Internal compiler error on gcc 8.4 on aarch64 https://bugzilla.redhat.com/show_bug.cgi?id=1958295
-    conflicts('%gcc@8.4:8.4.9', when='@12: target=aarch64:')
+    conflicts("%gcc@8.4:8.4.9", when="@12: target=aarch64:")

     # When these versions are concretized, but not explicitly with +libcxx, these
     # conflicts will enable clingo to set ~libcxx, making the build successful:
@@ -252,17 +266,17 @@ class Llvm(CMakePackage, CudaPackage):
     # GCC 11 - latest stable release per GCC release page
     # Clang: 11, 12 - latest two stable releases per LLVM release page
     # AppleClang 12 - latest stable release per Xcode release page
     conflicts("%gcc@:10", when="@13:+libcxx")
     conflicts("%clang@:10", when="@13:+libcxx")
     conflicts("%apple-clang@:11", when="@13:+libcxx")

     # libcxx-4 and compiler-rt-4 fail to build with "newer" clang and gcc versions:
-    conflicts('%gcc@7:', when='@:4+libcxx')
-    conflicts('%clang@6:', when='@:4+libcxx')
-    conflicts('%apple-clang@6:', when='@:4+libcxx')
-    conflicts('%gcc@7:', when='@:4+compiler-rt')
-    conflicts('%clang@6:', when='@:4+compiler-rt')
-    conflicts('%apple-clang@6:', when='@:4+compiler-rt')
+    conflicts("%gcc@7:", when="@:4+libcxx")
+    conflicts("%clang@6:", when="@:4+libcxx")
+    conflicts("%apple-clang@6:", when="@:4+libcxx")
+    conflicts("%gcc@7:", when="@:4+compiler-rt")
+    conflicts("%clang@6:", when="@:4+compiler-rt")
+    conflicts("%apple-clang@6:", when="@:4+compiler-rt")

     # cuda_arch value must be specified
     conflicts("cuda_arch=none", when="+cuda", msg="A value for cuda_arch must be specified.")
@@ -270,27 +284,27 @@ class Llvm(CMakePackage, CudaPackage):
     # LLVM bug https://bugs.llvm.org/show_bug.cgi?id=48234
     # CMake bug: https://gitlab.kitware.com/cmake/cmake/-/issues/21469
     # Fixed in upstream versions of both
-    conflicts('^cmake@3.19.0', when='@6:11.0.0')
+    conflicts("^cmake@3.19.0", when="@6:11.0.0")

     # Github issue #4986
     patch("llvm_gcc7.patch", when="@4.0.0:4.0.1+lldb %gcc@7.0:")

     # sys/ustat.h has been removed in favour of statfs from glibc-2.28. Use fixed sizes:
-    patch('llvm5-sanitizer-ustat.patch', when="@4:6.0.0+compiler-rt")
+    patch("llvm5-sanitizer-ustat.patch", when="@4:6.0.0+compiler-rt")

     # Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463
-    patch('llvm4-lld-ELF-Symbols.patch', when="@4+lld%clang@6:")
-    patch('llvm5-lld-ELF-Symbols.patch', when="@5+lld%clang@7:")
+    patch("llvm4-lld-ELF-Symbols.patch", when="@4+lld%clang@6:")
+    patch("llvm5-lld-ELF-Symbols.patch", when="@5+lld%clang@7:")

     # Fix missing std:size_t in 'llvm@4:5' when built with '%clang@7:'
-    patch('xray_buffer_queue-cstddef.patch', when="@4:5+compiler-rt%clang@7:")
+    patch("xray_buffer_queue-cstddef.patch", when="@4:5+compiler-rt%clang@7:")

     # https://github.com/llvm/llvm-project/commit/947f9692440836dcb8d88b74b69dd379d85974ce
-    patch('sanitizer-ipc_perm_mode.patch', when="@5:7+compiler-rt%clang@11:")
-    patch('sanitizer-ipc_perm_mode.patch', when="@5:9+compiler-rt%gcc@9:")
+    patch("sanitizer-ipc_perm_mode.patch", when="@5:7+compiler-rt%clang@11:")
+    patch("sanitizer-ipc_perm_mode.patch", when="@5:9+compiler-rt%gcc@9:")

     # github.com/spack/spack/issues/24270: MicrosoftDemangle for %gcc@10: and %clang@13:
-    patch('missing-includes.patch', when='@8')
+    patch("missing-includes.patch", when="@8")

     # Backport from llvm master + additional fix
     # see https://bugs.llvm.org/show_bug.cgi?id=39696
@@ -315,33 +329,33 @@ class Llvm(CMakePackage, CudaPackage):
     patch("llvm_python_path.patch", when="@:11")

     # Workaround for issue https://github.com/spack/spack/issues/18197
-    patch('llvm7_intel.patch', when='@7 %intel@18.0.2,19.0.0:19.1.99')
+    patch("llvm7_intel.patch", when="@7 %intel@18.0.2,19.0.0:19.1.99")

     # Remove cyclades support to build against newer kernel headers
     # https://reviews.llvm.org/D102059
-    patch('no_cyclades.patch', when='@10:12.0.0')
-    patch('no_cyclades9.patch', when='@6:9')
+    patch("no_cyclades.patch", when="@10:12.0.0")
+    patch("no_cyclades9.patch", when="@6:9")

-    patch('llvm-gcc11.patch', when='@9:11%gcc@11:')
+    patch("llvm-gcc11.patch", when="@9:11%gcc@11:")

     # add -lpthread to build OpenMP libraries with Fujitsu compiler
-    patch('llvm12-thread.patch', when='@12 %fj')
-    patch('llvm13-thread.patch', when='@13 %fj')
+    patch("llvm12-thread.patch", when="@12 %fj")
+    patch("llvm13-thread.patch", when="@13 %fj")

     # avoid build failed with Fujitsu compiler
-    patch('llvm13-fujitsu.patch', when='@13 %fj')
+    patch("llvm13-fujitsu.patch", when="@13 %fj")

     # patch for missing hwloc.h include for libompd
-    patch('llvm14-hwloc-ompd.patch', when='@14')
+    patch("llvm14-hwloc-ompd.patch", when="@14")

     # make libflags a list in openmp subproject when ~omp_as_runtime
-    patch('libomp-libflags-as-list.patch', when='@3.7:')
+    patch("libomp-libflags-as-list.patch", when="@3.7:")

     # The functions and attributes below implement external package
     # detection for LLVM. See:
     #
     # https://spack.readthedocs.io/en/latest/packaging_guide.html#making-a-package-discoverable-with-spack-external-find
-    executables = ['clang', 'flang', 'ld.lld', 'lldb']
+    executables = ["clang", "flang", "ld.lld", "lldb"]

     @classmethod
     def filter_detected_exes(cls, prefix, exes_in_prefix):
@@ -351,7 +365,7 @@ class Llvm(CMakePackage, CudaPackage):
             # on some port and would hang Spack during detection.
             # clang-cl and clang-cpp are dev tools that we don't
             # need to test
-            if any(x in exe for x in ('vscode', 'cpp', '-cl', '-gpu')):
+            if any(x in exe for x in ("vscode", "cpp", "-cl", "-gpu")):
                 continue
             result.append(exe)
         return result
@@ -360,20 +374,20 @@ class Llvm(CMakePackage, CudaPackage):
|
|||||||
def determine_version(cls, exe):
|
def determine_version(cls, exe):
|
||||||
version_regex = re.compile(
|
version_regex = re.compile(
|
||||||
# Normal clang compiler versions are left as-is
|
# Normal clang compiler versions are left as-is
|
||||||
r'clang version ([^ )\n]+)-svn[~.\w\d-]*|'
|
r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
|
||||||
# Don't include hyphenated patch numbers in the version
|
# Don't include hyphenated patch numbers in the version
|
||||||
# (see https://github.com/spack/spack/pull/14365 for details)
|
# (see https://github.com/spack/spack/pull/14365 for details)
|
||||||
r'clang version ([^ )\n]+?)-[~.\w\d-]*|'
|
r"clang version ([^ )\n]+?)-[~.\w\d-]*|"
|
||||||
r'clang version ([^ )\n]+)|'
|
r"clang version ([^ )\n]+)|"
|
||||||
# LLDB
|
# LLDB
|
||||||
r'lldb version ([^ )\n]+)|'
|
r"lldb version ([^ )\n]+)|"
|
||||||
# LLD
|
# LLD
|
||||||
r'LLD ([^ )\n]+) \(compatible with GNU linkers\)'
|
r"LLD ([^ )\n]+) \(compatible with GNU linkers\)"
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
compiler = Executable(exe)
|
compiler = Executable(exe)
|
||||||
output = compiler('--version', output=str, error=str)
|
output = compiler("--version", output=str, error=str)
|
||||||
if 'Apple' in output:
|
if "Apple" in output:
|
||||||
return None
|
return None
|
||||||
match = version_regex.search(output)
|
match = version_regex.search(output)
|
||||||
if match:
|
if match:
|
||||||
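The alternation in this regex is order-sensitive: the -svn and hyphenated branches are tried before the plain-release branch, so vendor suffixes are stripped from the captured version. A small sketch of the behaviour, with made-up version strings:

    import re

    version_regex = re.compile(
        r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
        r"clang version ([^ )\n]+?)-[~.\w\d-]*|"
        r"clang version ([^ )\n]+)|"
        r"lldb version ([^ )\n]+)|"
        r"LLD ([^ )\n]+) \(compatible with GNU linkers\)"
    )
    # Plain release: the third branch captures the whole version token.
    print(version_regex.search("clang version 13.0.0").group(3))  # 13.0.0
    # Vendor suffix: the second branch drops everything after the hyphen.
    print(version_regex.search("clang version 10.0.0-4ubuntu1").group(2))  # 10.0.0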
@@ -387,38 +401,39 @@ class Llvm(CMakePackage, CudaPackage):
 
     @classmethod
     def determine_variants(cls, exes, version_str):
-        variants, compilers = ['+clang'], {}
+        variants, compilers = ["+clang"], {}
         lld_found, lldb_found = False, False
         for exe in exes:
-            if 'clang++' in exe:
-                compilers['cxx'] = exe
-            elif 'clang' in exe:
-                compilers['c'] = exe
-            elif 'flang' in exe:
-                variants.append('+flang')
-                compilers['fc'] = exe
-                compilers['f77'] = exe
-            elif 'ld.lld' in exe:
+            if "clang++" in exe:
+                compilers["cxx"] = exe
+            elif "clang" in exe:
+                compilers["c"] = exe
+            elif "flang" in exe:
+                variants.append("+flang")
+                compilers["fc"] = exe
+                compilers["f77"] = exe
+            elif "ld.lld" in exe:
                 lld_found = True
-                compilers['ld'] = exe
-            elif 'lldb' in exe:
+                compilers["ld"] = exe
+            elif "lldb" in exe:
                 lldb_found = True
-                compilers['lldb'] = exe
+                compilers["lldb"] = exe
 
-        variants.append('+lld' if lld_found else '~lld')
-        variants.append('+lldb' if lldb_found else '~lldb')
+        variants.append("+lld" if lld_found else "~lld")
+        variants.append("+lldb" if lldb_found else "~lldb")
 
-        return ''.join(variants), {'compilers': compilers}
+        return "".join(variants), {"compilers": compilers}
 
     @classmethod
     def validate_detected_spec(cls, spec, extra_attributes):
         # For LLVM 'compilers' is a mandatory attribute
-        msg = ('the extra attribute "compilers" must be set for '
-               'the detected spec "{0}"'.format(spec))
-        assert 'compilers' in extra_attributes, msg
-        compilers = extra_attributes['compilers']
-        for key in ('c', 'cxx'):
-            msg = '{0} compiler not found for {1}'
+        msg = 'the extra attribute "compilers" must be set for ' 'the detected spec "{0}"'.format(
+            spec
+        )
+        assert "compilers" in extra_attributes, msg
+        compilers = extra_attributes["compilers"]
+        for key in ("c", "cxx"):
+            msg = "{0} compiler not found for {1}"
             assert key in compilers, msg.format(key, spec)
 
     @property
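A rough sketch of what external detection reports for a prefix that provides clang, clang++ and ld.lld but no flang or lldb (the paths are illustrative, not from the diff):

    exes = ["/usr/bin/clang", "/usr/bin/clang++", "/usr/bin/ld.lld"]
    variants, extra = Llvm.determine_variants(exes, "13.0.0")
    print(variants)  # +clang+lld~lldb
    print(extra["compilers"])
    # {'c': '/usr/bin/clang', 'cxx': '/usr/bin/clang++', 'ld': '/usr/bin/ld.lld'}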
@@ -426,10 +441,10 @@ class Llvm(CMakePackage, CudaPackage):
         msg = "cannot retrieve C compiler [spec is not concrete]"
         assert self.spec.concrete, msg
         if self.spec.external:
-            return self.spec.extra_attributes['compilers'].get('c', None)
+            return self.spec.extra_attributes["compilers"].get("c", None)
         result = None
-        if '+clang' in self.spec:
-            result = os.path.join(self.spec.prefix.bin, 'clang')
+        if "+clang" in self.spec:
+            result = os.path.join(self.spec.prefix.bin, "clang")
         return result
 
     @property
@@ -437,10 +452,10 @@ class Llvm(CMakePackage, CudaPackage):
         msg = "cannot retrieve C++ compiler [spec is not concrete]"
         assert self.spec.concrete, msg
         if self.spec.external:
-            return self.spec.extra_attributes['compilers'].get('cxx', None)
+            return self.spec.extra_attributes["compilers"].get("cxx", None)
         result = None
-        if '+clang' in self.spec:
-            result = os.path.join(self.spec.prefix.bin, 'clang++')
+        if "+clang" in self.spec:
+            result = os.path.join(self.spec.prefix.bin, "clang++")
         return result
 
     @property
@@ -448,10 +463,10 @@ class Llvm(CMakePackage, CudaPackage):
         msg = "cannot retrieve Fortran compiler [spec is not concrete]"
         assert self.spec.concrete, msg
         if self.spec.external:
-            return self.spec.extra_attributes['compilers'].get('fc', None)
+            return self.spec.extra_attributes["compilers"].get("fc", None)
         result = None
-        if '+flang' in self.spec:
-            result = os.path.join(self.spec.prefix.bin, 'flang')
+        if "+flang" in self.spec:
+            result = os.path.join(self.spec.prefix.bin, "flang")
         return result
 
     @property
@@ -459,27 +474,25 @@ class Llvm(CMakePackage, CudaPackage):
         msg = "cannot retrieve Fortran 77 compiler [spec is not concrete]"
         assert self.spec.concrete, msg
         if self.spec.external:
-            return self.spec.extra_attributes['compilers'].get('f77', None)
+            return self.spec.extra_attributes["compilers"].get("f77", None)
         result = None
-        if '+flang' in self.spec:
-            result = os.path.join(self.spec.prefix.bin, 'flang')
+        if "+flang" in self.spec:
+            result = os.path.join(self.spec.prefix.bin, "flang")
         return result
 
     @property
     def libs(self):
-        return LibraryList(self.llvm_config("--libfiles", "all",
-                                            result="list"))
+        return LibraryList(self.llvm_config("--libfiles", "all", result="list"))
 
-    @run_before('cmake')
+    @run_before("cmake")
     def codesign_check(self):
         if self.spec.satisfies("+code_signing"):
-            codesign = which('codesign')
-            mkdir('tmp')
-            llvm_check_file = join_path('tmp', 'llvm_check')
-            copy('/usr/bin/false', llvm_check_file)
+            codesign = which("codesign")
+            mkdir("tmp")
+            llvm_check_file = join_path("tmp", "llvm_check")
+            copy("/usr/bin/false", llvm_check_file)
             try:
-                codesign('-f', '-s', 'lldb_codesign', '--dryrun',
-                         llvm_check_file)
+                codesign("-f", "-s", "lldb_codesign", "--dryrun", llvm_check_file)
 
             except ProcessError:
                 # Newer LLVM versions have a simple script that sets up
@@ -489,32 +502,32 @@ class Llvm(CMakePackage, CudaPackage):
                     setup()
                 except Exception:
                     raise RuntimeError(
-                        'spack was unable to either find or set up'
-                        'code-signing on your system. Please refer to'
-                        'https://lldb.llvm.org/resources/build.html#'
-                        'code-signing-on-macos for details on how to'
-                        'create this identity.'
+                        "spack was unable to either find or set up"
+                        "code-signing on your system. Please refer to"
+                        "https://lldb.llvm.org/resources/build.html#"
+                        "code-signing-on-macos for details on how to"
+                        "create this identity."
                     )
 
     def flag_handler(self, name, flags):
-        if name == 'cxxflags':
+        if name == "cxxflags":
             flags.append(self.compiler.cxx11_flag)
-            return(None, flags, None)
-        elif name == 'ldflags' and self.spec.satisfies('%intel'):
-            flags.append('-shared-intel')
-            return(None, flags, None)
-        return(flags, None, None)
+            return (None, flags, None)
+        elif name == "ldflags" and self.spec.satisfies("%intel"):
+            flags.append("-shared-intel")
+            return (None, flags, None)
+        return (flags, None, None)
 
     def setup_build_environment(self, env):
         """When using %clang, add only its ld.lld-$ver and/or ld.lld to our PATH"""
-        if self.compiler.name in ['clang', 'apple-clang']:
-            for lld in 'ld.lld-{0}'.format(self.compiler.version.version[0]), 'ld.lld':
+        if self.compiler.name in ["clang", "apple-clang"]:
+            for lld in "ld.lld-{0}".format(self.compiler.version.version[0]), "ld.lld":
                 bin = os.path.join(os.path.dirname(self.compiler.cc), lld)
-                sym = os.path.join(self.stage.path, 'ld.lld')
+                sym = os.path.join(self.stage.path, "ld.lld")
                 if os.path.exists(bin) and not os.path.exists(sym):
                     mkdirp(self.stage.path)
                     os.symlink(bin, sym)
-            env.prepend_path('PATH', self.stage.path)
+            env.prepend_path("PATH", self.stage.path)
 
     def setup_run_environment(self, env):
         if "+clang" in self.spec:
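For context, Spack's flag_handler protocol returns a three-slot tuple that controls where flags are routed; the method above moves cxxflags (and, for Intel, ldflags) out of the compiler wrappers and into the build environment. A minimal sketch of the convention:

    def flag_handler(self, name, flags):
        return (
            flags,  # slot 1: injected through Spack's compiler wrappers
            None,   # slot 2: exported as environment variables (CFLAGS, CXXFLAGS, ...)
            None,   # slot 3: handed to the build system (e.g. as CMake arguments)
        )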
@@ -531,7 +544,7 @@ class Llvm(CMakePackage, CudaPackage):
         define = CMakePackage.define
         from_variant = self.define_from_variant
 
-        python = spec['python']
+        python = spec["python"]
         cmake_args = [
             define("LLVM_REQUIRES_RTTI", True),
             define("LLVM_ENABLE_RTTI", True),
@@ -544,14 +557,13 @@ class Llvm(CMakePackage, CudaPackage):
             define("LIBOMP_HWLOC_INSTALL_DIR", spec["hwloc"].prefix),
         ]
 
-        version_suffix = spec.variants['version_suffix'].value
-        if version_suffix != 'none':
-            cmake_args.append(define('LLVM_VERSION_SUFFIX', version_suffix))
+        version_suffix = spec.variants["version_suffix"].value
+        if version_suffix != "none":
+            cmake_args.append(define("LLVM_VERSION_SUFFIX", version_suffix))
 
-        shlib_symbol_version = spec.variants.get('shlib_symbol_version', None)
-        if shlib_symbol_version is not None and shlib_symbol_version.value != 'none':
-            cmake_args.append(define('LLVM_SHLIB_SYMBOL_VERSION',
-                                     shlib_symbol_version.value))
+        shlib_symbol_version = spec.variants.get("shlib_symbol_version", None)
+        if shlib_symbol_version is not None and shlib_symbol_version.value != "none":
+            cmake_args.append(define("LLVM_SHLIB_SYMBOL_VERSION", shlib_symbol_version.value))
 
         if python.version >= Version("3"):
             cmake_args.append(define("Python3_EXECUTABLE", python.command.path))
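The define and define_from_variant helpers used throughout this method translate Python values into CMake -D arguments; roughly (output shapes assumed from Spack's CMakePackage):

    define("LLVM_ENABLE_RTTI", True)        # -> "-DLLVM_ENABLE_RTTI:BOOL=ON"
    define("LLVM_VERSION_SUFFIX", "spack")  # -> "-DLLVM_VERSION_SUFFIX:STRING=spack"
    define("LLVM_ENABLE_PROJECTS", ["clang", "openmp"])
    # -> "-DLLVM_ENABLE_PROJECTS:STRING=clang;openmp"  (lists are ;-joined)
    from_variant("LLVM_ENABLE_Z3_SOLVER", "z3")
    # -> ...:BOOL=ON for a +z3 spec, ...:BOOL=OFF for ~z3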
@@ -562,47 +574,56 @@ class Llvm(CMakePackage, CudaPackage):
         runtimes = []
 
         if "+cuda" in spec:
-            cmake_args.extend([
-                define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
-                define("LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
-                       ",".join(spec.variants["cuda_arch"].value)),
-                define("CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
-                       "sm_{0}".format(spec.variants["cuda_arch"].value[-1])),
-            ])
+            cmake_args.extend(
+                [
+                    define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
+                    define(
+                        "LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
+                        ",".join(spec.variants["cuda_arch"].value),
+                    ),
+                    define(
+                        "CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
+                        "sm_{0}".format(spec.variants["cuda_arch"].value[-1]),
+                    ),
+                ]
+            )
             if "+omp_as_runtime" in spec:
-                cmake_args.extend([
-                    define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
-                    # work around bad libelf detection in libomptarget
-                    define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
-                           spec["libelf"].prefix.include),
-                ])
+                cmake_args.extend(
+                    [
+                        define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
+                        # work around bad libelf detection in libomptarget
+                        define(
+                            "LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include
+                        ),
+                    ]
+                )
         else:
             # still build libomptarget but disable cuda
-            cmake_args.extend([
-                define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
-                define("CUDA_SDK_ROOT_DIR", "IGNORE"),
-                define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
-                define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
-            ])
+            cmake_args.extend(
+                [
+                    define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
+                    define("CUDA_SDK_ROOT_DIR", "IGNORE"),
+                    define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
+                    define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
+                ]
+            )
 
         cmake_args.append(from_variant("LIBOMPTARGET_ENABLE_DEBUG", "omp_debug"))
 
         if "+lldb" in spec:
             projects.append("lldb")
-            cmake_args.append(define('LLDB_ENABLE_LIBEDIT', True))
-            cmake_args.append(define('LLDB_ENABLE_NCURSES', True))
-            cmake_args.append(define('LLDB_ENABLE_LIBXML2', False))
-            if spec.version >= Version('10'):
-                cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", 'python'))
+            cmake_args.append(define("LLDB_ENABLE_LIBEDIT", True))
+            cmake_args.append(define("LLDB_ENABLE_NCURSES", True))
+            cmake_args.append(define("LLDB_ENABLE_LIBXML2", False))
+            if spec.version >= Version("10"):
+                cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", "python"))
             else:
-                cmake_args.append(define("LLDB_DISABLE_PYTHON", '~python' in spec))
+                cmake_args.append(define("LLDB_DISABLE_PYTHON", "~python" in spec))
             if spec.satisfies("@5.0.0: +python"):
                 cmake_args.append(define("LLDB_USE_SYSTEM_SIX", True))
 
         if "+gold" in spec:
-            cmake_args.append(
-                define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include)
-            )
+            cmake_args.append(define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include))
 
         if "+clang" in spec:
             projects.append("clang")
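Assuming a spec such as llvm+cuda+omp_as_runtime cuda_arch=70,80, the two CUDA defines above reduce to:

    capabilities = ["70", "80"]  # spec.variants["cuda_arch"].value
    print(",".join(capabilities))             # 70,80  (compute capability list)
    print("sm_{0}".format(capabilities[-1]))  # sm_80  (default OpenMP offload arch)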
@@ -612,10 +633,10 @@ class Llvm(CMakePackage, CudaPackage):
         else:
             projects.append("openmp")
 
-        if '@8' in spec:
-            cmake_args.append(from_variant('CLANG_ANALYZER_ENABLE_Z3_SOLVER', 'z3'))
-        elif '@9:' in spec:
-            cmake_args.append(from_variant('LLVM_ENABLE_Z3_SOLVER', 'z3'))
+        if "@8" in spec:
+            cmake_args.append(from_variant("CLANG_ANALYZER_ENABLE_Z3_SOLVER", "z3"))
+        elif "@9:" in spec:
+            cmake_args.append(from_variant("LLVM_ENABLE_Z3_SOLVER", "z3"))
 
         if "+flang" in spec:
             projects.append("flang")
@@ -634,26 +655,26 @@ class Llvm(CMakePackage, CudaPackage):
             projects.append("polly")
             cmake_args.append(define("LINK_POLLY_INTO_TOOLS", True))
 
-        cmake_args.extend([
-            define("BUILD_SHARED_LIBS", False),
-            from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
-            from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
-            from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
-            # By default on Linux, libc++.so is a ldscript. CMake fails to add
-            # CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
-            # into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
-            define('LIBCXX_ENABLE_STATIC_ABI_LIBRARY', True)
-        ])
+        cmake_args.extend(
+            [
+                define("BUILD_SHARED_LIBS", False),
+                from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
+                from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
+                from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
+                # By default on Linux, libc++.so is a ldscript. CMake fails to add
+                # CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
+                # into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
+                define("LIBCXX_ENABLE_STATIC_ABI_LIBRARY", True),
+            ]
+        )
 
-        cmake_args.append(define(
-            "LLVM_TARGETS_TO_BUILD",
-            get_llvm_targets_to_build(spec)))
+        cmake_args.append(define("LLVM_TARGETS_TO_BUILD", get_llvm_targets_to_build(spec)))
 
         cmake_args.append(from_variant("LIBOMP_TSAN_SUPPORT", "omp_tsan"))
 
         if self.compiler.name == "gcc":
             compiler = Executable(self.compiler.cc)
-            gcc_output = compiler('-print-search-dirs', output=str, error=str)
+            gcc_output = compiler("-print-search-dirs", output=str, error=str)
 
             for line in gcc_output.splitlines():
                 if line.startswith("install:"):
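The loop above scans gcc's -print-search-dirs output for the install: line; the elided lines between the hunks derive gcc_prefix from that path. A sketch with made-up compiler output:

    gcc_output = """install: /usr/lib/gcc/x86_64-linux-gnu/9/
    programs: =/usr/libexec/gcc/x86_64-linux-gnu/9/
    libraries: =/usr/lib/gcc/x86_64-linux-gnu/9/"""

    for line in gcc_output.splitlines():
        if line.startswith("install:"):
            print(line.split(":", 1)[1].strip())  # /usr/lib/gcc/x86_64-linux-gnu/9/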
@@ -665,7 +686,7 @@ class Llvm(CMakePackage, CudaPackage):
             cmake_args.append(define("GCC_INSTALL_PREFIX", gcc_prefix))
 
         if self.spec.satisfies("~code_signing platform=darwin"):
-            cmake_args.append(define('LLDB_USE_SYSTEM_DEBUGSERVER', True))
+            cmake_args.append(define("LLDB_USE_SYSTEM_DEBUGSERVER", True))
 
         # Semicolon seperated list of projects to enable
         cmake_args.append(define("LLVM_ENABLE_PROJECTS", projects))
@@ -689,20 +710,24 @@ class Llvm(CMakePackage, CudaPackage):
         # rebuild libomptarget to get bytecode runtime library files
         with working_dir(ompdir, create=True):
             cmake_args = [
-                '-G', 'Ninja',
-                define('CMAKE_BUILD_TYPE', spec.variants['build_type'].value),
+                "-G",
+                "Ninja",
+                define("CMAKE_BUILD_TYPE", spec.variants["build_type"].value),
                 define("CMAKE_C_COMPILER", spec.prefix.bin + "/clang"),
                 define("CMAKE_CXX_COMPILER", spec.prefix.bin + "/clang++"),
                 define("CMAKE_INSTALL_PREFIX", spec.prefix),
-                define('CMAKE_PREFIX_PATH', prefix_paths)
+                define("CMAKE_PREFIX_PATH", prefix_paths),
             ]
             cmake_args.extend(self.cmake_args())
-            cmake_args.extend([
-                define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
-                define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
-                       spec["libelf"].prefix.include),
-                self.stage.source_path + "/openmp",
-            ])
+            cmake_args.extend(
+                [
+                    define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
+                    define(
+                        "LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include
+                    ),
+                    self.stage.source_path + "/openmp",
+                ]
+            )
 
             cmake(*cmake_args)
             ninja()
@@ -717,22 +742,22 @@ class Llvm(CMakePackage, CudaPackage):
         install_tree("bin", join_path(self.prefix, "libexec", "llvm"))
 
     def llvm_config(self, *args, **kwargs):
-        lc = Executable(self.prefix.bin.join('llvm-config'))
-        if not kwargs.get('output'):
-            kwargs['output'] = str
+        lc = Executable(self.prefix.bin.join("llvm-config"))
+        if not kwargs.get("output"):
+            kwargs["output"] = str
         ret = lc(*args, **kwargs)
-        if kwargs.get('result') == "list":
+        if kwargs.get("result") == "list":
             return ret.split()
         else:
             return ret
 
 
 def get_llvm_targets_to_build(spec):
-    targets = spec.variants['targets'].value
+    targets = spec.variants["targets"].value
 
     # Build everything?
-    if 'all' in targets:
-        return 'all'
+    if "all" in targets:
+        return "all"
 
     # Convert targets variant values to CMake LLVM_TARGETS_TO_BUILD array.
     spack_to_cmake = {
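Usage sketch for the llvm_config helper above (pkg is a hypothetical handle to an installed llvm package):

    libfiles = pkg.llvm_config("--libfiles", "all", result="list")
    # runs <prefix>/bin/llvm-config --libfiles all and returns the split() output
    raw = pkg.llvm_config("--version")
    # without result="list", the raw captured string is returned instead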
@@ -753,10 +778,10 @@ def get_llvm_targets_to_build(spec):
         "systemz": "SystemZ",
         "webassembly": "WebAssembly",
         "x86": "X86",
-        "xcore": "XCore"
+        "xcore": "XCore",
     }
 
-    if 'none' in targets:
+    if "none" in targets:
         llvm_targets = set()
     else:
         llvm_targets = set(spack_to_cmake[target] for target in targets)
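Assuming the elided part of the table maps 'nvptx' to 'NVPTX', a targets=x86,nvptx variant value would translate as:

    spack_to_cmake = {"x86": "X86", "nvptx": "NVPTX"}  # excerpt, assumed
    llvm_targets = set(spack_to_cmake[t] for t in ("x86", "nvptx"))
    print(sorted(llvm_targets))  # ['NVPTX', 'X86']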
@@ -22,127 +22,140 @@ class PyTorch(PythonPackage, CudaPackage):
     with strong GPU acceleration."""
 
     homepage = "https://pytorch.org/"
     git = "https://github.com/pytorch/pytorch.git"
 
-    maintainers = ['adamjstewart']
+    maintainers("adamjstewart")
 
     # Exact set of modules is version- and variant-specific, just attempt to import the
     # core libraries to ensure that the package was successfully installed.
-    import_modules = ['torch', 'torch.autograd', 'torch.nn', 'torch.utils']
+    import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"]
 
-    version('master', branch='master', submodules=True)
-    version('1.10.1', tag='v1.10.1', submodules=True)
-    version('1.10.0', tag='v1.10.0', submodules=True)
-    version('1.9.1', tag='v1.9.1', submodules=True)
-    version('1.9.0', tag='v1.9.0', submodules=True)
-    version('1.8.2', tag='v1.8.2', submodules=True)
-    version('1.8.1', tag='v1.8.1', submodules=True)
-    version('1.8.0', tag='v1.8.0', submodules=True)
-    version('1.7.1', tag='v1.7.1', submodules=True)
-    version('1.7.0', tag='v1.7.0', submodules=True)
-    version('1.6.0', tag='v1.6.0', submodules=True)
-    version('1.5.1', tag='v1.5.1', submodules=True)
-    version('1.5.0', tag='v1.5.0', submodules=True)
-    version('1.4.1', tag='v1.4.1', submodules=True)
-    version('1.4.0', tag='v1.4.0', submodules=True, deprecated=True,
-            submodules_delete=['third_party/fbgemm'])
-    version('1.3.1', tag='v1.3.1', submodules=True)
-    version('1.3.0', tag='v1.3.0', submodules=True)
-    version('1.2.0', tag='v1.2.0', submodules=True)
-    version('1.1.0', tag='v1.1.0', submodules=True)
-    version('1.0.1', tag='v1.0.1', submodules=True)
-    version('1.0.0', tag='v1.0.0', submodules=True)
-    version('0.4.1', tag='v0.4.1', submodules=True, deprecated=True,
-            submodules_delete=['third_party/nervanagpu'])
-    version('0.4.0', tag='v0.4.0', submodules=True, deprecated=True)
-    version('0.3.1', tag='v0.3.1', submodules=True, deprecated=True)
+    version("master", branch="master", submodules=True)
+    version("1.10.1", tag="v1.10.1", submodules=True)
+    version("1.10.0", tag="v1.10.0", submodules=True)
+    version("1.9.1", tag="v1.9.1", submodules=True)
+    version("1.9.0", tag="v1.9.0", submodules=True)
+    version("1.8.2", tag="v1.8.2", submodules=True)
+    version("1.8.1", tag="v1.8.1", submodules=True)
+    version("1.8.0", tag="v1.8.0", submodules=True)
+    version("1.7.1", tag="v1.7.1", submodules=True)
+    version("1.7.0", tag="v1.7.0", submodules=True)
+    version("1.6.0", tag="v1.6.0", submodules=True)
+    version("1.5.1", tag="v1.5.1", submodules=True)
+    version("1.5.0", tag="v1.5.0", submodules=True)
+    version("1.4.1", tag="v1.4.1", submodules=True)
+    version(
+        "1.4.0",
+        tag="v1.4.0",
+        submodules=True,
+        deprecated=True,
+        submodules_delete=["third_party/fbgemm"],
+    )
+    version("1.3.1", tag="v1.3.1", submodules=True)
+    version("1.3.0", tag="v1.3.0", submodules=True)
+    version("1.2.0", tag="v1.2.0", submodules=True)
+    version("1.1.0", tag="v1.1.0", submodules=True)
+    version("1.0.1", tag="v1.0.1", submodules=True)
+    version("1.0.0", tag="v1.0.0", submodules=True)
+    version(
+        "0.4.1",
+        tag="v0.4.1",
+        submodules=True,
+        deprecated=True,
+        submodules_delete=["third_party/nervanagpu"],
+    )
+    version("0.4.0", tag="v0.4.0", submodules=True, deprecated=True)
+    version("0.3.1", tag="v0.3.1", submodules=True, deprecated=True)
 
-    is_darwin = sys.platform == 'darwin'
+    is_darwin = sys.platform == "darwin"
 
     # All options are defined in CMakeLists.txt.
     # Some are listed in setup.py, but not all.
-    variant('caffe2', default=True, description='Build Caffe2')
-    variant('test', default=False, description='Build C++ test binaries')
-    variant('cuda', default=not is_darwin, description='Use CUDA')
-    variant('rocm', default=False, description='Use ROCm')
-    variant('cudnn', default=not is_darwin, description='Use cuDNN')
-    variant('fbgemm', default=True, description='Use FBGEMM (quantized 8-bit server operators)')
-    variant('kineto', default=True, description='Use Kineto profiling library')
-    variant('magma', default=not is_darwin, description='Use MAGMA')
-    variant('metal', default=is_darwin, description='Use Metal for Caffe2 iOS build')
-    variant('nccl', default=not is_darwin, description='Use NCCL')
-    variant('nnpack', default=True, description='Use NNPACK')
-    variant('numa', default=not is_darwin, description='Use NUMA')
-    variant('numpy', default=True, description='Use NumPy')
-    variant('openmp', default=True, description='Use OpenMP for parallel code')
-    variant('qnnpack', default=True, description='Use QNNPACK (quantized 8-bit operators)')
-    variant('valgrind', default=not is_darwin, description='Use Valgrind')
-    variant('xnnpack', default=True, description='Use XNNPACK')
-    variant('mkldnn', default=True, description='Use MKLDNN')
-    variant('distributed', default=not is_darwin, description='Use distributed')
-    variant('mpi', default=not is_darwin, description='Use MPI for Caffe2')
-    variant('gloo', default=not is_darwin, description='Use Gloo')
-    variant('tensorpipe', default=not is_darwin, description='Use TensorPipe')
-    variant('onnx_ml', default=True, description='Enable traditional ONNX ML API')
-    variant('breakpad', default=True, description='Enable breakpad crash dump library')
+    variant("caffe2", default=True, description="Build Caffe2")
+    variant("test", default=False, description="Build C++ test binaries")
+    variant("cuda", default=not is_darwin, description="Use CUDA")
+    variant("rocm", default=False, description="Use ROCm")
+    variant("cudnn", default=not is_darwin, description="Use cuDNN")
+    variant("fbgemm", default=True, description="Use FBGEMM (quantized 8-bit server operators)")
+    variant("kineto", default=True, description="Use Kineto profiling library")
+    variant("magma", default=not is_darwin, description="Use MAGMA")
+    variant("metal", default=is_darwin, description="Use Metal for Caffe2 iOS build")
+    variant("nccl", default=not is_darwin, description="Use NCCL")
+    variant("nnpack", default=True, description="Use NNPACK")
+    variant("numa", default=not is_darwin, description="Use NUMA")
+    variant("numpy", default=True, description="Use NumPy")
+    variant("openmp", default=True, description="Use OpenMP for parallel code")
+    variant("qnnpack", default=True, description="Use QNNPACK (quantized 8-bit operators)")
+    variant("valgrind", default=not is_darwin, description="Use Valgrind")
+    variant("xnnpack", default=True, description="Use XNNPACK")
+    variant("mkldnn", default=True, description="Use MKLDNN")
+    variant("distributed", default=not is_darwin, description="Use distributed")
+    variant("mpi", default=not is_darwin, description="Use MPI for Caffe2")
+    variant("gloo", default=not is_darwin, description="Use Gloo")
+    variant("tensorpipe", default=not is_darwin, description="Use TensorPipe")
+    variant("onnx_ml", default=True, description="Enable traditional ONNX ML API")
+    variant("breakpad", default=True, description="Enable breakpad crash dump library")
 
-    conflicts('+cuda', when='+rocm')
-    conflicts('+cudnn', when='~cuda')
-    conflicts('+magma', when='~cuda')
-    conflicts('+nccl', when='~cuda~rocm')
-    conflicts('+nccl', when='platform=darwin')
-    conflicts('+numa', when='platform=darwin', msg='Only available on Linux')
-    conflicts('+valgrind', when='platform=darwin', msg='Only available on Linux')
-    conflicts('+mpi', when='~distributed')
-    conflicts('+gloo', when='~distributed')
-    conflicts('+tensorpipe', when='~distributed')
-    conflicts('+kineto', when='@:1.7')
-    conflicts('+valgrind', when='@:1.7')
-    conflicts('~caffe2', when='@0.4.0:1.6')  # no way to disable caffe2?
-    conflicts('+caffe2', when='@:0.3.1')  # caffe2 did not yet exist?
-    conflicts('+tensorpipe', when='@:1.5')
-    conflicts('+xnnpack', when='@:1.4')
-    conflicts('~onnx_ml', when='@:1.4')  # no way to disable ONNX?
-    conflicts('+rocm', when='@:0.4')
-    conflicts('+cudnn', when='@:0.4')
-    conflicts('+fbgemm', when='@:0.4,1.4.0')
-    conflicts('+qnnpack', when='@:0.4')
-    conflicts('+mkldnn', when='@:0.4')
-    conflicts('+breakpad', when='@:1.9')  # Option appeared in 1.10.0
-    conflicts('+breakpad', when='target=ppc64:', msg='Unsupported')
-    conflicts('+breakpad', when='target=ppc64le:', msg='Unsupported')
+    conflicts("+cuda", when="+rocm")
+    conflicts("+cudnn", when="~cuda")
+    conflicts("+magma", when="~cuda")
+    conflicts("+nccl", when="~cuda~rocm")
+    conflicts("+nccl", when="platform=darwin")
+    conflicts("+numa", when="platform=darwin", msg="Only available on Linux")
+    conflicts("+valgrind", when="platform=darwin", msg="Only available on Linux")
+    conflicts("+mpi", when="~distributed")
+    conflicts("+gloo", when="~distributed")
+    conflicts("+tensorpipe", when="~distributed")
+    conflicts("+kineto", when="@:1.7")
+    conflicts("+valgrind", when="@:1.7")
+    conflicts("~caffe2", when="@0.4.0:1.6")  # no way to disable caffe2?
+    conflicts("+caffe2", when="@:0.3.1")  # caffe2 did not yet exist?
+    conflicts("+tensorpipe", when="@:1.5")
+    conflicts("+xnnpack", when="@:1.4")
+    conflicts("~onnx_ml", when="@:1.4")  # no way to disable ONNX?
+    conflicts("+rocm", when="@:0.4")
+    conflicts("+cudnn", when="@:0.4")
+    conflicts("+fbgemm", when="@:0.4,1.4.0")
+    conflicts("+qnnpack", when="@:0.4")
+    conflicts("+mkldnn", when="@:0.4")
+    conflicts("+breakpad", when="@:1.9")  # Option appeared in 1.10.0
+    conflicts("+breakpad", when="target=ppc64:", msg="Unsupported")
+    conflicts("+breakpad", when="target=ppc64le:", msg="Unsupported")
 
-    conflicts('cuda_arch=none', when='+cuda',
-              msg='Must specify CUDA compute capabilities of your GPU, see '
-              'https://developer.nvidia.com/cuda-gpus')
+    conflicts(
+        "cuda_arch=none",
+        when="+cuda",
+        msg="Must specify CUDA compute capabilities of your GPU, see "
+        "https://developer.nvidia.com/cuda-gpus",
+    )
 
     # Required dependencies
-    depends_on('cmake@3.5:', type='build')
+    depends_on("cmake@3.5:", type="build")
     # Use Ninja generator to speed up build times, automatically used if found
-    depends_on('ninja@1.5:', when='@1.1.0:', type='build')
+    depends_on("ninja@1.5:", when="@1.1.0:", type="build")
     # See python_min_version in setup.py
-    depends_on('python@3.6.2:', when='@1.7.1:', type=('build', 'link', 'run'))
-    depends_on('python@3.6.1:', when='@1.6.0:1.7.0', type=('build', 'link', 'run'))
-    depends_on('python@3.5:', when='@1.5.0:1.5', type=('build', 'link', 'run'))
-    depends_on('python@2.7:2.8,3.5:', when='@1.4.0:1.4', type=('build', 'link', 'run'))
-    depends_on('python@2.7:2.8,3.5:3.7', when='@:1.3', type=('build', 'link', 'run'))
-    depends_on('py-setuptools', type=('build', 'run'))
-    depends_on('py-future', when='@1.5:', type=('build', 'run'))
-    depends_on('py-future', when='@1.1: ^python@:2', type=('build', 'run'))
-    depends_on('py-pyyaml', type=('build', 'run'))
-    depends_on('py-typing', when='@0.4: ^python@:3.4', type=('build', 'run'))
-    depends_on('py-typing-extensions', when='@1.7:', type=('build', 'run'))
-    depends_on('py-pybind11@2.6.2', when='@1.8.0:', type=('build', 'link', 'run'))
-    depends_on('py-pybind11@2.3.0', when='@1.1.0:1.7', type=('build', 'link', 'run'))
-    depends_on('py-pybind11@2.2.4', when='@1.0.0:1.0', type=('build', 'link', 'run'))
-    depends_on('py-pybind11@2.2.2', when='@0.4.0:0.4', type=('build', 'link', 'run'))
-    depends_on('py-dataclasses', when='@1.7: ^python@3.6.0:3.6', type=('build', 'run'))
-    depends_on('py-tqdm', type='run')
-    depends_on('py-protobuf', when='@0.4:', type=('build', 'run'))
-    depends_on('protobuf', when='@0.4:')
-    depends_on('blas')
-    depends_on('lapack')
-    depends_on('eigen', when='@0.4:')
+    depends_on("python@3.6.2:", when="@1.7.1:", type=("build", "link", "run"))
+    depends_on("python@3.6.1:", when="@1.6.0:1.7.0", type=("build", "link", "run"))
+    depends_on("python@3.5:", when="@1.5.0:1.5", type=("build", "link", "run"))
+    depends_on("python@2.7:2.8,3.5:", when="@1.4.0:1.4", type=("build", "link", "run"))
+    depends_on("python@2.7:2.8,3.5:3.7", when="@:1.3", type=("build", "link", "run"))
+    depends_on("py-setuptools", type=("build", "run"))
+    depends_on("py-future", when="@1.5:", type=("build", "run"))
+    depends_on("py-future", when="@1.1: ^python@:2", type=("build", "run"))
+    depends_on("py-pyyaml", type=("build", "run"))
+    depends_on("py-typing", when="@0.4: ^python@:3.4", type=("build", "run"))
+    depends_on("py-typing-extensions", when="@1.7:", type=("build", "run"))
+    depends_on("py-pybind11@2.6.2", when="@1.8.0:", type=("build", "link", "run"))
+    depends_on("py-pybind11@2.3.0", when="@1.1.0:1.7", type=("build", "link", "run"))
+    depends_on("py-pybind11@2.2.4", when="@1.0.0:1.0", type=("build", "link", "run"))
+    depends_on("py-pybind11@2.2.2", when="@0.4.0:0.4", type=("build", "link", "run"))
+    depends_on("py-dataclasses", when="@1.7: ^python@3.6.0:3.6", type=("build", "run"))
+    depends_on("py-tqdm", type="run")
+    depends_on("py-protobuf", when="@0.4:", type=("build", "run"))
+    depends_on("protobuf", when="@0.4:")
+    depends_on("blas")
+    depends_on("lapack")
+    depends_on("eigen", when="@0.4:")
     # https://github.com/pytorch/pytorch/issues/60329
     # depends_on('cpuinfo@2020-12-17', when='@1.8.0:')
     # depends_on('cpuinfo@2020-06-11', when='@1.6.0:1.7')
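All of the when= and @ ranges in these directives follow Spack's inclusive range syntax; a few representative readings:

    # @:1.4       any version up to and including the 1.4 series
    # @1.6.0:1.7  from 1.6.0 through the end of the 1.7 series, inclusive
    # @1.8.0:     1.8.0 and newer
    depends_on("python@2.7:2.8,3.5:", when="@1.4.0:1.4", type=("build", "link", "run"))
    # -> Python 2.7-2.8 or >=3.5, but only for py-torch 1.4.x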
@@ -152,30 +165,30 @@ class PyTorch(PythonPackage, CudaPackage):
     # depends_on('sleef@3.4.0_2019-07-30', when='@1.6.0:1.7')
     # https://github.com/Maratyszcza/FP16/issues/18
     # depends_on('fp16@2020-05-14', when='@1.6.0:')
-    depends_on('pthreadpool@2021-04-13', when='@1.9.0:')
-    depends_on('pthreadpool@2020-10-05', when='@1.8.0:1.8')
-    depends_on('pthreadpool@2020-06-15', when='@1.6.0:1.7')
-    depends_on('psimd@2020-05-17', when='@1.6.0:')
-    depends_on('fxdiv@2020-04-17', when='@1.6.0:')
-    depends_on('benchmark', when='@1.6:+test')
+    depends_on("pthreadpool@2021-04-13", when="@1.9.0:")
+    depends_on("pthreadpool@2020-10-05", when="@1.8.0:1.8")
+    depends_on("pthreadpool@2020-06-15", when="@1.6.0:1.7")
+    depends_on("psimd@2020-05-17", when="@1.6.0:")
+    depends_on("fxdiv@2020-04-17", when="@1.6.0:")
+    depends_on("benchmark", when="@1.6:+test")
 
     # Optional dependencies
-    depends_on('cuda@7.5:', when='+cuda', type=('build', 'link', 'run'))
-    depends_on('cuda@9:', when='@1.1:+cuda', type=('build', 'link', 'run'))
-    depends_on('cuda@9.2:', when='@1.6:+cuda', type=('build', 'link', 'run'))
-    depends_on('cudnn@6.0:7', when='@:1.0+cudnn')
-    depends_on('cudnn@7.0:7', when='@1.1.0:1.5+cudnn')
-    depends_on('cudnn@7.0:', when='@1.6.0:+cudnn')
-    depends_on('magma', when='+magma')
-    depends_on('nccl', when='+nccl')
-    depends_on('numactl', when='+numa')
-    depends_on('py-numpy', when='+numpy', type=('build', 'run'))
-    depends_on('llvm-openmp', when='%apple-clang +openmp')
-    depends_on('valgrind', when='+valgrind')
+    depends_on("cuda@7.5:", when="+cuda", type=("build", "link", "run"))
+    depends_on("cuda@9:", when="@1.1:+cuda", type=("build", "link", "run"))
+    depends_on("cuda@9.2:", when="@1.6:+cuda", type=("build", "link", "run"))
+    depends_on("cudnn@6.0:7", when="@:1.0+cudnn")
+    depends_on("cudnn@7.0:7", when="@1.1.0:1.5+cudnn")
+    depends_on("cudnn@7.0:", when="@1.6.0:+cudnn")
+    depends_on("magma", when="+magma")
+    depends_on("nccl", when="+nccl")
+    depends_on("numactl", when="+numa")
+    depends_on("py-numpy", when="+numpy", type=("build", "run"))
+    depends_on("llvm-openmp", when="%apple-clang +openmp")
+    depends_on("valgrind", when="+valgrind")
     # https://github.com/pytorch/pytorch/issues/60332
     # depends_on('xnnpack@2021-02-22', when='@1.8.0:+xnnpack')
     # depends_on('xnnpack@2020-03-23', when='@1.6.0:1.7+xnnpack')
-    depends_on('mpi', when='+mpi')
+    depends_on("mpi", when="+mpi")
     # https://github.com/pytorch/pytorch/issues/60270
     # depends_on('gloo@2021-05-04', when='@1.9.0:+gloo')
     # depends_on('gloo@2020-09-18', when='@1.7.0:1.8+gloo')
@@ -183,31 +196,35 @@ class PyTorch(PythonPackage, CudaPackage):
     # https://github.com/pytorch/pytorch/issues/60331
     # depends_on('onnx@1.8.0_2020-11-03', when='@1.8.0:+onnx_ml')
     # depends_on('onnx@1.7.0_2020-05-31', when='@1.6.0:1.7+onnx_ml')
-    depends_on('mkl', when='+mkldnn')
+    depends_on("mkl", when="+mkldnn")
 
     # Test dependencies
-    depends_on('py-hypothesis', type='test')
-    depends_on('py-six', type='test')
-    depends_on('py-psutil', type='test')
+    depends_on("py-hypothesis", type="test")
+    depends_on("py-six", type="test")
+    depends_on("py-psutil", type="test")
 
     # Fix BLAS being overridden by MKL
     # https://github.com/pytorch/pytorch/issues/60328
-    patch('https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/59220.patch',
-          sha256='e37afffe45cf7594c22050109942370e49983ad772d12ebccf508377dc9dcfc9',
-          when='@1.2.0:')
+    patch(
+        "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/59220.patch",
+        sha256="e37afffe45cf7594c22050109942370e49983ad772d12ebccf508377dc9dcfc9",
+        when="@1.2.0:",
+    )
 
     # Fixes build on older systems with glibc <2.12
-    patch('https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/55063.patch',
-          sha256='e17eaa42f5d7c18bf0d7c37d7b0910127a01ad53fdce3e226a92893356a70395',
-          when='@1.1.0:1.8.1')
+    patch(
+        "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/55063.patch",
+        sha256="e17eaa42f5d7c18bf0d7c37d7b0910127a01ad53fdce3e226a92893356a70395",
+        when="@1.1.0:1.8.1",
+    )
 
     # Fixes CMake configuration error when XNNPACK is disabled
     # https://github.com/pytorch/pytorch/pull/35607
     # https://github.com/pytorch/pytorch/pull/37865
-    patch('xnnpack.patch', when='@1.5.0:1.5')
+    patch("xnnpack.patch", when="@1.5.0:1.5")
 
     # Fixes build error when ROCm is enabled for pytorch-1.5 release
-    patch('rocm.patch', when='@1.5.0:1.5+rocm')
+    patch("rocm.patch", when="@1.5.0:1.5+rocm")
 
     # Fixes fatal error: sleef.h: No such file or directory
     # https://github.com/pytorch/pytorch/pull/35359
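The shape of the patch directive used throughout this hunk, for reference: URL patches must pin a sha256 of the patch file, and when= limits application to matching specs (the URL and digest below are placeholders):

    patch(
        "https://example.com/fix-build.patch",  # hypothetical URL
        sha256="0000000000000000000000000000000000000000000000000000000000000000",
        when="@1.5.0:1.5",  # only applied to the 1.5 series
    )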
@@ -216,47 +233,56 @@ class PyTorch(PythonPackage, CudaPackage):
 
     # Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3
     # https://github.com/pytorch/pytorch/pull/37086
-    patch('https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch',
-          sha256='17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049',
-          when='@1.1:1.5')
+    patch(
+        "https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch",
+        sha256="17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049",
+        when="@1.1:1.5",
+    )
 
     # Removes duplicate definition of getCusparseErrorString
     # https://github.com/pytorch/pytorch/issues/32083
-    patch('cusparseGetErrorString.patch', when='@0.4.1:1.0^cuda@10.1.243:')
+    patch("cusparseGetErrorString.patch", when="@0.4.1:1.0^cuda@10.1.243:")
 
     # Fixes 'FindOpenMP.cmake'
     # to detect openmp settings used by Fujitsu compiler.
-    patch('detect_omp_of_fujitsu_compiler.patch', when='%fj')
+    patch("detect_omp_of_fujitsu_compiler.patch", when="%fj")
 
     # Fix compilation of +distributed~tensorpipe
     # https://github.com/pytorch/pytorch/issues/68002
-    patch('https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch',
-          sha256='e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8', when='@1.10.0+distributed~tensorpipe')
+    patch(
+        "https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch",
+        sha256="e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8",
+        when="@1.10.0+distributed~tensorpipe",
+    )
 
     # Both build and install run cmake/make/make install
     # Only run once to speed up build times
-    phases = ['install']
+    phases = ["install"]
 
     @property
     def libs(self):
-        root = join_path(self.prefix, self.spec['python'].package.site_packages_dir,
-                         'torch', 'lib')
-        return find_libraries('libtorch', root)
+        root = join_path(
+            self.prefix, self.spec["python"].package.site_packages_dir, "torch", "lib"
+        )
+        return find_libraries("libtorch", root)
 
     @property
     def headers(self):
-        root = join_path(self.prefix, self.spec['python'].package.site_packages_dir,
-                         'torch', 'include')
+        root = join_path(
+            self.prefix, self.spec["python"].package.site_packages_dir, "torch", "include"
+        )
         headers = find_all_headers(root)
         headers.directories = [root]
         return headers
 
-    @when('@1.5.0:')
+    @when("@1.5.0:")
     def patch(self):
         # https://github.com/pytorch/pytorch/issues/52208
-        filter_file('torch_global_deps PROPERTIES LINKER_LANGUAGE C',
-                    'torch_global_deps PROPERTIES LINKER_LANGUAGE CXX',
-                    'caffe2/CMakeLists.txt')
+        filter_file(
+            "torch_global_deps PROPERTIES LINKER_LANGUAGE C",
+            "torch_global_deps PROPERTIES LINKER_LANGUAGE CXX",
+            "caffe2/CMakeLists.txt",
+        )
 
     def setup_build_environment(self, env):
         """Set environment variables used to control the build.
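The @when('@1.5.0:') decorator above is Spack's multimethod dispatch: the decorated patch() runs only for specs matching the condition, while other specs fall back to the default implementation. A minimal sketch with a hypothetical package:

    class Example(Package):
        @when("@2:")
        def patch(self):
            filter_file("old", "new", "CMakeLists.txt")  # versions 2 and up only

        def patch(self):
            pass  # default for all other versions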
@@ -269,7 +295,8 @@ class PyTorch(PythonPackage, CudaPackage):
         most flags defined in ``CMakeLists.txt`` can be specified as
         environment variables.
         """
-        def enable_or_disable(variant, keyword='USE', var=None, newer=False):
+
+        def enable_or_disable(variant, keyword="USE", var=None, newer=False):
             """Set environment variable to enable or disable support for a
             particular variant.
 
@@ -284,137 +311,135 @@ class PyTorch(PythonPackage, CudaPackage):
 
             # Version 1.1.0 switched from NO_* to USE_* or BUILD_*
             # But some newer variants have always used USE_* or BUILD_*
-            if self.spec.satisfies('@1.1:') or newer:
-                if '+' + variant in self.spec:
-                    env.set(keyword + '_' + var, 'ON')
+            if self.spec.satisfies("@1.1:") or newer:
+                if "+" + variant in self.spec:
+                    env.set(keyword + "_" + var, "ON")
                 else:
-                    env.set(keyword + '_' + var, 'OFF')
+                    env.set(keyword + "_" + var, "OFF")
             else:
-                if '+' + variant in self.spec:
-                    env.unset('NO_' + var)
+                if "+" + variant in self.spec:
+                    env.unset("NO_" + var)
                 else:
-                    env.set('NO_' + var, 'ON')
+                    env.set("NO_" + var, "ON")
 
         # Build in parallel to speed up build times
-        env.set('MAX_JOBS', make_jobs)
+        env.set("MAX_JOBS", make_jobs)
 
         # Spack logs have trouble handling colored output
-        env.set('COLORIZE_OUTPUT', 'OFF')
+        env.set("COLORIZE_OUTPUT", "OFF")
 
-        if self.spec.satisfies('@0.4:'):
-            enable_or_disable('test', keyword='BUILD')
+        if self.spec.satisfies("@0.4:"):
+            enable_or_disable("test", keyword="BUILD")
 
-        if self.spec.satisfies('@1.7:'):
-            enable_or_disable('caffe2', keyword='BUILD')
+        if self.spec.satisfies("@1.7:"):
+            enable_or_disable("caffe2", keyword="BUILD")
 
-        enable_or_disable('cuda')
-        if '+cuda' in self.spec:
+        enable_or_disable("cuda")
+        if "+cuda" in self.spec:
             # cmake/public/cuda.cmake
             # cmake/Modules_CUDA_fix/upstream/FindCUDA.cmake
-            env.unset('CUDA_ROOT')
-            torch_cuda_arch = ';'.join('{0:.1f}'.format(float(i) / 10.0) for i
-                                       in
-                                       self.spec.variants['cuda_arch'].value)
-            env.set('TORCH_CUDA_ARCH_LIST', torch_cuda_arch)
+            env.unset("CUDA_ROOT")
+            torch_cuda_arch = ";".join(
+                "{0:.1f}".format(float(i) / 10.0) for i in self.spec.variants["cuda_arch"].value
+            )
+            env.set("TORCH_CUDA_ARCH_LIST", torch_cuda_arch)
 
-        enable_or_disable('rocm')
+        enable_or_disable("rocm")
 
-        enable_or_disable('cudnn')
-        if '+cudnn' in self.spec:
+        enable_or_disable("cudnn")
+        if "+cudnn" in self.spec:
             # cmake/Modules_CUDA_fix/FindCUDNN.cmake
-            env.set('CUDNN_INCLUDE_DIR', self.spec['cudnn'].prefix.include)
-            env.set('CUDNN_LIBRARY', self.spec['cudnn'].libs[0])
+            env.set("CUDNN_INCLUDE_DIR", self.spec["cudnn"].prefix.include)
+            env.set("CUDNN_LIBRARY", self.spec["cudnn"].libs[0])
 
-        enable_or_disable('fbgemm')
-        if self.spec.satisfies('@1.8:'):
-            enable_or_disable('kineto')
-        enable_or_disable('magma')
-        enable_or_disable('metal')
-        if self.spec.satisfies('@1.10:'):
-            enable_or_disable('breakpad')
+        enable_or_disable("fbgemm")
+        if self.spec.satisfies("@1.8:"):
+            enable_or_disable("kineto")
+        enable_or_disable("magma")
+        enable_or_disable("metal")
+        if self.spec.satisfies("@1.10:"):
+            enable_or_disable("breakpad")
 
-        enable_or_disable('nccl')
-        if '+nccl' in self.spec:
-            env.set('NCCL_LIB_DIR', self.spec['nccl'].libs.directories[0])
-            env.set('NCCL_INCLUDE_DIR', self.spec['nccl'].prefix.include)
+        enable_or_disable("nccl")
+        if "+nccl" in self.spec:
+            env.set("NCCL_LIB_DIR", self.spec["nccl"].libs.directories[0])
+            env.set("NCCL_INCLUDE_DIR", self.spec["nccl"].prefix.include)
 
         # cmake/External/nnpack.cmake
-        enable_or_disable('nnpack')
+        enable_or_disable("nnpack")
 
-        enable_or_disable('numa')
-        if '+numa' in self.spec:
+        enable_or_disable("numa")
+        if "+numa" in self.spec:
             # cmake/Modules/FindNuma.cmake
-            env.set('NUMA_ROOT_DIR', self.spec['numactl'].prefix)
+            env.set("NUMA_ROOT_DIR", self.spec["numactl"].prefix)
 
         # cmake/Modules/FindNumPy.cmake
-        enable_or_disable('numpy')
+        enable_or_disable("numpy")
         # cmake/Modules/FindOpenMP.cmake
-        enable_or_disable('openmp', newer=True)
-        enable_or_disable('qnnpack')
-        if self.spec.satisfies('@1.3:'):
-            enable_or_disable('qnnpack', var='PYTORCH_QNNPACK')
-        if self.spec.satisfies('@1.8:'):
-            enable_or_disable('valgrind')
-        if self.spec.satisfies('@1.5:'):
-            enable_or_disable('xnnpack')
-        enable_or_disable('mkldnn')
-        enable_or_disable('distributed')
-        enable_or_disable('mpi')
+        enable_or_disable("openmp", newer=True)
+        enable_or_disable("qnnpack")
+        if self.spec.satisfies("@1.3:"):
+            enable_or_disable("qnnpack", var="PYTORCH_QNNPACK")
+        if self.spec.satisfies("@1.8:"):
+            enable_or_disable("valgrind")
+        if self.spec.satisfies("@1.5:"):
+            enable_or_disable("xnnpack")
+        enable_or_disable("mkldnn")
+        enable_or_disable("distributed")
+        enable_or_disable("mpi")
         # cmake/Modules/FindGloo.cmake
-        enable_or_disable('gloo', newer=True)
-        if self.spec.satisfies('@1.6:'):
-            enable_or_disable('tensorpipe')
+        enable_or_disable("gloo", newer=True)
+        if self.spec.satisfies("@1.6:"):
+            enable_or_disable("tensorpipe")
 
-        if '+onnx_ml' in self.spec:
-            env.set('ONNX_ML', 'ON')
+        if "+onnx_ml" in self.spec:
+            env.set("ONNX_ML", "ON")
         else:
-            env.set('ONNX_ML', 'OFF')
+            env.set("ONNX_ML", "OFF")
 
-        if not self.spec.satisfies('@master'):
-            env.set('PYTORCH_BUILD_VERSION', self.version)
-            env.set('PYTORCH_BUILD_NUMBER', 0)
+        if not self.spec.satisfies("@master"):
+            env.set("PYTORCH_BUILD_VERSION", self.version)
+            env.set("PYTORCH_BUILD_NUMBER", 0)
 
         # BLAS to be used by Caffe2
         # Options defined in cmake/Dependencies.cmake and cmake/Modules/FindBLAS.cmake
-        if self.spec['blas'].name == 'atlas':
-            env.set('BLAS', 'ATLAS')
-            env.set('WITH_BLAS', 'atlas')
-        elif self.spec['blas'].name in ['blis', 'amdblis']:
-            env.set('BLAS', 'BLIS')
-            env.set('WITH_BLAS', 'blis')
-        elif self.spec['blas'].name == 'eigen':
-            env.set('BLAS', 'Eigen')
-        elif self.spec['lapack'].name in ['libflame', 'amdlibflame']:
-            env.set('BLAS', 'FLAME')
-            env.set('WITH_BLAS', 'FLAME')
-        elif self.spec['blas'].name in [
-                'intel-mkl', 'intel-parallel-studio', 'intel-oneapi-mkl']:
-            env.set('BLAS', 'MKL')
-            env.set('WITH_BLAS', 'mkl')
-        elif self.spec['blas'].name == 'openblas':
-            env.set('BLAS', 'OpenBLAS')
-            env.set('WITH_BLAS', 'open')
-        elif self.spec['blas'].name == 'veclibfort':
-            env.set('BLAS', 'vecLib')
-            env.set('WITH_BLAS', 'veclib')
+        if self.spec["blas"].name == "atlas":
+            env.set("BLAS", "ATLAS")
+            env.set("WITH_BLAS", "atlas")
+        elif self.spec["blas"].name in ["blis", "amdblis"]:
+            env.set("BLAS", "BLIS")
+            env.set("WITH_BLAS", "blis")
+        elif self.spec["blas"].name == "eigen":
+            env.set("BLAS", "Eigen")
+        elif self.spec["lapack"].name in ["libflame", "amdlibflame"]:
+            env.set("BLAS", "FLAME")
+            env.set("WITH_BLAS", "FLAME")
+        elif self.spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
+            env.set("BLAS", "MKL")
+            env.set("WITH_BLAS", "mkl")
+        elif self.spec["blas"].name == "openblas":
+            env.set("BLAS", "OpenBLAS")
+            env.set("WITH_BLAS", "open")
+        elif self.spec["blas"].name == "veclibfort":
+            env.set("BLAS", "vecLib")
+            env.set("WITH_BLAS", "veclib")
         else:
-            env.set('BLAS', 'Generic')
-            env.set('WITH_BLAS', 'generic')
+            env.set("BLAS", "Generic")
+            env.set("WITH_BLAS", "generic")
 
         # Don't use vendored third-party libraries when possible
-        env.set('BUILD_CUSTOM_PROTOBUF', 'OFF')
-        env.set('USE_SYSTEM_NCCL', 'ON')
-        env.set('USE_SYSTEM_EIGEN_INSTALL', 'ON')
-        if self.spec.satisfies('@0.4:'):
-            env.set('pybind11_DIR', self.spec['py-pybind11'].prefix)
-            env.set('pybind11_INCLUDE_DIR',
-                    self.spec['py-pybind11'].prefix.include)
-        if self.spec.satisfies('@1.10:'):
-            env.set('USE_SYSTEM_PYBIND11', 'ON')
+        env.set("BUILD_CUSTOM_PROTOBUF", "OFF")
+        env.set("USE_SYSTEM_NCCL", "ON")
+        env.set("USE_SYSTEM_EIGEN_INSTALL", "ON")
+        if self.spec.satisfies("@0.4:"):
+            env.set("pybind11_DIR", self.spec["py-pybind11"].prefix)
+            env.set("pybind11_INCLUDE_DIR", self.spec["py-pybind11"].prefix.include)
+        if self.spec.satisfies("@1.10:"):
+            env.set("USE_SYSTEM_PYBIND11", "ON")
         # https://github.com/pytorch/pytorch/issues/60334
         # if self.spec.satisfies('@1.8:'):
         #     env.set('USE_SYSTEM_SLEEF', 'ON')
-        if self.spec.satisfies('@1.6:'):
+        if self.spec.satisfies("@1.6:"):
             # env.set('USE_SYSTEM_LIBS', 'ON')
             # https://github.com/pytorch/pytorch/issues/60329
             # env.set('USE_SYSTEM_CPUINFO', 'ON')
@@ -422,27 +447,26 @@ class PyTorch(PythonPackage, CudaPackage):
|
|||||||
# env.set('USE_SYSTEM_GLOO', 'ON')
|
# env.set('USE_SYSTEM_GLOO', 'ON')
|
||||||
# https://github.com/Maratyszcza/FP16/issues/18
|
# https://github.com/Maratyszcza/FP16/issues/18
|
||||||
# env.set('USE_SYSTEM_FP16', 'ON')
|
# env.set('USE_SYSTEM_FP16', 'ON')
|
||||||
env.set('USE_SYSTEM_PTHREADPOOL', 'ON')
|
env.set("USE_SYSTEM_PTHREADPOOL", "ON")
|
||||||
env.set('USE_SYSTEM_PSIMD', 'ON')
|
env.set("USE_SYSTEM_PSIMD", "ON")
|
||||||
env.set('USE_SYSTEM_FXDIV', 'ON')
|
env.set("USE_SYSTEM_FXDIV", "ON")
|
||||||
env.set('USE_SYSTEM_BENCHMARK', 'ON')
|
env.set("USE_SYSTEM_BENCHMARK", "ON")
|
||||||
# https://github.com/pytorch/pytorch/issues/60331
|
# https://github.com/pytorch/pytorch/issues/60331
|
||||||
# env.set('USE_SYSTEM_ONNX', 'ON')
|
# env.set('USE_SYSTEM_ONNX', 'ON')
|
||||||
# https://github.com/pytorch/pytorch/issues/60332
|
# https://github.com/pytorch/pytorch/issues/60332
|
||||||
# env.set('USE_SYSTEM_XNNPACK', 'ON')
|
# env.set('USE_SYSTEM_XNNPACK', 'ON')
|
||||||
|
|
||||||
@run_before('install')
|
@run_before("install")
|
||||||
def build_amd(self):
|
def build_amd(self):
|
||||||
if '+rocm' in self.spec:
|
if "+rocm" in self.spec:
|
||||||
python(os.path.join('tools', 'amd_build', 'build_amd.py'))
|
python(os.path.join("tools", "amd_build", "build_amd.py"))
|
||||||
|
|
||||||
@run_after('install')
|
@run_after("install")
|
||||||
@on_package_attributes(run_tests=True)
|
@on_package_attributes(run_tests=True)
|
||||||
def install_test(self):
|
def install_test(self):
|
||||||
with working_dir('test'):
|
with working_dir("test"):
|
||||||
python('run_test.py')
|
python("run_test.py")
|
||||||
|
|
||||||
# Tests need to be re-added since `phases` was overridden
|
# Tests need to be re-added since `phases` was overridden
|
||||||
run_after('install')(
|
run_after("install")(PythonPackage._run_default_install_time_test_callbacks)
|
||||||
PythonPackage._run_default_install_time_test_callbacks)
|
run_after("install")(PythonPackage.sanity_check_prefix)
|
||||||
run_after('install')(PythonPackage.sanity_check_prefix)
|
|
||||||
|
|||||||
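Note: the enable_or_disable calls in the hunk above refer to a helper defined earlier in this package file, outside the lines shown. A minimal sketch of what such a helper plausibly does, reconstructed only from the call sites above — the keyword/var/newer semantics and the @1.1: cutoff are assumptions, not part of this diff:

    def enable_or_disable(variant, keyword="USE", var=None, newer=False):
        # Map a Spack variant onto PyTorch's USE_*/NO_* environment toggles.
        # Written as a closure inside setup_build_environment, so `self`
        # and `env` come from the enclosing scope.
        if var is None:
            var = variant.upper()
        # Assumed: PyTorch 1.1 switched from NO_* to USE_*; `newer` marks
        # options that never had a NO_* spelling.
        if self.spec.satisfies("@1.1:") or newer:
            env.set(keyword + "_" + var, "ON" if "+" + variant in self.spec else "OFF")
        else:
            if "+" + variant in self.spec:
                env.unset("NO_" + var)
            else:
                env.set("NO_" + var, "ON")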
File diff suppressed because it is too large
@@ -1399,17 +1399,24 @@ def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, r
|
|||||||
assert out == ""
|
assert out == ""
|
||||||
|
|
||||||
|
|
||||||
def test_print_install_test_log_missing(
|
def test_print_install_test_log_failures(
|
||||||
tmpdir, install_mockery, mock_packages, ensure_debug, capfd
|
tmpdir, install_mockery, mock_packages, ensure_debug, capfd
|
||||||
):
|
):
|
||||||
"""Confirm expected error on attempt to print missing test log file."""
|
"""Confirm expected outputs when there are test failures."""
|
||||||
name = "trivial-install-test-package"
|
name = "trivial-install-test-package"
|
||||||
s = spack.spec.Spec(name).concretized()
|
s = spack.spec.Spec(name).concretized()
|
||||||
pkg = s.package
|
pkg = s.package
|
||||||
|
|
||||||
|
# Missing test log is an error
|
||||||
pkg.run_tests = True
|
pkg.run_tests = True
|
||||||
pkg.tester.test_log_file = str(tmpdir.join("test-log.txt"))
|
pkg.tester.test_log_file = str(tmpdir.join("test-log.txt"))
|
||||||
pkg.tester.add_failure(AssertionError("test"), "test-failure")
|
pkg.tester.add_failure(AssertionError("test"), "test-failure")
|
||||||
spack.installer.print_install_test_log(pkg)
|
spack.installer.print_install_test_log(pkg)
|
||||||
err = capfd.readouterr()[1]
|
err = capfd.readouterr()[1]
|
||||||
assert "no test log file" in err
|
assert "no test log file" in err
|
||||||
|
|
||||||
|
# Having test log results in path being output
|
||||||
|
fs.touch(pkg.tester.test_log_file)
|
||||||
|
spack.installer.print_install_test_log(pkg)
|
||||||
|
out = capfd.readouterr()[0]
|
||||||
|
assert "See test results at" in out
|
||||||
|
|||||||
@@ -8,6 +8,8 @@
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import spack.cmd.modules
|
||||||
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.modules.tcl
|
import spack.modules.tcl
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
@@ -187,3 +189,31 @@ def find_nothing(*args):
|
|||||||
assert module_path
|
assert module_path
|
||||||
|
|
||||||
spack.package_base.PackageBase.uninstall_by_spec(spec)
|
spack.package_base.PackageBase.uninstall_by_spec(spec)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.regression("37649")
|
||||||
|
def test_check_module_set_name(mutable_config):
|
||||||
|
"""Tests that modules set name are validated correctly and an error is reported if the
|
||||||
|
name we require does not exist or is reserved by the configuration."""
|
||||||
|
|
||||||
|
# Minimal modules.yaml config.
|
||||||
|
spack.config.set(
|
||||||
|
"modules",
|
||||||
|
{
|
||||||
|
"prefix_inspections": {"./bin": ["PATH"]},
|
||||||
|
# module sets
|
||||||
|
"first": {},
|
||||||
|
"second": {},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Valid module set name
|
||||||
|
spack.cmd.modules.check_module_set_name("first")
|
||||||
|
|
||||||
|
# Invalid module set names
|
||||||
|
msg = "Valid module set names are"
|
||||||
|
with pytest.raises(spack.config.ConfigError, match=msg):
|
||||||
|
spack.cmd.modules.check_module_set_name("prefix_inspections")
|
||||||
|
|
||||||
|
with pytest.raises(spack.config.ConfigError, match=msg):
|
||||||
|
spack.cmd.modules.check_module_set_name("third")
|
||||||
|
|||||||
@@ -45,6 +45,18 @@ def provider(request):
|
|||||||
|
|
||||||
@pytest.mark.usefixtures("config", "mock_packages")
|
@pytest.mark.usefixtures("config", "mock_packages")
|
||||||
class TestLmod(object):
|
class TestLmod(object):
|
||||||
|
@pytest.mark.regression("37788")
|
||||||
|
@pytest.mark.parametrize("modules_config", ["core_compilers", "core_compilers_at_equal"])
|
||||||
|
def test_layout_for_specs_compiled_with_core_compilers(
|
||||||
|
self, modules_config, module_configuration, factory
|
||||||
|
):
|
||||||
|
"""Tests that specs compiled with core compilers are in the 'Core' folder. Also tests that
|
||||||
|
we can use both ``compiler@version`` and ``compiler@=version`` to specify a core compiler.
|
||||||
|
"""
|
||||||
|
module_configuration(modules_config)
|
||||||
|
module, spec = factory("libelf%clang@12.0.0")
|
||||||
|
assert "Core" in module.layout.available_path_parts
|
||||||
|
|
||||||
def test_file_layout(self, compiler, provider, factory, module_configuration):
|
def test_file_layout(self, compiler, provider, factory, module_configuration):
|
||||||
"""Tests the layout of files in the hierarchy is the one expected."""
|
"""Tests the layout of files in the hierarchy is the one expected."""
|
||||||
module_configuration("complex_hierarchy")
|
module_configuration("complex_hierarchy")
|
||||||
@@ -61,7 +73,7 @@ def test_file_layout(self, compiler, provider, factory, module_configuration):
|
|||||||
# is transformed to r"Core" if the compiler is listed among core
|
# is transformed to r"Core" if the compiler is listed among core
|
||||||
# compilers
|
# compilers
|
||||||
# Check that specs listed as core_specs are transformed to "Core"
|
# Check that specs listed as core_specs are transformed to "Core"
|
||||||
if compiler == "clang@=3.3" or spec_string == "mpich@3.0.1":
|
if compiler == "clang@=12.0.0" or spec_string == "mpich@3.0.1":
|
||||||
assert "Core" in layout.available_path_parts
|
assert "Core" in layout.available_path_parts
|
||||||
else:
|
else:
|
||||||
assert compiler.replace("@=", "/") in layout.available_path_parts
|
assert compiler.replace("@=", "/") in layout.available_path_parts
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ def source_file(tmpdir, is_relocatable):
|
|||||||
src = tmpdir.join("relocatable.c")
|
src = tmpdir.join("relocatable.c")
|
||||||
shutil.copy(template_src, str(src))
|
shutil.copy(template_src, str(src))
|
||||||
else:
|
else:
|
||||||
template_dirs = [os.path.join(spack.paths.test_path, "data", "templates")]
|
template_dirs = (os.path.join(spack.paths.test_path, "data", "templates"),)
|
||||||
env = spack.tengine.make_environment(template_dirs)
|
env = spack.tengine.make_environment(template_dirs)
|
||||||
template = env.get_template("non_relocatable.c")
|
template = env.get_template("non_relocatable.c")
|
||||||
text = template.render({"prefix": spack.store.layout.root})
|
text = template.render({"prefix": spack.store.layout.root})
|
||||||
@@ -173,14 +173,6 @@ def test_ensure_binary_is_relocatable(source_file, is_relocatable):
|
|||||||
assert relocatable == is_relocatable
|
assert relocatable == is_relocatable
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.requires_executables("patchelf", "strings", "file")
|
|
||||||
@skip_unless_linux
|
|
||||||
def test_patchelf_is_relocatable():
|
|
||||||
patchelf = os.path.realpath(spack.relocate._patchelf())
|
|
||||||
assert llnl.util.filesystem.is_exe(patchelf)
|
|
||||||
spack.relocate.ensure_binary_is_relocatable(patchelf)
|
|
||||||
|
|
||||||
|
|
||||||
@skip_unless_linux
|
@skip_unless_linux
|
||||||
def test_ensure_binary_is_relocatable_errors(tmpdir):
|
def test_ensure_binary_is_relocatable_errors(tmpdir):
|
||||||
# The file passed in as argument must exist...
|
# The file passed in as argument must exist...
|
||||||
@@ -241,30 +233,6 @@ def test_normalize_relative_paths(start_path, relative_paths, expected):
|
|||||||
assert normalized == expected
|
assert normalized == expected
|
||||||
|
|
||||||
|
|
||||||
def test_set_elf_rpaths(mock_patchelf):
|
|
||||||
# Try to relocate a mock version of patchelf and check
|
|
||||||
# the call made to patchelf itself
|
|
||||||
patchelf = mock_patchelf("echo $@")
|
|
||||||
rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
|
|
||||||
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
|
|
||||||
|
|
||||||
# Assert that the arguments of the call to patchelf are as expected
|
|
||||||
assert "--force-rpath" in output
|
|
||||||
assert "--set-rpath " + ":".join(rpaths) in output
|
|
||||||
assert patchelf in output
|
|
||||||
|
|
||||||
|
|
||||||
@skip_unless_linux
|
|
||||||
def test_set_elf_rpaths_warning(mock_patchelf):
|
|
||||||
# Mock a failing patchelf command and ensure it warns users
|
|
||||||
patchelf = mock_patchelf("exit 1")
|
|
||||||
rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
|
|
||||||
# To avoid using capfd in order to check if the warning was triggered
|
|
||||||
# here we just check that output is not set
|
|
||||||
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
|
|
||||||
assert output is None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
|
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
|
||||||
@skip_unless_linux
|
@skip_unless_linux
|
||||||
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
|
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
|
||||||
|
|||||||
@@ -660,6 +660,7 @@ def test_spec_formatting(self, default_mock_concretization):
|
|||||||
("{architecture.os}", "", "os", lambda spec: spec.architecture),
|
("{architecture.os}", "", "os", lambda spec: spec.architecture),
|
||||||
("{architecture.target}", "", "target", lambda spec: spec.architecture),
|
("{architecture.target}", "", "target", lambda spec: spec.architecture),
|
||||||
("{prefix}", "", "prefix", lambda spec: spec),
|
("{prefix}", "", "prefix", lambda spec: spec),
|
||||||
|
("{external}", "", "external", lambda spec: spec), # test we print "False"
|
||||||
]
|
]
|
||||||
|
|
||||||
hash_segments = [
|
hash_segments = [
|
||||||
|
|||||||
@@ -71,7 +71,7 @@ def test_template_retrieval(self):
|
|||||||
"""Tests the template retrieval mechanism hooked into config files"""
|
"""Tests the template retrieval mechanism hooked into config files"""
|
||||||
# Check the directories are correct
|
# Check the directories are correct
|
||||||
template_dirs = spack.config.get("config:template_dirs")
|
template_dirs = spack.config.get("config:template_dirs")
|
||||||
template_dirs = [canonicalize_path(x) for x in template_dirs]
|
template_dirs = tuple([canonicalize_path(x) for x in template_dirs])
|
||||||
assert len(template_dirs) == 3
|
assert len(template_dirs) == 3
|
||||||
|
|
||||||
env = tengine.make_environment(template_dirs)
|
env = tengine.make_environment(template_dirs)
|
||||||
|
|||||||
@@ -12,6 +12,7 @@
|
|||||||
|
|
||||||
import spack.install_test
|
import spack.install_test
|
||||||
import spack.spec
|
import spack.spec
|
||||||
|
from spack.install_test import TestStatus
|
||||||
from spack.util.executable import which
|
from spack.util.executable import which
|
||||||
|
|
||||||
|
|
||||||
@@ -20,7 +21,7 @@ def _true(*args, **kwargs):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def ensure_results(filename, expected):
|
def ensure_results(filename, expected, present=True):
|
||||||
assert os.path.exists(filename)
|
assert os.path.exists(filename)
|
||||||
with open(filename, "r") as fd:
|
with open(filename, "r") as fd:
|
||||||
lines = fd.readlines()
|
lines = fd.readlines()
|
||||||
@@ -29,7 +30,10 @@ def ensure_results(filename, expected):
|
|||||||
if expected in line:
|
if expected in line:
|
||||||
have = True
|
have = True
|
||||||
break
|
break
|
||||||
-    assert have
+    if present:
+        assert have, f"Expected '{expected}' in the file"
+    else:
+        assert not have, f"Expected '{expected}' NOT to be in the file"
||||||
|
|
||||||
|
|
||||||
def test_test_log_name(mock_packages, config):
|
def test_test_log_name(mock_packages, config):
|
||||||
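Note: the new `present` keyword (default True) lets ensure_results assert that a marker is absent as well as present. The call sites added later in this diff use it exactly that way:

    ensure_results(test_suite.results_file, "PASSED")
    ensure_results(test_suite.log_file_for_spec(spec), "simple stand-alone")
    ensure_results(test_suite.log_file_for_spec(spec), "standalone-ifc", present=False)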
@@ -78,8 +82,8 @@ def test_write_test_result(mock_packages, mock_test_stage):
|
|||||||
assert spec.name in msg
|
assert spec.name in msg
|
||||||
|
|
||||||
|
|
||||||
def test_test_uninstalled(mock_packages, install_mockery, mock_test_stage):
|
def test_test_not_installed(mock_packages, install_mockery, mock_test_stage):
|
||||||
"""Attempt to perform stand-alone test for uninstalled package."""
|
"""Attempt to perform stand-alone test for not_installed package."""
|
||||||
spec = spack.spec.Spec("trivial-smoke-test").concretized()
|
spec = spack.spec.Spec("trivial-smoke-test").concretized()
|
||||||
test_suite = spack.install_test.TestSuite([spec])
|
test_suite = spack.install_test.TestSuite([spec])
|
||||||
|
|
||||||
@@ -91,10 +95,7 @@ def test_test_uninstalled(mock_packages, install_mockery, mock_test_stage):
|
|||||||
|
|
||||||
 @pytest.mark.parametrize(
     "arguments,status,msg",
-    [
-        ({}, spack.install_test.TestStatus.SKIPPED, "Skipped"),
-        ({"externals": True}, spack.install_test.TestStatus.NO_TESTS, "No tests"),
-    ],
+    [({}, TestStatus.SKIPPED, "Skipped"), ({"externals": True}, TestStatus.NO_TESTS, "No tests")],
 )
||||||
def test_test_external(
|
def test_test_external(
|
||||||
mock_packages, install_mockery, mock_test_stage, monkeypatch, arguments, status, msg
|
mock_packages, install_mockery, mock_test_stage, monkeypatch, arguments, status, msg
|
||||||
@@ -156,6 +157,7 @@ def test_test_spec_passes(mock_packages, install_mockery, mock_test_stage, monke
|
|||||||
|
|
||||||
ensure_results(test_suite.results_file, "PASSED")
|
ensure_results(test_suite.results_file, "PASSED")
|
||||||
ensure_results(test_suite.log_file_for_spec(spec), "simple stand-alone")
|
ensure_results(test_suite.log_file_for_spec(spec), "simple stand-alone")
|
||||||
|
ensure_results(test_suite.log_file_for_spec(spec), "standalone-ifc", present=False)
|
||||||
|
|
||||||
|
|
||||||
def test_get_test_suite():
|
def test_get_test_suite():
|
||||||
@@ -212,8 +214,10 @@ def test_test_functions_pkgless(mock_packages, install_mockery, ensure_debug, ca
|
|||||||
spec = spack.spec.Spec("simple-standalone-test").concretized()
|
spec = spack.spec.Spec("simple-standalone-test").concretized()
|
||||||
fns = spack.install_test.test_functions(spec.package, add_virtuals=True)
|
fns = spack.install_test.test_functions(spec.package, add_virtuals=True)
|
||||||
out = capsys.readouterr()
|
out = capsys.readouterr()
|
||||||
-    assert len(fns) == 1, "Expected only one test function"
-    assert "does not appear to have a package file" in out[1]
+    assert len(fns) == 2, "Expected two test functions"
+    for f in fns:
+        assert f[1].__name__ in ["test_echo", "test_skip"]
+    assert "virtual does not appear to have a package file" in out[1]
||||||
|
|
||||||
|
|
||||||
# TODO: This test should go away when compilers as dependencies is supported
|
# TODO: This test should go away when compilers as dependencies is supported
|
||||||
@@ -301,7 +305,7 @@ def test_test_part_fail(tmpdir, install_mockery_mutable_config, mock_fetch, mock
|
|||||||
|
|
||||||
for part_name, status in pkg.tester.test_parts.items():
|
for part_name, status in pkg.tester.test_parts.items():
|
||||||
assert part_name.endswith(name)
|
assert part_name.endswith(name)
|
||||||
assert status == spack.install_test.TestStatus.FAILED
|
assert status == TestStatus.FAILED
|
||||||
|
|
||||||
|
|
||||||
def test_test_part_pass(install_mockery_mutable_config, mock_fetch, mock_test_stage):
|
def test_test_part_pass(install_mockery_mutable_config, mock_fetch, mock_test_stage):
|
||||||
@@ -317,7 +321,7 @@ def test_test_part_pass(install_mockery_mutable_config, mock_fetch, mock_test_st
|
|||||||
|
|
||||||
for part_name, status in pkg.tester.test_parts.items():
|
for part_name, status in pkg.tester.test_parts.items():
|
||||||
assert part_name.endswith(name)
|
assert part_name.endswith(name)
|
||||||
assert status == spack.install_test.TestStatus.PASSED
|
assert status == TestStatus.PASSED
|
||||||
|
|
||||||
|
|
||||||
def test_test_part_skip(install_mockery_mutable_config, mock_fetch, mock_test_stage):
|
def test_test_part_skip(install_mockery_mutable_config, mock_fetch, mock_test_stage):
|
||||||
@@ -331,7 +335,7 @@ def test_test_part_skip(install_mockery_mutable_config, mock_fetch, mock_test_st
|
|||||||
|
|
||||||
for part_name, status in pkg.tester.test_parts.items():
|
for part_name, status in pkg.tester.test_parts.items():
|
||||||
assert part_name.endswith(name)
|
assert part_name.endswith(name)
|
||||||
assert status == spack.install_test.TestStatus.SKIPPED
|
assert status == TestStatus.SKIPPED
|
||||||
|
|
||||||
|
|
||||||
def test_test_part_missing_exe_fail_fast(
|
def test_test_part_missing_exe_fail_fast(
|
||||||
@@ -354,7 +358,7 @@ def test_test_part_missing_exe_fail_fast(
|
|||||||
assert len(test_parts) == 1
|
assert len(test_parts) == 1
|
||||||
for part_name, status in test_parts.items():
|
for part_name, status in test_parts.items():
|
||||||
assert part_name.endswith(name)
|
assert part_name.endswith(name)
|
||||||
assert status == spack.install_test.TestStatus.FAILED
|
assert status == TestStatus.FAILED
|
||||||
|
|
||||||
|
|
||||||
def test_test_part_missing_exe(
|
def test_test_part_missing_exe(
|
||||||
@@ -375,7 +379,90 @@ def test_test_part_missing_exe(
|
|||||||
assert len(test_parts) == 1
|
assert len(test_parts) == 1
|
||||||
for part_name, status in test_parts.items():
|
for part_name, status in test_parts.items():
|
||||||
assert part_name.endswith(name)
|
assert part_name.endswith(name)
|
||||||
assert status == spack.install_test.TestStatus.FAILED
|
assert status == TestStatus.FAILED
|
||||||
|
|
||||||
|
|
||||||
|
# TODO (embedded test parts): Update this once embedded test part tracking
|
||||||
|
# TODO (embedded test parts): properly handles the nested context managers.
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"current,substatuses,expected",
|
||||||
|
[
|
||||||
|
(TestStatus.PASSED, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.PASSED),
|
||||||
|
(TestStatus.FAILED, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.FAILED),
|
||||||
|
(TestStatus.SKIPPED, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.SKIPPED),
|
||||||
|
(TestStatus.NO_TESTS, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.NO_TESTS),
|
||||||
|
(TestStatus.PASSED, [TestStatus.PASSED, TestStatus.SKIPPED], TestStatus.PASSED),
|
||||||
|
(TestStatus.PASSED, [TestStatus.PASSED, TestStatus.FAILED], TestStatus.FAILED),
|
||||||
|
(TestStatus.PASSED, [TestStatus.SKIPPED, TestStatus.SKIPPED], TestStatus.SKIPPED),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_embedded_test_part_status(
|
||||||
|
install_mockery_mutable_config, mock_fetch, mock_test_stage, current, substatuses, expected
|
||||||
|
):
|
||||||
|
"""Check to ensure the status of the enclosing test part reflects summary of embedded parts."""
|
||||||
|
|
||||||
|
s = spack.spec.Spec("trivial-smoke-test").concretized()
|
||||||
|
pkg = s.package
|
||||||
|
base_name = "test_example"
|
||||||
|
part_name = f"{pkg.__class__.__name__}::{base_name}"
|
||||||
|
|
||||||
|
pkg.tester.test_parts[part_name] = current
|
||||||
|
for i, status in enumerate(substatuses):
|
||||||
|
pkg.tester.test_parts[f"{part_name}_{i}"] = status
|
||||||
|
|
||||||
|
pkg.tester.status(base_name, current)
|
||||||
|
assert pkg.tester.test_parts[part_name] == expected
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"statuses,expected",
|
||||||
|
[
|
||||||
|
([TestStatus.PASSED, TestStatus.PASSED], TestStatus.PASSED),
|
||||||
|
([TestStatus.PASSED, TestStatus.SKIPPED], TestStatus.PASSED),
|
||||||
|
([TestStatus.PASSED, TestStatus.FAILED], TestStatus.FAILED),
|
||||||
|
([TestStatus.SKIPPED, TestStatus.SKIPPED], TestStatus.SKIPPED),
|
||||||
|
([], TestStatus.NO_TESTS),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_write_tested_status(
|
||||||
|
tmpdir, install_mockery_mutable_config, mock_fetch, mock_test_stage, statuses, expected
|
||||||
|
):
|
||||||
|
"""Check to ensure the status of the enclosing test part reflects summary of embedded parts."""
|
||||||
|
s = spack.spec.Spec("trivial-smoke-test").concretized()
|
||||||
|
pkg = s.package
|
||||||
|
for i, status in enumerate(statuses):
|
||||||
|
pkg.tester.test_parts[f"test_{i}"] = status
|
||||||
|
pkg.tester.counts[status] += 1
|
||||||
|
|
||||||
|
pkg.tester.tested_file = tmpdir.join("test-log.txt")
|
||||||
|
pkg.tester.write_tested_status()
|
||||||
|
with open(pkg.tester.tested_file, "r") as f:
|
||||||
|
status = int(f.read().strip("\n"))
|
||||||
|
assert TestStatus(status) == expected
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.regression("37840")
|
||||||
|
def test_write_tested_status_no_repeats(
|
||||||
|
tmpdir, install_mockery_mutable_config, mock_fetch, mock_test_stage
|
||||||
|
):
|
||||||
|
"""Emulate re-running the same stand-alone tests a second time."""
|
||||||
|
s = spack.spec.Spec("trivial-smoke-test").concretized()
|
||||||
|
pkg = s.package
|
||||||
|
statuses = [TestStatus.PASSED, TestStatus.PASSED]
|
||||||
|
for i, status in enumerate(statuses):
|
||||||
|
pkg.tester.test_parts[f"test_{i}"] = status
|
||||||
|
pkg.tester.counts[status] += 1
|
||||||
|
|
||||||
|
pkg.tester.tested_file = tmpdir.join("test-log.txt")
|
||||||
|
pkg.tester.write_tested_status()
|
||||||
|
pkg.tester.write_tested_status()
|
||||||
|
|
||||||
|
# The test should NOT result in a ValueError: invalid literal for int()
|
||||||
|
# with base 10: '2\n2' (i.e., the results being appended instead of
|
||||||
|
# written to the file).
|
||||||
|
with open(pkg.tester.tested_file, "r") as f:
|
||||||
|
status = int(f.read().strip("\n"))
|
||||||
|
assert TestStatus(status) == TestStatus.PASSED
|
||||||
|
|
||||||
|
|
||||||
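Note: test_write_tested_status above pins down how per-part statuses roll up into the single integer written to the tested file. A minimal sketch of that roll-up, consistent with every row of its parametrize table (the function name and counts shape are hypothetical; the real logic lives in spack.install_test):

    from spack.install_test import TestStatus

    def roll_up(counts):
        # Any failure dominates; otherwise any pass; otherwise all-skipped;
        # an empty run means nothing was tested.
        if counts.get(TestStatus.FAILED, 0):
            return TestStatus.FAILED
        if counts.get(TestStatus.PASSED, 0):
            return TestStatus.PASSED
        if counts.get(TestStatus.SKIPPED, 0):
            return TestStatus.SKIPPED
        return TestStatus.NO_TESTS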
def test_check_special_outputs(tmpdir):
|
def test_check_special_outputs(tmpdir):
|
||||||
|
|||||||
@@ -119,7 +119,10 @@ def test_dump_environment(prepare_environment_for_tests, tmpdir):
|
|||||||
dumpfile_path = str(tmpdir.join("envdump.txt"))
|
dumpfile_path = str(tmpdir.join("envdump.txt"))
|
||||||
envutil.dump_environment(dumpfile_path)
|
envutil.dump_environment(dumpfile_path)
|
||||||
with open(dumpfile_path, "r") as dumpfile:
|
with open(dumpfile_path, "r") as dumpfile:
|
||||||
assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
|
if sys.platform == "win32":
|
||||||
|
assert 'set "TEST_ENV_VAR={}"\n'.format(test_paths) in list(dumpfile)
|
||||||
|
else:
|
||||||
|
assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
|
||||||
|
|
||||||
|
|
||||||
def test_reverse_environment_modifications(working_env):
|
def test_reverse_environment_modifications(working_env):
|
||||||
|
|||||||
@@ -171,7 +171,11 @@ def path_put_first(var_name: str, directories: List[Path]):
|
|||||||
BASH_FUNCTION_FINDER = re.compile(r"BASH_FUNC_(.*?)\(\)")
|
BASH_FUNCTION_FINDER = re.compile(r"BASH_FUNC_(.*?)\(\)")
|
||||||
|
|
||||||
|
|
||||||
-def _env_var_to_source_line(var: str, val: str) -> str:
+def _win_env_var_to_set_line(var: str, val: str) -> str:
+    return f'set "{var}={val}"'
+
+
+def _nix_env_var_to_source_line(var: str, val: str) -> str:
|
def _nix_env_var_to_source_line(var: str, val: str) -> str:
|
||||||
if var.startswith("BASH_FUNC"):
|
if var.startswith("BASH_FUNC"):
|
||||||
source_line = "function {fname}{decl}; export -f {fname}".format(
|
source_line = "function {fname}{decl}; export -f {fname}".format(
|
||||||
fname=BASH_FUNCTION_FINDER.sub(r"\1", var), decl=val
|
fname=BASH_FUNCTION_FINDER.sub(r"\1", var), decl=val
|
||||||
@@ -181,6 +185,13 @@ def _env_var_to_source_line(var: str, val: str) -> str:
|
|||||||
return source_line
|
return source_line
|
||||||
|
|
||||||
|
|
||||||
|
+def _env_var_to_source_line(var: str, val: str) -> str:
+    if sys.platform == "win32":
+        return _win_env_var_to_set_line(var, val)
+    else:
+        return _nix_env_var_to_source_line(var, val)
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter(arg_slice=slice(1))
|
@system_path_filter(arg_slice=slice(1))
|
||||||
def dump_environment(path: Path, environment: Optional[MutableMapping[str, str]] = None):
|
def dump_environment(path: Path, environment: Optional[MutableMapping[str, str]] = None):
|
||||||
"""Dump an environment dictionary to a source-able file.
|
"""Dump an environment dictionary to a source-able file.
|
||||||
|
|||||||
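Note: with the dispatch above, dump_environment emits platform-appropriate lines. For a hypothetical variable the two shapes are (the POSIX form matches the assertion in the dump_environment test earlier in this diff):

    _env_var_to_source_line("TEST_ENV_VAR", "/usr/bin")
    # on win32:   set "TEST_ENV_VAR=/usr/bin"
    # elsewhere:  TEST_ENV_VAR=/usr/bin; export TEST_ENV_VAR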
@@ -14,6 +14,26 @@ default:
|
|||||||
SPACK_TARGET_PLATFORM: "linux"
|
SPACK_TARGET_PLATFORM: "linux"
|
||||||
SPACK_TARGET_ARCH: "x86_64_v3"
|
SPACK_TARGET_ARCH: "x86_64_v3"
|
||||||
|
|
||||||
|
.linux_skylake:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_PLATFORM: "linux"
|
||||||
|
SPACK_TARGET_ARCH: "skylake_avx512"
|
||||||
|
|
||||||
|
.linux_icelake:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_PLATFORM: "linux"
|
||||||
|
SPACK_TARGET_ARCH: "icelake"
|
||||||
|
|
||||||
|
.linux_neoverse_n1:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_PLATFORM: "linux"
|
||||||
|
SPACK_TARGET_ARCH: "neoverse_n1"
|
||||||
|
|
||||||
|
.linux_neoverse_v1:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_PLATFORM: "linux"
|
||||||
|
SPACK_TARGET_ARCH: "neoverse_v1"
|
||||||
|
|
||||||
.linux_aarch64:
|
.linux_aarch64:
|
||||||
variables:
|
variables:
|
||||||
SPACK_TARGET_PLATFORM: "linux"
|
SPACK_TARGET_PLATFORM: "linux"
|
||||||
@@ -108,6 +128,38 @@ default:
|
|||||||
extends: [ ".base-job", ".generate-base" ]
|
extends: [ ".base-job", ".generate-base" ]
|
||||||
tags: ["spack", "public", "medium", "x86_64"]
|
tags: ["spack", "public", "medium", "x86_64"]
|
||||||
|
|
||||||
|
.darwin-generate-base:
|
||||||
|
stage: generate
|
||||||
|
script:
|
||||||
|
- export SPACK_DISABLE_LOCAL_CONFIG=1
|
||||||
|
- export SPACK_USER_CACHE_PATH=$(pwd)/_user_cache
|
||||||
|
- uname -a || true
|
||||||
|
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
|
||||||
|
- nproc || true
|
||||||
|
- . "./share/spack/setup-env.sh"
|
||||||
|
- spack --version
|
||||||
|
- cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
|
||||||
|
- spack env activate --without-view .
|
||||||
|
- spack -d ci generate --check-index-only
|
||||||
|
--buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
|
||||||
|
--artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||||
|
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
|
||||||
|
after_script:
|
||||||
|
- cat /proc/loadavg || true
|
||||||
|
artifacts:
|
||||||
|
paths:
|
||||||
|
- "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||||
|
interruptible: true
|
||||||
|
timeout: 60 minutes
|
||||||
|
retry:
|
||||||
|
max: 2
|
||||||
|
when:
|
||||||
|
- always
|
||||||
|
|
||||||
|
.darwin-generate:
|
||||||
|
extends: [ ".base-job", ".darwin-generate-base" ]
|
||||||
|
|
||||||
|
|
||||||
.generate-deprecated:
|
.generate-deprecated:
|
||||||
extends: [ ".base-job" ]
|
extends: [ ".base-job" ]
|
||||||
stage: generate
|
stage: generate
|
||||||
@@ -216,124 +268,6 @@ protected-publish:
|
|||||||
# - artifacts: True
|
# - artifacts: True
|
||||||
# job: my-super-cool-stack-generate
|
# job: my-super-cool-stack-generate
|
||||||
|
|
||||||
########################################
|
|
||||||
# E4S Mac Stack
|
|
||||||
#
|
|
||||||
# With no near-future plans to have
|
|
||||||
# protected aws runners running mac
|
|
||||||
# builds, it seems best to decouple
|
|
||||||
# them from the rest of the stacks for
|
|
||||||
# the time being. This way they can
|
|
||||||
# still run on UO runners and be signed
|
|
||||||
# using the previous approach.
|
|
||||||
########################################
|
|
||||||
# .e4s-mac:
|
|
||||||
# variables:
|
|
||||||
# SPACK_CI_STACK_NAME: e4s-mac
|
|
||||||
# allow_failure: True
|
|
||||||
|
|
||||||
# .mac-pr:
|
|
||||||
# only:
|
|
||||||
# - /^pr[\d]+_.*$/
|
|
||||||
# - /^github\/pr[\d]+_.*$/
|
|
||||||
# variables:
|
|
||||||
# SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}"
|
|
||||||
# SPACK_PRUNE_UNTOUCHED: "True"
|
|
||||||
|
|
||||||
# .mac-protected:
|
|
||||||
# only:
|
|
||||||
# - /^develop$/
|
|
||||||
# - /^releases\/v.*/
|
|
||||||
# - /^v.*/
|
|
||||||
# - /^github\/develop$/
|
|
||||||
# variables:
|
|
||||||
# SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
|
|
||||||
|
|
||||||
# .mac-pr-build:
|
|
||||||
# extends: [ ".mac-pr", ".build" ]
|
|
||||||
# variables:
|
|
||||||
# AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
|
|
||||||
# AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
|
|
||||||
# .mac-protected-build:
|
|
||||||
# extends: [ ".mac-protected", ".build" ]
|
|
||||||
# variables:
|
|
||||||
# AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
|
|
||||||
# AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
|
|
||||||
# SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY}
|
|
||||||
|
|
||||||
# e4s-mac-pr-generate:
|
|
||||||
# extends: [".e4s-mac", ".mac-pr"]
|
|
||||||
# stage: generate
|
|
||||||
# script:
|
|
||||||
# - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
|
|
||||||
# - . "./share/spack/setup-env.sh"
|
|
||||||
# - spack --version
|
|
||||||
# - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
|
|
||||||
# - spack env activate --without-view .
|
|
||||||
# - spack ci generate --check-index-only
|
|
||||||
# --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
|
|
||||||
# --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
|
||||||
# --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
|
|
||||||
# artifacts:
|
|
||||||
# paths:
|
|
||||||
# - "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
|
||||||
# tags:
|
|
||||||
# - lambda
|
|
||||||
# interruptible: true
|
|
||||||
# retry:
|
|
||||||
# max: 2
|
|
||||||
# when:
|
|
||||||
# - runner_system_failure
|
|
||||||
# - stuck_or_timeout_failure
|
|
||||||
# timeout: 60 minutes
|
|
||||||
|
|
||||||
# e4s-mac-protected-generate:
|
|
||||||
# extends: [".e4s-mac", ".mac-protected"]
|
|
||||||
# stage: generate
|
|
||||||
# script:
|
|
||||||
# - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
|
|
||||||
# - . "./share/spack/setup-env.sh"
|
|
||||||
# - spack --version
|
|
||||||
# - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
|
|
||||||
# - spack env activate --without-view .
|
|
||||||
# - spack ci generate --check-index-only
|
|
||||||
# --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
|
||||||
# --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
|
|
||||||
# artifacts:
|
|
||||||
# paths:
|
|
||||||
# - "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
|
||||||
# tags:
|
|
||||||
# - omicron
|
|
||||||
# interruptible: true
|
|
||||||
# retry:
|
|
||||||
# max: 2
|
|
||||||
# when:
|
|
||||||
# - runner_system_failure
|
|
||||||
# - stuck_or_timeout_failure
|
|
||||||
# timeout: 60 minutes
|
|
||||||
|
|
||||||
# e4s-mac-pr-build:
|
|
||||||
# extends: [ ".e4s-mac", ".mac-pr-build" ]
|
|
||||||
# trigger:
|
|
||||||
# include:
|
|
||||||
# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
|
||||||
# job: e4s-mac-pr-generate
|
|
||||||
# strategy: depend
|
|
||||||
# needs:
|
|
||||||
# - artifacts: True
|
|
||||||
# job: e4s-mac-pr-generate
|
|
||||||
|
|
||||||
# e4s-mac-protected-build:
|
|
||||||
# extends: [ ".e4s-mac", ".mac-protected-build" ]
|
|
||||||
# trigger:
|
|
||||||
# include:
|
|
||||||
# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
|
||||||
# job: e4s-mac-protected-generate
|
|
||||||
# strategy: depend
|
|
||||||
# needs:
|
|
||||||
# - artifacts: True
|
|
||||||
# job: e4s-mac-protected-generate
|
|
||||||
|
|
||||||
########################################
|
########################################
|
||||||
# E4S pipeline
|
# E4S pipeline
|
||||||
########################################
|
########################################
|
||||||
@@ -742,6 +676,28 @@ ml-linux-x86_64-rocm-build:
|
|||||||
- artifacts: True
|
- artifacts: True
|
||||||
job: ml-linux-x86_64-rocm-generate
|
job: ml-linux-x86_64-rocm-generate
|
||||||
|
|
||||||
|
########################################
|
||||||
|
# Machine Learning - Darwin aarch64 (MPS)
|
||||||
|
########################################
|
||||||
|
.ml-darwin-aarch64-mps:
|
||||||
|
variables:
|
||||||
|
SPACK_CI_STACK_NAME: ml-darwin-aarch64-mps
|
||||||
|
|
||||||
|
ml-darwin-aarch64-mps-generate:
|
||||||
|
tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ]
|
||||||
|
extends: [ ".ml-darwin-aarch64-mps", ".darwin-generate"]
|
||||||
|
|
||||||
|
ml-darwin-aarch64-mps-build:
|
||||||
|
extends: [ ".ml-darwin-aarch64-mps", ".build" ]
|
||||||
|
trigger:
|
||||||
|
include:
|
||||||
|
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||||
|
job: ml-darwin-aarch64-mps-generate
|
||||||
|
strategy: depend
|
||||||
|
needs:
|
||||||
|
- artifacts: True
|
||||||
|
job: ml-darwin-aarch64-mps-generate
|
||||||
|
|
||||||
########################################
|
########################################
|
||||||
# Deprecated CI testing
|
# Deprecated CI testing
|
||||||
########################################
|
########################################
|
||||||
@@ -762,3 +718,100 @@ deprecated-ci-build:
|
|||||||
needs:
|
needs:
|
||||||
- artifacts: True
|
- artifacts: True
|
||||||
job: deprecated-ci-generate
|
job: deprecated-ci-generate
|
||||||
|
|
||||||
|
########################################
|
||||||
|
# AWS PCLUSTER
|
||||||
|
########################################
|
||||||
|
|
||||||
|
.aws-pcluster-generate-image:
|
||||||
|
image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
|
||||||
|
|
||||||
|
.aws-pcluster-generate:
|
||||||
|
before_script:
|
||||||
|
# Use gcc from local container buildcache
|
||||||
|
- - . "./share/spack/setup-env.sh"
|
||||||
|
- . /etc/profile.d/modules.sh
|
||||||
|
- spack mirror add local-cache /bootstrap/local-cache
|
||||||
|
- spack gpg trust /bootstrap/public-key
|
||||||
|
- cd "${CI_PROJECT_DIR}" && curl -sOL https://raw.githubusercontent.com/spack/spack-configs/main/AWS/parallelcluster/postinstall.sh
|
||||||
|
- sed -i -e "s/spack arch -t/echo ${SPACK_TARGET_ARCH}/g" postinstall.sh
|
||||||
|
- /bin/bash postinstall.sh -fg
|
||||||
|
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||||
|
after_script:
|
||||||
|
- - mv "${CI_PROJECT_DIR}/postinstall.sh" "${CI_PROJECT_DIR}/jobs_scratch_dir/"
|
||||||
|
|
||||||
|
# Icelake (one pipeline per target)
|
||||||
|
.aws-pcluster-icelake:
|
||||||
|
variables:
|
||||||
|
SPACK_CI_STACK_NAME: aws-pcluster-icelake
|
||||||
|
|
||||||
|
aws-pcluster-generate-icelake:
|
||||||
|
extends: [ ".linux_icelake", ".aws-pcluster-icelake", ".generate", ".tags-x86_64_v4", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]
|
||||||
|
|
||||||
|
aws-pcluster-build-icelake:
|
||||||
|
extends: [ ".linux_icelake", ".aws-pcluster-icelake", ".build" ]
|
||||||
|
trigger:
|
||||||
|
include:
|
||||||
|
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||||
|
job: aws-pcluster-generate-icelake
|
||||||
|
strategy: depend
|
||||||
|
needs:
|
||||||
|
- artifacts: True
|
||||||
|
job: aws-pcluster-generate-icelake
|
||||||
|
|
||||||
|
# Skylake_avx512 (one pipeline per target)
|
||||||
|
.aws-pcluster-skylake:
|
||||||
|
variables:
|
||||||
|
SPACK_CI_STACK_NAME: aws-pcluster-skylake
|
||||||
|
|
||||||
|
aws-pcluster-generate-skylake:
|
||||||
|
extends: [ ".linux_skylake", ".aws-pcluster-skylake", ".generate", ".tags-x86_64_v4", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]
|
||||||
|
|
||||||
|
aws-pcluster-build-skylake:
|
||||||
|
extends: [ ".linux_skylake", ".aws-pcluster-skylake", ".build" ]
|
||||||
|
trigger:
|
||||||
|
include:
|
||||||
|
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||||
|
job: aws-pcluster-generate-skylake
|
||||||
|
strategy: depend
|
||||||
|
needs:
|
||||||
|
- artifacts: True
|
||||||
|
job: aws-pcluster-generate-skylake
|
||||||
|
|
||||||
|
# Neoverse_n1 (one pipeline per target)
|
||||||
|
.aws-pcluster-neoverse_n1:
|
||||||
|
variables:
|
||||||
|
SPACK_CI_STACK_NAME: aws-pcluster-neoverse_n1
|
||||||
|
|
||||||
|
aws-pcluster-generate-neoverse_n1:
|
||||||
|
extends: [ ".linux_neoverse_n1", ".aws-pcluster-neoverse_n1", ".generate-aarch64", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]
|
||||||
|
|
||||||
|
aws-pcluster-build-neoverse_n1:
|
||||||
|
extends: [ ".linux_neoverse_n1", ".aws-pcluster-neoverse_n1", ".build" ]
|
||||||
|
trigger:
|
||||||
|
include:
|
||||||
|
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||||
|
job: aws-pcluster-generate-neoverse_n1
|
||||||
|
strategy: depend
|
||||||
|
needs:
|
||||||
|
- artifacts: True
|
||||||
|
job: aws-pcluster-generate-neoverse_n1
|
||||||
|
|
||||||
|
# Neoverse_v1 (one pipeline per target)
|
||||||
|
.aws-pcluster-neoverse_v1:
|
||||||
|
variables:
|
||||||
|
SPACK_CI_STACK_NAME: aws-pcluster-neoverse_v1
|
||||||
|
|
||||||
|
aws-pcluster-generate-neoverse_v1:
|
||||||
|
extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".generate-aarch64", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]
|
||||||
|
|
||||||
|
aws-pcluster-build-neoverse_v1:
|
||||||
|
extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".build" ]
|
||||||
|
trigger:
|
||||||
|
include:
|
||||||
|
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||||
|
job: aws-pcluster-generate-neoverse_v1
|
||||||
|
strategy: depend
|
||||||
|
needs:
|
||||||
|
- artifacts: True
|
||||||
|
job: aws-pcluster-generate-neoverse_v1
|
||||||
|
|||||||
@@ -0,0 +1,27 @@
|
|||||||
|
compilers:
|
||||||
|
- compiler:
|
||||||
|
spec: apple-clang@14.0.0
|
||||||
|
paths:
|
||||||
|
cc: /usr/bin/clang
|
||||||
|
cxx: /usr/bin/clang++
|
||||||
|
f77: /opt/homebrew/bin/gfortran
|
||||||
|
fc: /opt/homebrew/bin/gfortran
|
||||||
|
flags: {}
|
||||||
|
operating_system: ventura
|
||||||
|
target: aarch64
|
||||||
|
modules: []
|
||||||
|
environment: {}
|
||||||
|
extra_rpaths: []
|
||||||
|
- compiler:
|
||||||
|
spec: gcc@12.2.0
|
||||||
|
paths:
|
||||||
|
cc: /opt/homebrew/bin/gcc-12
|
||||||
|
cxx: /opt/homebrew/bin/g++-12
|
||||||
|
f77: /opt/homebrew/bin/gfortran-12
|
||||||
|
fc: /opt/homebrew/bin/gfortran-12
|
||||||
|
flags: {}
|
||||||
|
operating_system: ventura
|
||||||
|
target: aarch64
|
||||||
|
modules: []
|
||||||
|
environment: {}
|
||||||
|
extra_rpaths: []
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
config:
|
||||||
|
install_tree:
|
||||||
|
root: $spack/opt/spack
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
ci:
|
|
||||||
pipeline-gen:
|
|
||||||
- build-job:
|
|
||||||
script: |
|
|
||||||
- tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
|
|
||||||
- . "./share/spack/setup-env.sh"
|
|
||||||
- spack --version
|
|
||||||
- spack arch
|
|
||||||
- cd ${SPACK_CONCRETE_ENV_DIR}
|
|
||||||
- spack env activate --without-view .
|
|
||||||
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
|
|
||||||
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
|
|
||||||
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
|
|
||||||
tags: ["lambda"]
|
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- any-job:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_ARCH: icelake
|
||||||
|
- build-job:
|
||||||
|
before_script:
|
||||||
|
- - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz" -o gmake.tar.gz
|
||||||
|
- printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz" | sha256sum --check --strict --quiet
|
||||||
|
- tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
|
||||||
|
tags: ["x86_64_v4"]
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- any-job:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_ARCH: neoverse_n1
|
||||||
|
- build-job:
|
||||||
|
tags: ["aarch64", "graviton2"]
|
||||||
@@ -0,0 +1,7 @@
|
|||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- any-job:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_ARCH: neoverse_v1
|
||||||
|
- build-job:
|
||||||
|
tags: ["aarch64", "graviton3"]
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- any-job:
|
||||||
|
variables:
|
||||||
|
SPACK_TARGET_ARCH: skylake_avx512
|
||||||
|
- build-job:
|
||||||
|
before_script:
|
||||||
|
- - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz" -o gmake.tar.gz
|
||||||
|
- printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz" | sha256sum --check --strict --quiet
|
||||||
|
- tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
|
||||||
|
tags: ["x86_64_v4"]
|
||||||
@@ -0,0 +1,55 @@
|
|||||||
|
spack:
|
||||||
|
view: false
|
||||||
|
|
||||||
|
definitions:
|
||||||
|
- compiler_specs:
|
||||||
|
- gcc
|
||||||
|
- gettext
|
||||||
|
|
||||||
|
- compiler_target:
|
||||||
|
- '%gcc@7.3.1 target=x86_64_v3'
|
||||||
|
|
||||||
|
- optimized_configs:
|
||||||
|
# - gromacs
|
||||||
|
- lammps
|
||||||
|
# - mpas-model
|
||||||
|
- openfoam
|
||||||
|
# - palace
|
||||||
|
# - py-devito
|
||||||
|
# - quantum-espresso
|
||||||
|
# - wrf
|
||||||
|
|
||||||
|
- optimized_libs:
|
||||||
|
- mpich
|
||||||
|
- openmpi
|
||||||
|
|
||||||
|
specs:
|
||||||
|
- matrix:
|
||||||
|
- - $compiler_specs
|
||||||
|
- - $compiler_target
|
||||||
|
- $optimized_configs
|
||||||
|
# - $optimized_libs
|
||||||
|
|
||||||
|
mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-icelake" }
|
||||||
|
|
||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- build-job:
|
||||||
|
image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
|
||||||
|
before_script:
|
||||||
|
- - . "./share/spack/setup-env.sh"
|
||||||
|
- . /etc/profile.d/modules.sh
|
||||||
|
- spack --version
|
||||||
|
- spack arch
|
||||||
|
# Use gcc from local container buildcache
|
||||||
|
- - spack mirror add local-cache /bootstrap/local-cache
|
||||||
|
- spack gpg trust /bootstrap/public-key
|
||||||
|
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||||
|
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||||
|
- signing-job:
|
||||||
|
before_script:
|
||||||
|
# Do not distribute Intel & ARM binaries
|
||||||
|
- - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
- for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
cdash:
|
||||||
|
build-group: AWS Packages
|
||||||
@@ -0,0 +1,58 @@
|
|||||||
|
spack:
|
||||||
|
view: false
|
||||||
|
|
||||||
|
definitions:
|
||||||
|
- compiler_specs:
|
||||||
|
- gcc
|
||||||
|
- gettext
|
||||||
|
|
||||||
|
- compiler_target:
|
||||||
|
- '%gcc@7.3.1 target=aarch64'
|
||||||
|
|
||||||
|
- optimized_configs:
|
||||||
|
- gromacs
|
||||||
|
# - lammps
|
||||||
|
# - mpas-model
|
||||||
|
- openfoam
|
||||||
|
- palace
|
||||||
|
# - py-devito
|
||||||
|
# - quantum-espresso
|
||||||
|
# - wrf
|
||||||
|
|
||||||
|
- optimized_libs:
|
||||||
|
- mpich
|
||||||
|
- openmpi
|
||||||
|
|
||||||
|
specs:
|
||||||
|
- matrix:
|
||||||
|
- - $compiler_specs
|
||||||
|
- - $compiler_target
|
||||||
|
- $optimized_configs
|
||||||
|
- $optimized_libs
|
||||||
|
|
||||||
|
|
||||||
|
mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_n1" }
|
||||||
|
|
||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- build-job:
|
||||||
|
image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
|
||||||
|
tags: ["aarch64"]
|
||||||
|
before_script:
|
||||||
|
- - . "./share/spack/setup-env.sh"
|
||||||
|
- . /etc/profile.d/modules.sh
|
||||||
|
- spack --version
|
||||||
|
- spack arch
|
||||||
|
# Use gcc from local container buildcache
|
||||||
|
- - spack mirror add local-cache /bootstrap/local-cache
|
||||||
|
- spack gpg trust /bootstrap/public-key
|
||||||
|
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||||
|
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||||
|
- signing-job:
|
||||||
|
before_script:
|
||||||
|
# Do not distribute Intel & ARM binaries
|
||||||
|
- - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
- for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
|
||||||
|
cdash:
|
||||||
|
build-group: AWS Packages
|
||||||
@@ -0,0 +1,58 @@
|
|||||||
|
spack:
|
||||||
|
view: false
|
||||||
|
|
||||||
|
definitions:
|
||||||
|
- compiler_specs:
|
||||||
|
- gcc
|
||||||
|
- gettext
|
||||||
|
|
||||||
|
- compiler_target:
|
||||||
|
- '%gcc@7.3.1 target=aarch64'
|
||||||
|
|
||||||
|
- optimized_configs:
|
||||||
|
- gromacs
|
||||||
|
# - lammps
|
||||||
|
# - mpas-model
|
||||||
|
- openfoam
|
||||||
|
- palace
|
||||||
|
# - py-devito
|
||||||
|
# - quantum-espresso
|
||||||
|
# - wrf
|
||||||
|
|
||||||
|
- optimized_libs:
|
||||||
|
- mpich
|
||||||
|
- openmpi
|
||||||
|
|
||||||
|
specs:
|
||||||
|
- matrix:
|
||||||
|
- - $compiler_specs
|
||||||
|
- - $compiler_target
|
||||||
|
- $optimized_configs
|
||||||
|
- $optimized_libs
|
||||||
|
|
||||||
|
|
||||||
|
mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_v1" }
|
||||||
|
|
||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- build-job:
|
||||||
|
image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
|
||||||
|
tags: ["aarch64"]
|
||||||
|
before_script:
|
||||||
|
- - . "./share/spack/setup-env.sh"
|
||||||
|
- . /etc/profile.d/modules.sh
|
||||||
|
- spack --version
|
||||||
|
- spack arch
|
||||||
|
# Use gcc from local container buildcache
|
||||||
|
- - spack mirror add local-cache /bootstrap/local-cache
|
||||||
|
- spack gpg trust /bootstrap/public-key
|
||||||
|
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||||
|
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||||
|
- signing-job:
|
||||||
|
before_script:
|
||||||
|
# Do not distribute Intel & ARM binaries
|
||||||
|
- - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
- for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
|
||||||
|
cdash:
|
||||||
|
build-group: AWS Packages
|
||||||
@@ -0,0 +1,55 @@
|
|||||||
|
spack:
|
||||||
|
view: false
|
||||||
|
|
||||||
|
definitions:
|
||||||
|
- compiler_specs:
|
||||||
|
- gcc
|
||||||
|
- gettext
|
||||||
|
|
||||||
|
- compiler_target:
|
||||||
|
- '%gcc@7.3.1 target=x86_64_v3'
|
||||||
|
|
||||||
|
- optimized_configs:
|
||||||
|
# - gromacs
|
||||||
|
- lammps
|
||||||
|
# - mpas-model
|
||||||
|
- openfoam
|
||||||
|
# - palace
|
||||||
|
# - py-devito
|
||||||
|
# - quantum-espresso
|
||||||
|
# - wrf
|
||||||
|
|
||||||
|
- optimized_libs:
|
||||||
|
- mpich
|
||||||
|
- openmpi
|
||||||
|
|
||||||
|
specs:
|
||||||
|
- matrix:
|
||||||
|
- - $compiler_specs
|
||||||
|
- - $compiler_target
|
||||||
|
- $optimized_configs
|
||||||
|
# - $optimized_libs
|
||||||
|
|
||||||
|
mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-skylake" }
|
||||||
|
|
||||||
|
ci:
|
||||||
|
pipeline-gen:
|
||||||
|
- build-job:
|
||||||
|
image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
|
||||||
|
before_script:
|
||||||
|
- - . "./share/spack/setup-env.sh"
|
||||||
|
- . /etc/profile.d/modules.sh
|
||||||
|
- spack --version
|
||||||
|
- spack arch
|
||||||
|
# Use gcc from local container buildcache
|
||||||
|
- - spack mirror add local-cache /bootstrap/local-cache
|
||||||
|
- spack gpg trust /bootstrap/public-key
|
||||||
|
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
|
||||||
|
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
|
||||||
|
- signing-job:
|
||||||
|
before_script:
|
||||||
|
# Do not distribute Intel & ARM binaries
|
||||||
|
- - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
- for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
|
||||||
|
cdash:
|
||||||
|
build-group: AWS Packages
|
||||||
@@ -24,4 +24,4 @@ spack:
|
|||||||
mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" }
|
mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" }
|
||||||
|
|
||||||
cdash:
|
cdash:
|
||||||
build-group: Build tests for different build systems
|
build-group: Build Systems
|
||||||
|
|||||||
@@ -6,9 +6,9 @@ spack:
|
|||||||
mesa:
|
mesa:
|
||||||
require: "+glx +osmesa +opengl ~opengles +llvm"
|
require: "+glx +osmesa +opengl ~opengles +llvm"
|
||||||
libosmesa:
|
libosmesa:
|
||||||
require: ^mesa +osmesa
|
require: "mesa +osmesa"
|
||||||
libglx:
|
libglx:
|
||||||
require: ^mesa +glx
|
require: "mesa +glx"
|
||||||
ospray:
|
ospray:
|
||||||
require: "@2.8.0 +denoiser +mpi"
|
require: "@2.8.0 +denoiser +mpi"
|
||||||
llvm:
|
llvm:
|
||||||
|
|||||||
@@ -1,40 +0,0 @@
|
|||||||
spack:
|
|
||||||
view: false
|
|
||||||
packages:
|
|
||||||
all:
|
|
||||||
compiler: [apple-clang@13.1.6]
|
|
||||||
target: [m1]
|
|
||||||
|
|
||||||
definitions:
|
|
||||||
- easy_specs:
|
|
||||||
- berkeley-db
|
|
||||||
- ncurses
|
|
||||||
- gcc
|
|
||||||
- py-jupyterlab
|
|
||||||
- py-scipy
|
|
||||||
- py-matplotlib
|
|
||||||
- py-pandas
|
|
||||||
|
|
||||||
- arch:
|
|
||||||
- '%apple-clang@13.1.6 target=m1'
|
|
||||||
|
|
||||||
specs:
|
|
||||||
|
|
||||||
- matrix:
|
|
||||||
- - $easy_specs
|
|
||||||
- - $arch
|
|
||||||
|
|
||||||
mirrors: { "mirror": "s3://spack-binaries/develop/e4s-mac" }
|
|
||||||
|
|
||||||
ci:
|
|
||||||
pipeline-gen:
|
|
||||||
- cleanup-job:
|
|
||||||
before_script: |
|
|
||||||
- export SPACK_USER_CACHE_PATH=$(pwd)/.spack-user-cache
|
|
||||||
- export SPACK_USER_CONFIG_PATH=$(pwd)/.spack-user-config
|
|
||||||
- . "./share/spack/setup-env.sh"
|
|
||||||
- spack --version
|
|
||||||
tags: [lambda]
|
|
||||||
|
|
||||||
cdash:
|
|
||||||
build-group: E4S Mac
|
|
||||||
@@ -269,6 +269,10 @@ spack:
|
|||||||
pipeline-gen:
|
pipeline-gen:
|
||||||
- build-job:
|
- build-job:
|
||||||
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
|
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
|
||||||
|
before_script:
|
||||||
|
- - . /bootstrap/runner/view/lmod/lmod/init/bash
|
||||||
|
- module use /opt/intel/oneapi/modulefiles
|
||||||
|
- module load compiler
|
||||||
|
|
||||||
cdash:
|
cdash:
|
||||||
build-group: E4S OneAPI
|
build-group: E4S OneAPI
|
||||||
|
|||||||
@@ -70,8 +70,9 @@ spack:
   - charliecloud
   - conduit
   - datatransferkit
+  - dealii
   - dyninst
-  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp
+  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14
  - exaworks
  - flecsi
  - flit
@@ -165,7 +166,7 @@
  - chai ~benchmarks ~tests +cuda ^umpire ~shared
  - cusz +cuda
  - dealii +cuda
-  - ecp-data-vis-sdk +cuda +adios2 +hdf5 +paraview +vtkm +zfp # Removing ascent because Dray is hung in CI. +ascent
+  - ecp-data-vis-sdk +cuda ~ascent +adios2 +hdf5 +paraview +sz +vtkm +zfp ^hdf5@1.14 # Removing ascent because RAJA build failure
  - flecsi +cuda
  - flux-core +cuda
  - ginkgo +cuda
@@ -199,7 +200,7 @@
  - cabana +rocm
  - caliper +rocm
  - chai ~benchmarks +rocm
-  - ecp-data-vis-sdk +paraview +vtkm +rocm
+  - ecp-data-vis-sdk +adios2 +hdf5 +paraview +pnetcdf +sz +vtkm +zfp +rocm ^hdf5@1.14 # Excludes ascent for now due to C++ standard issues
  - gasnet +rocm
  - ginkgo +rocm
  - heffte +rocm
@@ -0,0 +1,139 @@
+spack:
+  view: false
+
+  concretizer:
+    unify: false
+    reuse: false
+
+  config:
+    concretizer: clingo
+    db_lock_timeout: 120
+    install_tree:
+      root: $spack/opt/spack
+      padded_length: 256
+      projections:
+        all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'
+
+  packages:
+    all:
+      require: target=aarch64
+      variants: +mps~cuda~rocm
+    mpi:
+      require: openmpi
+
+  specs:
+  # Hugging Face
+  - py-transformers
+
+  # JAX
+  - py-jax
+  # - py-jaxlib # bazel codesign
+
+  # Keras
+  - py-keras-applications
+  - py-keras-preprocessing
+  - py-keras2onnx
+  # - py-keras # bazel codesign
+
+  # MXNet
+  - mxnet
+
+  # PyTorch
+  - py-botorch
+  - py-gpytorch
+  - py-pytorch-gradual-warmup-lr
+  - py-segmentation-models-pytorch
+  - py-timm
+  - py-torch
+  - py-torch-cluster
+  - py-torch-geometric
+  - py-torch-sparse
+  - py-torchdata
+  - py-torchfile
+  - py-torchgeo
+  - py-torchvision
+
+  # scikit-learn
+  - py-scikit-learn
+  - py-scikit-learn-extra
+
+  # TensorBoard
+  - py-tensorboard
+  - py-tensorboard-data-server
+  - py-tensorboard-plugin-wit
+  - py-tensorboardx
+
+  # TensorFlow
+  # - py-tensorflow # bazel codesign
+  # - py-tensorflow-datasets # bazel codesign
+  # - py-tensorflow-hub # bazel codesign
+  # - py-tensorflow-metadata # bazel codesign
+  # - py-tensorflow-estimator # bazel codesign
+  # - py-tensorflow-probability # py-dm-tree due to bazel codesign
+
+  # XGBoost
+  - py-xgboost
+  - xgboost
+
+  # ERRORS
+  # - py-efficientnet-pytorch # py-torch
+  # - py-horovod # py-torch
+  # - py-kornia # py-torch
+  # - py-lightning # py-torch
+  # - py-pytorch-lightning # py-torch
+  # - py-torch-nvidia-apex # py-torch
+  # - py-torch-scatter # py-torch
+  # - py-torch-spline-conv # py-torch
+  # - py-torchaudio # py-torchaudio
+  # - py-torchmetrics # py-torch
+  # - py-torchtext # py-torchtext
+  # - py-vector-quantize-pytorch # py-torch
+  # - r-xgboost # r
+
+  mirrors: { "mirror": "s3://spack-binaries/develop/ml-darwin-aarch64-cpu" }
+
+  ci:
+    pipeline-gen:
+    - build-job-remove:
+        image: no-image
+        tags: [spack, public]
+    - build-job:
+        tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ]
+        script::
+        - - spack compiler find
+          - cd ${SPACK_CONCRETE_ENV_DIR}
+          - spack env activate --without-view .
+          - if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi
+          - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
+          # AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
+          - if [[ -r /etc/protected-runner/e4s.gpg ]]; then spack gpg trust /etc/protected-runner/e4s.gpg; fi
+          # UO runners mount intermediate ci public key (verification), AWS runners mount public/private (signing/verification)
+          - if [[ -r /etc/protected-runner/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /etc/protected-runner/intermediate_ci_signing_key.gpg; fi
+          - if [[ -r /etc/protected-runner/spack_public_key.gpg ]]; then spack gpg trust /etc/protected-runner/spack_public_key.gpg; fi
+          - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
+        after_script:
+        - - cat /proc/loadavg || true
+    - signing-job:
+        image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
+        tags: ["aws"]
+        script:
+        - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
+          - /sign.sh
+          - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
+          - aws s3 cp /tmp/public_keys ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/_pgp --recursive --exclude "*" --include "*.pub"
+    - any-job:
+        image: "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18"
+        tags: ["spack"]
+        before_script:
+        - - uname -a || true
+          - grep -E "vendor|model name" /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
+          - nproc || true
+        - - . "./share/spack/setup-env.sh"
+          - spack --version
+          - spack arch
+
+  cdash:
+    build-group: Machine Learning MPS
+    url: https://cdash.spack.io
+    project: Spack Testing
+    site: Cloud Gitlab Infrastructure
@@ -1060,7 +1060,7 @@ _spack_external_list() {
 }

 _spack_external_read_cray_manifest() {
-    SPACK_COMPREPLY="-h --help --file --directory --dry-run --fail-on-error"
+    SPACK_COMPREPLY="-h --help --file --directory --ignore-default-dir --dry-run --fail-on-error"
 }

 _spack_fetch() {
@@ -14,6 +14,7 @@ class Python(Package):

     extendable = True

+    version("3.7.1", md5="aaabbbcccdddeeefffaaabbbcccddd12")
     version("3.5.1", md5="be78e48cdfc1a7ad90efff146dce6cfe")
     version("3.5.0", md5="a56c0c0b45d75a0ec9c6dee933c41c36")
     version("2.7.11", md5="6b6076ec9e93f05dd63e47eb9c15728b", preferred=True)
@@ -7,16 +7,24 @@


 class SimpleStandaloneTest(Package):
-    """This package has a simple stand-alone test features."""
+    """This package has simple stand-alone test features."""

     homepage = "http://www.example.com/simple_test"
     url = "http://www.unit-test-should-replace-this-url/simple_test-1.0.tar.gz"

-    version("1.0", md5="0123456789abcdef0123456789abcdef")
+    version("1.0", md5="123456789abcdef0123456789abcdefg")
+    version("0.9", md5="0123456789abcdef0123456789abcdef")

-    provides("standalone-test")
+    provides("standalone-ifc")

     def test_echo(self):
         """simple stand-alone test"""
         echo = which("echo")
         echo("testing echo", output=str.split, error=str.split)
+
+    def test_skip(self):
+        """simple skip test"""
+        if self.spec.satisfies("@1.0:"):
+            raise SkipTest("This test is not available from v1.0 on")
+
+        print("Ran test_skip")
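The mock package above exercises Spack's newer stand-alone test convention: any method named `test_*` is discovered by `spack test run`, and raising `SkipTest` records the case as skipped rather than failed. A minimal sketch of the same pattern with a hypothetical package and binary name (only `which`, `SkipTest`, and the `test_*` naming come from the diff; the install layout is assumed):

```python
from spack.package import *


class MyPkg(Package):
    """Hypothetical package illustrating the test_* convention."""

    homepage = "http://www.example.com"
    url = "http://www.example.com/mypkg-1.0.tar.gz"

    version("1.0", md5="0123456789abcdef0123456789abcdef")

    def test_version_flag(self):
        """check that the installed binary reports its version"""
        exe = which(join_path(self.prefix.bin, "mypkg"))  # assumed layout
        if exe is None:
            raise SkipTest("mypkg binary was not installed")
        out = exe("--version", output=str.split, error=str.split)
        assert "1.0" in out
```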
@@ -36,6 +36,40 @@
 }

 _versions = {
+    "23.04.1": {
+        "RHEL-7": (
+            "5e84daaf0510f73c235723112f9241bbd744ed89eb4f70f089bac05cf2aad2c4",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_RHEL-7_aarch64.tar",
+        ),
+        "RHEL-8": (
+            "6ec1f2c7338ea8a2831a7ff353ab44f87804f56716d1f3686576fb950c2f730f",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_RHEL-8_aarch64.tar",
+        ),
+        "RHEL-9": (
+            "dbd6493ea762b9b4c6cb54a76ad42e2223360882165ee3c223c1b7d1ebe927e2",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_RHEL-9_aarch64.tar",
+        ),
+        "SLES-15": (
+            "74c29890d47556114922c77e5a9797b055f8fe49f0c8665d17102465fca766b4",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_SLES-15_aarch64.tar",
+        ),
+        "Ubuntu-20.04": (
+            "78015ff5a246facfe45219a03a3774221b2f3b58db6fa3d9840d2574d103310c",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_Ubuntu-20.04_aarch64.tar",
+        ),
+        "Ubuntu-22.04": (
+            "19213db67aa11de44b617255e9e32efd294f930c6b6145192acf9ee331452ea6",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_Ubuntu-22.04_aarch64.tar",
+        ),
+        "AmazonLinux-2": (
+            "31ba559302a2889e5f0897f1c07563b20a5a8eaa671e623bef406b6490d1f4f2",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_AmazonLinux-2_aarch64.tar",
+        ),
+        "AmazonLinux-2023": (
+            "fa38f3d79775e9a537c59c8ba39c3b10505e895a3602bbd93c09445170db571f",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_AmazonLinux-2023_aarch64.tar",
+        ),
+    },
     "23.04": {
         "RHEL-7": (
             "6526218484e87c195c1145f60536552fabbd25ba98c05cf096f54de18381a422",

@@ -72,23 +106,23 @@
     },
     "22.1": {
         "RHEL-7": (
-            "bfbfef9099bf0e90480d48b3a1a741d583fc939284f869958e9c09e177098c73",
+            "367b9a60fa13b5fcf2fa787122c12d4bfb14d6f3e3e7b0460efc7627484a56a4",
             "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_RHEL-7_aarch64.tar",
         ),
         "RHEL-8": (
-            "28116f6030c95ee8f69eba89023966974d6b44d4a686098f5c3c03e34f7495f6",
+            "f03ad3381a74df73a4c25baf5f1c15bd466cfd6286498c38b37ddeaa85c9965e",
             "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_RHEL-8_aarch64.tar",
         ),
         "SLES-15": (
-            "6616dba1af4a73300ce822b645a0f1dfd363f507db5ea44cab1c6051ea388554",
+            "8a1c5bd570bd195982c342da8dafb7075f8f6b373b44539d4c810e69e8157c1f",
             "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_SLES-15_aarch64.tar",
         ),
         "Ubuntu-18.04": (
-            "3b3dd6f416299fbd14fbaf0b1bddf7e2f4445a186de7a87e9efdae0b9d0dc3d5",
+            "4628599d389efcee07d0986cc3e791931e6a37eddb6e4b93c7846e17efe2148f",
             "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_Ubuntu-18.04_aarch64.tar",
         ),
         "Ubuntu-20.04": (
-            "e6361a08f75817c8dbfb56dc72578810eaf5ffb65591215e394cb3ec6bdd9c10",
+            "20d950d16e6bb0b3a4c4f3c8ad393aae2356d4c998303b319da9e9833d4a6d12",
             "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_Ubuntu-20.04_aarch64.tar",
         ),
     },
@@ -199,8 +233,8 @@ class Acfl(Package):
     with a modern LLVM-based compiler framework.
     """

-    homepage = "https://developer.arm.com/tools-and-software/server-and-hpc/arm-allinea-studio"
-    url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_Ubuntu-20.04_aarch64.tar"
+    homepage = "https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux"
+    url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_Ubuntu-22.04_aarch64.tar"

     maintainers("annop-w")
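The `_versions` table added above maps a release to per-distribution `(sha256, url)` pairs. The loop that turns the table into `version()` directives sits outside this hunk; the sketch below only illustrates the usual shape of that loop, with `detect_os()` as a hypothetical stand-in for however the package resolves the host distribution:

```python
from spack.package import *


def detect_os():
    """Hypothetical stand-in for the package's distro detection."""
    return "RHEL-8"


class MyArmPkg(Package):  # illustrative package, not acfl itself
    """Unrolls a (sha256, url) table into version() directives."""

    # _versions is the module-level table defined above this class.
    for ver, distros in _versions.items():
        entry = distros.get(detect_os())
        if entry:
            version(ver, sha256=entry[0], url=entry[1])
```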
@@ -18,6 +18,7 @@ class Actsvg(CMakePackage):

     maintainers("HadrienG2", "wdconinc")

+    version("0.4.33", sha256="25c93b8382bdb1864b4d8de64b146fe8ea86eec84048d594c375700d2fff1d1d")
     version("0.4.30", sha256="f7ffea39b3132914fcbb0fac6ab7395bef295cd6078dfd1c2509fd2d9aab0acb")
     version("0.4.29", sha256="971f4f344c3143b654e6a86422534c6916f785f2c2c3785664c4ae7ddf2f5e4b")
     version("0.4.28", sha256="12c6f0c41b1aeb21164c949498819976bf91a395968debcb400539713bdfc6b0")
@@ -17,6 +17,7 @@ class Alluxio(Package):
     list_url = "https://downloads.alluxio.io/downloads/files"
     list_depth = 1

+    version("2.9.3", sha256="c71abc5e852d37cfd6b1dea076f056c6997e3f60fbb940bf005acb3a6354a369")
     version("2.9.1", sha256="e9456db7a08488af22dee3a44e4135bc03a0444e31c7753bf00f72465f68ffb9")

     # https://nvd.nist.gov/vuln/detail/CVE-2022-23848
@@ -25,6 +25,7 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
     maintainers("bvanessen")

     version("master", branch="master")
+    version("1.3.1", sha256="28ce0af6c6f29f97b7f19c5e45184bd2f8a0b1428f1e898b027d96d47cb74b0b")
     version("1.3.0", sha256="d0442efbebfdfb89eec793ae65eceb8f1ba65afa9f2e48df009f81985a4c27e3")
     version("1.2.3", sha256="9b214bdf30f9b7e8e017f83e6615db6be2631f5be3dd186205dbe3aa62f4018a")
     version(
@@ -21,6 +21,8 @@ class Amgx(CMakePackage, CudaPackage):

     maintainers("js947")

+    version("2.3.0", sha256="419b3cd5bd3eb3469cbef79d64a8d19d5db88dd5cce809e49cac6fc4fc2edff1")
+    version("2.2.0", sha256="dac78516bb528135cad903399fe0093aa0904e304565ef2d3da4fae05eda7928")
     version("2.1.0", sha256="6245112b768a1dc3486b2b3c049342e232eb6281a6021fffa8b20c11631f63cc")
     version("2.0.1", sha256="6f9991f1836fbf4ba2114ce9f49febd0edc069a24f533bd94fd9aa9be72435a7")
     version("2.0.0", sha256="8ec7ea8412be3de216fcf7243c4e2a8bcf76878e6865468e4238630a082a431b")
@@ -31,6 +31,7 @@ class Aml(AutotoolsPackage):
     # version string is generated from git tags, requires entire repo
     version("master", branch="master", submodules=True, get_full_repo=True)

+    version("0.2.1", sha256="bae49e89ed0f2a2ad3547430e79b7e4c018d6228c6ed951a12d59afd0b35f71c")
     version("0.2.0", sha256="2044a2f3f1d7a19827dd9c0726172b690189b4d3fe938656c4160c022468cc4a")
     version(
         "0.1.0",
@@ -45,10 +46,12 @@ class Aml(AutotoolsPackage):
     variant("ze", default=False, description="Support for memory operations on top of Level Zero.")
     variant("hip", default=False, description="Support for memory operations on top of HIP.")
     variant("cuda", default=False, description="Support for memory operations on top of CUDA.")
-    variant("hwloc", default=False, description="Enable feature related to topology management")
+    variant("hwloc", default=True, description="Enable feature related to topology management")
     variant(
         "hip-platform",
-        values=disjoint_sets(("amd", "nvidia")),
+        values=("none", conditional("amd", when="+hip"), conditional("nvidia", when="+cuda")),
+        default="none",
+        multi=False,
         description="HIP backend platform.",
     )
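A `conditional(...)` value restricts when that value is even allowed: with the definition above, `hip-platform=amd` can only be requested together with `+hip` and `hip-platform=nvidia` with `+cuda`, while `none` is the safe default for a single-valued (`multi=False`) variant. A sketch of the same construct with a hypothetical variant name:

```python
# Sketch: a conditional value is only selectable when its condition holds.
variant(
    "backend",  # hypothetical variant name for illustration
    values=("none", conditional("gpu", when="+cuda")),
    default="none",
    multi=False,
    description="Optional accelerated backend.",
)
```

With this shape, a spec like `aml +cuda hip-platform=nvidia` should concretize, whereas `aml ~hip hip-platform=amd` should be rejected by the concretizer.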
@@ -68,6 +71,10 @@ class Aml(AutotoolsPackage):
     depends_on("hwloc@2.1:", when="+hwloc")
     # - ocl-icd >= 2.1 becomes a dependency when +opencl variant is used.
     depends_on("ocl-icd@2.1:", when="+opencl")
+    # Required on master for autoconf to pull the right pkg.m4 macros,
+    # and on other builds to detect dependencies
+    # Note: This does NOT work with pkg-config but requires pkgconf!
+    depends_on("pkgconf", type="build")

     # when on master, we need all the autotools and extras to generate files.
     with when("@master"):
@@ -75,9 +82,6 @@ class Aml(AutotoolsPackage):
         depends_on("autoconf", type="build")
         depends_on("automake", type="build")
         depends_on("libtool", type="build")
-        # Required to have pkg config macros in configure.
-        # Note: This does NOT work with pkg-config but requires pkgconf!
-        depends_on("pkgconf", type="build")
         # Required to generate AML version in configure.
         depends_on("git", type="build")
@@ -91,9 +95,9 @@ def configure_args(self):
             config_args.extend(self.with_or_without(b))
         if self.spec.satisfies("%oneapi"):
             config_args += ["--with-openmp-flags=-fiopenmp -fopenmp-targets=spir64"]
-        if "hip-platform=amd" in self.spec:
+        if self.spec.variants["hip-platform"].value == "amd":
             config_args += ["--with-hip-platform=amd"]
-        if "hip-platform=nvidia" in self.spec:
+        if self.spec.variants["hip-platform"].value == "nvidia":
            config_args += ["--with-hip-platform=nvidia"]
        return config_args
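The rewritten checks read the concretized value directly. Because the variant is single-valued and defaults to "none", `self.spec.variants["hip-platform"].value` yields exactly one string, so the two branches above could even collapse into one; a minimal sketch, assuming the variant definition shown earlier:

```python
# Sketch: equivalent single check for a single-valued variant.
platform = self.spec.variants["hip-platform"].value
if platform != "none":
    config_args += ["--with-hip-platform={0}".format(platform)]
```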
@@ -17,6 +17,7 @@ class AmqpCpp(CMakePackage):

     maintainers("lpottier")

+    version("4.3.24", sha256="c3312f8af813cacabf6c257dfaf41bf9e66606bbf7d62d085a9b7da695355245")
     version("4.3.19", sha256="ca29bb349c498948576a4604bed5fd3c27d87240b271a4441ccf04ba3797b31d")

     variant(
@@ -314,43 +314,20 @@ def cmake_args(self):

         return args

-    # TODO: Replace this method and its 'get' use for cmake path with
-    # join_path(self.spec['cmake'].prefix.bin, 'cmake') once stand-alone
-    # tests can access build dependencies through self.spec['cmake'].
-    def cmake_bin(self, set=True):
-        """(Hack) Set/get cmake dependency path."""
-        filepath = join_path(self.install_test_root, "cmake_bin_path.txt")
-        if set:
-            with open(filepath, "w") as out_file:
-                cmake_bin = join_path(self.spec["cmake"].prefix.bin, "cmake")
-                out_file.write("{0}\n".format(cmake_bin))
-        else:
-            with open(filepath, "r") as in_file:
-                return in_file.read().strip()
-
     @run_after("build")
-    def setup_smoke_test(self):
-        """Skip setup smoke tests for AMReX versions less than 21.12."""
+    def setup_standalone_test(self):
+        """Setup stand-alone tests for AMReX versions from 21.12 on."""
         if self.spec.satisfies("@:21.11"):
             return

         self.cache_extra_test_sources(["Tests"])

-        # TODO: Remove once self.spec['cmake'] is available here
-        self.cmake_bin(set=True)
-
-    def test(self):
-        """Skip smoke tests for AMReX versions less than 21.12."""
+    def test_run_install_test(self):
+        """build and run AmrCore test"""
         if self.spec.satisfies("@:21.11"):
-            print("SKIPPED: Stand-alone tests not supported for this version of AMReX.")
-            return
-
-        """Perform smoke tests on installed package."""
-        # TODO: Remove/replace once self.spec['cmake'] is available here
-        cmake_bin = self.cmake_bin(set=False)
-
-        args = []
-        args.append("-S./cache/amrex/Tests/SpackSmokeTest")
+            raise SkipTest("Test is not supported for versions @:21.11")
+
+        args = ["-S{0}".format(join_path(".", "cache", "amrex", "Tests", "SpackSmokeTest"))]
         args.append("-DAMReX_ROOT=" + self.prefix)
         if "+mpi" in self.spec:
             args.append("-DMPI_C_COMPILER=" + self.spec["mpi"].mpicc)
@@ -360,15 +337,15 @@ def test(self):
             args.append("-DCMAKE_CUDA_COMPILER=" + join_path(self.spec["cuda"].prefix.bin, "nvcc"))

         args.extend(self.cmake_args())
-        self.run_test(cmake_bin, args, purpose="Configure with CMake")
+        cmake = which(self.spec["cmake"].prefix.bin.cmake)
+        cmake(*args)

-        self.run_test("make", [], purpose="Compile")
+        make = which("make")
+        make()

-        self.run_test(
-            "install_test",
-            ["./cache/amrex/Tests/Amr/Advection_AmrCore/Exec/inputs-ci"],
-            ["finalized"],
-            installed=False,
-            purpose="AMReX Stand-Alone Smoke Test -- AmrCore",
-            skip_missing=False,
-        )
+        install_test = which("install_test")
+        inputs_path = join_path(
+            ".", "cache", "amrex", "Tests", "Amr", "Advection_AmrCore", "Exec", "inputs-ci"
+        )
+        out = install_test(inputs_path, output=str.split, error=str.split)
+        assert "finalized" in out
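Both hunks migrate from the old `self.run_test(...)` helper to plain executable objects returned by `which()`: calling the object runs the program, and `output=str.split` / `error=str.split` capture the streams as strings for assertions. A minimal sketch of the pattern with a placeholder tool name (not any tool from this diff):

```python
# Sketch: driving an executable in a stand-alone test; "my_tool" is hypothetical.
tool = which("my_tool")
if tool is None:
    raise SkipTest("my_tool is not available")
out = tool("--help", output=str.split, error=str.split)
assert "usage" in out.lower()
```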
@@ -18,6 +18,7 @@ class Amrfinder(MakefilePackage):
     homepage = "https://github.com/ncbi/amr/wiki"
     url = "https://github.com/ncbi/amr/archive/refs/tags/amrfinder_v3.10.30.tar.gz"

+    version("3.11.8", sha256="8aac87595f28d0ba54ed3e97a1c033f9769a9b03e0aba78bc29cf6aff0cf45d1")
     version("3.10.42", sha256="97254f8d6217a4618b7f29c05acbcfe0240ee5e98458f8da7df3840b4be39c1b")
     version("3.10.30", sha256="2f1e30b86935a27cee740bd7229a41fbce278f2f60b33b8e51592bab8bdf23f1")
     version("3.10.24", sha256="fce299c980cda740dcc4f53f9b2dc9061c856213e5bdbc2c339185a5fb7dcf6a")
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os

 from spack.package import *

@@ -17,7 +18,10 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage):

     maintainers("aprokop")

+    test_requires_compiler = True
+
     version("master", branch="master")
+    version("1.4", sha256="803a1018a6305cf3fea161172b3ada49537f59261279d91c2abbcce9492ee7af")
     version("1.3", sha256="3f1e17f029a460ab99f8396e2772cec908eefc4bf3868c8828907624a2d0ce5d")
     version("1.2", sha256="ed1939110b2330b7994dcbba649b100c241a2353ed2624e627a200a398096c20")
     version("1.1", sha256="2b5f2d2d5cec57c52f470c2bf4f42621b40271f870b4f80cb57e52df1acd90ce")
@@ -61,7 +65,8 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage):
     # Standalone Kokkos
     depends_on("kokkos@3.1.00:", when="~trilinos")
     depends_on("kokkos@3.4.00:", when="@1.2~trilinos")
-    depends_on("kokkos@3.6.00:", when="@1.3:~trilinos")
+    depends_on("kokkos@3.6.00:", when="@1.3~trilinos")
+    depends_on("kokkos@3.7.01:", when="@1.4:~trilinos")
     for backend in kokkos_backends:
         depends_on("kokkos+%s" % backend.lower(), when="~trilinos+%s" % backend.lower())

@@ -83,7 +88,8 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage):
     depends_on("trilinos+kokkos", when="+trilinos")
     depends_on("trilinos+openmp", when="+trilinos+openmp")
     depends_on("trilinos@13.2.0:", when="@1.2+trilinos")
-    depends_on("trilinos@13.4.0:", when="@1.3:+trilinos")
+    depends_on("trilinos@13.4.0:", when="@1.3+trilinos")
+    depends_on("trilinos@14.0.0:", when="@1.4:+trilinos")
     conflicts("~serial", when="+trilinos")
     conflicts("+cuda", when="+trilinos")
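The tightened constraints rely on Spack's version-range syntax: `@1.3` matches release 1.3 (including point releases such as 1.3.1, if memory of the prefix-matching rule serves), while `@1.3:` is open-ended from 1.3 upward, which is why the old spelling also captured the new 1.4 release. A small sketch of the distinction, offered as an illustration rather than a verified test:

```python
# Sketch: Spack version-range semantics (package name is illustrative).
from spack.spec import Spec

assert Spec("arborx@1.3.1").satisfies("@1.3")    # prefix match on 1.3
assert Spec("arborx@1.4").satisfies("@1.3:")     # open-ended range
assert not Spec("arborx@1.4").satisfies("@1.3")  # 1.4 is not a 1.3 release
```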
@@ -117,18 +123,18 @@ def cached_tests_work_dir(self):
         """The working directory for cached test sources."""
         return join_path(self.test_suite.current_test_cache_dir, self.examples_src_dir)

-    def build_tests(self):
-        """Build the stand-alone/smoke test."""
+    def test_run_ctest(self):
+        """run ctest tests on the installed package"""
         arborx_dir = self.spec["arborx"].prefix
-        cmake_prefix_path = "-DCMAKE_PREFIX_PATH={0}".format(arborx_dir)
+        cmake_prefix_path = f"-DCMAKE_PREFIX_PATH={arborx_dir}"
         if "+mpi" in self.spec:
-            cmake_prefix_path += ";{0}".format(self.spec["mpi"].prefix)
+            cmake_prefix_path += f";{self.spec['mpi'].prefix}"

         cmake_args = [
             ".",
             cmake_prefix_path,
-            "-DCMAKE_CXX_COMPILER={0}".format(self.compiler.cxx),
+            f"-DCMAKE_CXX_COMPILER={os.environ['CXX']}",
             self.define(
                 "Kokkos_ROOT",
                 self.spec["kokkos"].prefix

@@ -136,23 +142,11 @@ def build_tests(self):
                 else self.spec["trilinos"].prefix,
             ),
         ]
+        cmake = which(self.spec["cmake"].prefix.bin.cmake)
+        make = which("make")
+        ctest = which("ctest")

-        self.run_test(
-            "cmake", cmake_args, purpose="test: calling cmake", work_dir=self.cached_tests_work_dir
-        )
-        self.run_test(
-            "make", [], purpose="test: building the tests", work_dir=self.cached_tests_work_dir
-        )
-
-    def test(self):
-        """Perform stand-alone/smoke tests on the installed package."""
-        self.build_tests()
-
-        self.run_test(
-            "ctest",
-            ["-V"],
-            purpose="test: running the tests",
-            installed=False,
-            work_dir=self.cached_tests_work_dir,
-        )
+        with working_dir(self.cached_tests_work_dir):
+            cmake(*cmake_args)
+            make()
+            ctest("-V")
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-
 import os

 from spack.package import *

@@ -50,25 +49,17 @@ def cache_test_sources(self):
         install test subdirectory for use during `spack test run`."""
         self.cache_extra_test_sources(["test"])

-    def run_parallel_example_test(self):
-        """Run stand alone test: parallel-simple"""
+    def test_run_parallel_example(self):
+        """build and run parallel-simple"""
         test_dir = join_path(self.test_suite.current_test_cache_dir, "test", "parallel")

         if not os.path.exists(test_dir):
-            print("Skipping archer test")
-            return
+            raise SkipTest("Parallel test directory does not exist")

-        exe = "parallel-simple"
-
-        self.run_test(
-            "clang-archer",
-            options=["-o", exe, "{0}".format(join_path(test_dir, "parallel-simple.c"))],
-            purpose="test: compile {0} example".format(exe),
-            work_dir=test_dir,
-        )
-
-        self.run_test(exe, purpose="test: run {0} example".format(exe), work_dir=test_dir)
-
-    def test(self):
-        self.run_parallel_example_test()
+        test_exe = "parallel-simple"
+        test_src = "{0}.c".format(test_exe)
+        with working_dir(test_dir):
+            clang = which("clang-archer")
+            clang("-o", test_exe, test_src)
+
+            parallel_simple = which(test_exe)
+            parallel_simple()
@@ -30,7 +30,7 @@ class Armcomputelibrary(SConsPackage):
     url = "https://github.com/ARM-software/ComputeLibrary/archive/refs/tags/v23.02.zip"
     git = "https://github.com/ARM-software/ComputeLibrary.git"

-    maintainers = ["annop-w"]
+    maintainers("annop-w")

     version("23.02", sha256="bed1b24047ce00155e552204bc3983e86f46775414c554a34a7ece931d67ec62")
     version("22.11", sha256="2f70f54d84390625222503ea38650c00c49d4b70bc86a6b9aeeebee9d243865f")
@@ -36,6 +36,50 @@
 }

 _versions = {
+    "23.04.1_gcc-12.2": {
+        "RHEL-7": ("789cc093cb7e0d9294aff0fdf94b74987435a09cdff4c1b7118a03350548d03c"),
+        "RHEL-8": ("1b668baec6d3df2d48c5aedc70baa6a9b638983b94bf2cd58d378859a1da49f0"),
+        "RHEL-9": ("8a4d7aec2fe109aedcb9e8fdec566dc1ba3adcb3ba79e5c08b78b9717578db1c"),
+        "SLES-15": ("9c8aa114907d3ac9311301b991d732d535422e73516e0f337395637ce6a14c4a"),
+        "Ubuntu-20.04": ("c0a67afb6989b2bdb172052ff7d20a9e3197356714df06c862edd3ac71ef62f0"),
+        "Ubuntu-22.04": ("02e59d834c341164f5acf633555024bf614726aed8a85c1b0b46d024ce7840e2"),
+        "AmazonLinux-2": ("1cbb9a3d777353b42bfb5af327419c231640e7744ab46ab3a13e97802b1ce227"),
+        "AmazonLinux-2023": ("ee9b0b6ee0d881280e473390007020504a147b75bf6076d245832f101b01653e"),
+    },
+    "23.04.1_gcc-11.3": {
+        "RHEL-7": ("522e0269ca03d6251c10ee3aa8d94ceec4618887f47847defb535849434439a5"),
+        "RHEL-8": ("00f6fee4ba4bbff5be6d5ad34137973ab89505fc61a23d8e0c302b8860c70484"),
+        "RHEL-9": ("2402165267b25d07fd64b6d444b3120354dfd27594b11a1f082e85e76465e712"),
+        "SLES-15": ("a928539efe5af760fc86a009e3d87c9648e4d4e91490c13bc136a837591549c3"),
+        "Ubuntu-20.04": ("5754d8a6040bb6d0b1df326c9ab61901a72e5cc6d2d4195e52ca9271e55fb9f6"),
+        "Ubuntu-22.04": ("8af5aca7512a604b051a7808701a5c0285e92d88232138612d8caf973b7b1252"),
+        "AmazonLinux-2": ("8c710cb7bb21694130b915cc2650cfb85fb00cfca7e5fca9bbdec5c59a09c007"),
+        "AmazonLinux-2023": ("8b9c69a72c5b1ed5814e28ddd122ab09dbe5dd3585e4c395242ed590eea6ea79"),
+    },
+    "23.04.1_gcc-10.2": {
+        "RHEL-7": ("40d62517bd978516c308b2e57ab88772699fd8bb579d98bbc10ea397c0bab431"),
+        "RHEL-8": ("76554ea1f3d143f1236afea67e33eea74660f57718ef57c12986843da75e03d3"),
+        "SLES-15": ("63a6acb00300a9e85cfafd2141515ecb28dac82c1f441778d74e8add038724e2"),
+        "Ubuntu-20.04": ("7b6bcb8d1b9ca8be2d29e7620862fa961d965f479fa04873616ac8cc9bb399fc"),
+        "AmazonLinux-2": ("c6410ce2c109ae72568186bb7e162fcf4a9b05ea89da36d17db695b7df34f506"),
+    },
+    "23.04.1_gcc-9.3": {
+        "RHEL-7": ("782bbc27c77c230426086c226a78b8951501066d631947438e65ca51d33f24c3"),
+        "RHEL-8": ("8d3be6381b3e5032c5068a1d2e3d0e69c308a93496f85af42d43a579f9f7d9a3"),
+        "SLES-15": ("abe2245674a66ec93cff3c93dac7ae04a99c6c7e43e2733de214ec188e0d6cae"),
+        "Ubuntu-20.04": ("a7d385b901f2d1c07f243c816030ad19543e00667615dea1969ce16d29759271"),
+        "AmazonLinux-2": ("7113b6e2c795933ce8d18d468889168732d3a52a0df4a48ef4bf4497e891083a"),
+    },
+    "23.04.1_gcc-8.2": {
+        "RHEL-7": ("4e077813121c1cbd8abd1afe5348cafcce5b70f96affa725c7c2d8671e2d5eed"),
+        "RHEL-8": ("772aaab9304191e3a398cba2dec21ec22fd0abadcaf44d44f32114968bd3b59d"),
+        "SLES-15": ("33766ac351fb4628c6b39f16d6bdb310ad09d88b6a6f43740349405c960d4d21"),
+        "AmazonLinux-2": ("c215ed8de77b5144a60b6552f79ef2b59ccbfac5350f083ef135305ddf643a4e"),
+    },
+    "23.04.1_gcc-7.5": {
+        "RHEL-7": ("7b2239b2ce5315e1be14dbd8fe15aff2d3b07968d64b5c80c8ab57140b6a17a8"),
+        "AmazonLinux-2": ("a2e0f176df627c50f851924ac57994f582f63b0f3d42ad0b65c915ea04dc0467"),
+    },
     "23.04_gcc-12.2": {
         "RHEL-7": ("e159f84f14d885aa5e47ca17c16ef3d95128f834a655827bf6b48fcf8d6ec459"),
         "RHEL-8": ("6ac1974ec9bd814d3a4eecf330cefd67cf2c878f026a8b04bc2928368948671a"),

@@ -225,6 +269,13 @@ class ArmplGcc(Package):
     conflicts("target=ppc64:", msg="Only available on Aarch64")
     conflicts("target=ppc64le:", msg="Only available on Aarch64")

+    conflicts("%gcc@:11", when="@23.04.1_gcc-12.2")
+    conflicts("%gcc@:10", when="@23.04.1_gcc-11.3")
+    conflicts("%gcc@:9", when="@23.04.1_gcc-10.2")
+    conflicts("%gcc@:8", when="@23.04.1_gcc-9.3")
+    conflicts("%gcc@:7", when="@23.04.1_gcc-8.2")
+    conflicts("%gcc@:6", when="@23.04.1_gcc-7.5")
+
     conflicts("%gcc@:11", when="@23.04_gcc-12.2")
     conflicts("%gcc@:10", when="@23.04_gcc-11.3")
     conflicts("%gcc@:9", when="@23.04_gcc-10.2")
@@ -183,6 +183,8 @@ class Ascent(CMakePackage, CudaPackage):
     depends_on("vtk-m+cuda", when="@0.9.0: +vtkh+cuda")
     depends_on("vtk-m+fpic", when="@0.8.0: +vtkh")
     depends_on("vtk-m~shared+fpic", when="@0.8.0: +vtkh~shared")
+    # Ascent defaults to C++11
+    depends_on("kokkos std=11", when="+vtkh ^vtk-m +kokkos")

     #######################
     # VTK-h
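Note that the `when` condition in the added directive constrains the dependency graph, not just the package's own variants: the `^vtk-m +kokkos` part only fires when vtk-m itself was built with Kokkos. A sketch of the shape (the directive mirrors the hunk above; the reading in the comment is my gloss, not text from the diff):

```python
# Sketch: conditioning a dependency on the state of another dependency.
depends_on("kokkos std=11", when="+vtkh ^vtk-m +kokkos")
# i.e. when this package has +vtkh and its vtk-m dependency is +kokkos,
# also require a kokkos built for the C++11 standard.
```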
Some files were not shown because too many files have changed in this diff.