Compare commits
444 Commits
packages/t
...
traceback-
@@ -5,7 +5,7 @@ coverage:
status:
project:
default:
threshold: 0.2%
threshold: 2.0%

ignore:
- lib/spack/spack/test/.*
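The hunk above raises the project coverage threshold and ignores Spack's own test tree. The compare view does not show the file name or indentation, so the following is only a hedged sketch of what the resulting section would look like, assuming this is the usual Codecov configuration file:

```yaml
# Sketch only: the file name (likely codecov.yml) and the indentation are
# assumptions; the diff above shows neither.
coverage:
  status:
    project:
      default:
        threshold: 2.0%   # raised from 0.2%

ignore:
  - lib/spack/spack/test/.*
```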
.github/workflows/audit.yaml (4 changes)

@@ -28,7 +28,7 @@ jobs:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: ${{inputs.python_version}}

@@ -66,7 +66,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
with:
name: coverage-audits-${{ matrix.system.os }}
.github/workflows/bootstrap.yml (98 changes)

@@ -37,7 +37,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- name: Bootstrap clingo

@@ -53,33 +53,27 @@ jobs:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install cmake bison tree
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: "3.12"
- name: Bootstrap clingo
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
${{ env.VALIDATE_LAST_EXIT }}
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
tree $HOME/.spack/bootstrap/store/

gnupg-sources:
runs-on: ${{ matrix.runner }}

@@ -96,7 +90,7 @@ jobs:
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: sudo rm -rf $(command -v gpg gpg2 patchelf)
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- name: Bootstrap GnuPG

@@ -112,10 +106,10 @@ jobs:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest'}}
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it

@@ -124,13 +118,8 @@ jobs:
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Setup Windows
if: ${{ matrix.runner == 'windows-latest' }}
run: |
Remove-Item -Path (Get-Command gpg).Path
Remove-Item -Path (Get-Command file).Path
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3

@@ -142,20 +131,11 @@ jobs:
3.11
3.12
- name: Set bootstrap sources
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
spack bootstrap disable github-actions-v0.4
- name: Disable from source bootstrap
if: ${{ matrix.runner != 'windows-latest' }}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
- name: Bootstrap clingo
# No binary clingo on Windows yet
if: ${{ matrix.runner != 'windows-latest' }}
run: |
set -e
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do

@@ -178,24 +158,48 @@ jobs:
fi
done
- name: Bootstrap GnuPG
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
source share/spack/setup-env.sh
spack -d gpg list
${{ env.VALIDATE_LAST_EXIT }}
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
tree $HOME/.spack/bootstrap/store/
- name: Bootstrap File
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
source share/spack/setup-env.sh
spack -d python share/spack/qa/bootstrap-file.py
${{ env.VALIDATE_LAST_EXIT }}
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
tree $HOME/.spack/bootstrap/store/

windows:
runs-on: "windows-latest"
steps:
- name: Checkout
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: "3.12"
- name: Setup Windows
run: |
Remove-Item -Path (Get-Command gpg).Path
Remove-Item -Path (Get-Command file).Path
- name: Bootstrap clingo
run: |
./share/spack/setup-env.ps1
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
- name: Bootstrap GnuPG
run: |
./share/spack/setup-env.ps1
spack -d gpg list
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
- name: Bootstrap File
run: |
./share/spack/setup-env.ps1
spack -d python share/spack/qa/bootstrap-file.py
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
.github/workflows/build-containers.yml (10 changes)

@@ -55,7 +55,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871

- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta

@@ -87,7 +87,7 @@ jobs:
fi

- name: Upload Dockerfile
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles

@@ -96,7 +96,7 @@ jobs:
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349

- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567

@@ -113,7 +113,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@32945a339266b759abcbdc89316275140b0fc960
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

@@ -126,7 +126,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@50769540e7f4bd5e21e526ee35c689e35e0d6874
uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: dockerfiles
pattern: dockerfiles_*
.github/workflows/ci.yaml (2 changes)

@@ -24,7 +24,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
.github/workflows/coverage.yml (4 changes)

@@ -8,7 +8,7 @@ jobs:
upload:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: '3.11'

@@ -29,6 +29,6 @@ jobs:
- run: coverage xml

- name: "Upload coverage report to CodeCov"
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
with:
verbose: true
.github/workflows/nightly-win-builds.yml (2 changes)

@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
@@ -1,4 +1,4 @@
black==24.8.0
black==24.10.0
clingo==5.7.1
flake8==7.1.1
isort==5.13.2
.github/workflows/unit_tests.yaml (43 changes)

@@ -40,7 +40,7 @@ jobs:
on_develop: false

steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3

@@ -80,7 +80,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage

@@ -89,7 +89,7 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3

@@ -113,7 +113,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-shell
path: coverage

@@ -130,7 +130,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
- name: Setup repo and non-root user
run: |
git --version

@@ -149,32 +149,33 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: '3.11'
python-version: '3.13'
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
sudo apt-get -y install coreutils gfortran graphviz gnupg2
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
COVERAGE_FILE: coverage/.coverage-clingo-cffi
run: |
share/spack/qa/run-unit-tests
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
. share/spack/setup-env.sh
spack bootstrap disable spack-install
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.5
spack bootstrap status
spack solve zlib
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-clingo-cffi
path: coverage

@@ -187,7 +188,7 @@ jobs:
os: [macos-13, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3

@@ -199,7 +200,7 @@ jobs:
pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
- name: Setup Homebrew packages
run: |
brew install dash fish gcc gnupg2 kcov
brew install dash fish gcc gnupg kcov
- name: Run unit tests
env:
SPACK_TEST_PARALLEL: 4

@@ -212,7 +213,7 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage

@@ -225,7 +226,7 @@ jobs:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
runs-on: windows-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3

@@ -243,7 +244,7 @@ jobs:
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-windows
path: coverage
.github/workflows/valid-style.yml (16 changes)

@@ -18,7 +18,7 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: '3.11'

@@ -35,7 +35,7 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3

@@ -70,7 +70,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
- name: Setup repo and non-root user
run: |
git --version

@@ -85,7 +85,7 @@ jobs:
source share/spack/setup-env.sh
spack debug report
spack -d bootstrap now --dev
spack style -t black
spack -d style -t black
spack unit-test -V
import-check:
runs-on: ubuntu-latest

@@ -98,14 +98,14 @@ jobs:
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
fetch-depth: 2
path: old

@@ -114,11 +114,11 @@ jobs:
run: git -C old reset --hard HEAD^

- name: Checkout new commit
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
path: new
- name: Install circular import checker
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
with:
repository: haampie/circular-import-fighter
ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
@@ -14,3 +14,26 @@ sphinx:
python:
install:
- requirements: lib/spack/docs/requirements.txt

search:
ranking:
spack.html: -10
spack.*.html: -10
llnl.html: -10
llnl.*.html: -10
_modules/*: -10
command_index.html: -9
basic_usage.html: 5
configuration.html: 5
config_yaml.html: 5
packages_yaml.html: 5
build_settings.html: 5
environments.html: 5
containers.html: 5
mirrors.html: 5
module_file_support.html: 5
repositories.html: 5
binary_caches.html: 5
chain.html: 5
pipelines.html: 5
packaging_guide.html: 5
CHANGELOG.md (71 changes)

@@ -1,3 +1,64 @@
# v0.22.2 (2024-09-21)

## Bugfixes
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
- Support macOS Sequoia (#45018, #45127)
- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
- Ensure shell escaping of environment variable values in load and activate commands (#42780)
- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
- Do not halt concretization on unknown variants in externals (#45326)
- Improve validation of `develop` config section (#46485)
- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
- Improve backwards compatibility in `include_concrete` (#45766)
- Fix issue where package tags were sometimes repeated (#45160)
- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
- Remove debug statements in package hash computation (#45235)
- Remove redundant clingo warnings (#45269)
- Remove hard-coded layout version (#45645)
- Do not initialize previous store state in `use_store` (#45268)
- Docs improvements (#46475)

## Package updates
- `chapel` major update (#42197, #44931, #45304)

# v0.22.1 (2024-07-04)

## Bugfixes
- Fix reuse of externals on Linux (#44316)
- Ensure parent gcc-runtime version >= child (#44834, #44870)
- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
- Improve version detection of glibc (#44154)
- Improve heuristics for solver (#44893, #44976, #45023)
- Make strong preferences override reuse (#44373)
- Reduce verbosity when C compiler is missing (#44182)
- Make missing ccache executable an error when required (#44740)
- Make every environment view containing `python` a `venv` (#44382)
- Fix external detection for compilers with os but no target (#44156)
- Fix version optimization for roots (#44272)
- Handle common implementations of pagination of tags in OCI build caches (#43136)
- Apply fetched patches to develop specs (#44950)
- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
- Fix issue with long filenames in build caches on Windows (#43851)
- Fix formatting issue in `spack audit` (#45045)
- CI fixes (#44582, #43965, #43967, #44279, #44213)

## Package updates
- protobuf: fix 3.4:3.21 patch checksum (#44443)
- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
- Remove mesa18 and libosmesa (#44264)
- Enforce consistency of `gl` providers (#44307)
- Require libiconv for iconv (#44335, #45026).
  Notice that glibc/musl also provide iconv, but are not guaranteed to be
  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
- py-matplotlib: qualify when to do a post install (#44191)
- rust: fix v1.78.0 instructions (#44127)
- suite-sparse: improve setting of the `libs` property (#44214)
- netlib-lapack: provide blas and lapack together (#44981)

# v0.22.0 (2024-05-12)

@@ -319,6 +380,16 @@
* 344 committers to packages
* 45 committers to core

# v0.21.3 (2024-10-02)

## Bugfixes
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
- Bump `archspec` to 0.2.5-dev for better aarch64 and Windows support (#42854, #44005,
  #45721, #46445)
- Support macOS Sequoia (#45018, #45127, #43862)
- CI and test maintenance (#42909, #42728, #46711, #41943, #43363)

# v0.21.2 (2024-03-01)

## Bugfixes
@@ -166,3 +166,74 @@ while `py-numpy` still needs an older version:

Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
default behavior is ``duplicates:strategy:minimal``.
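The paragraph above only names the old and new defaults. A minimal sketch of how that setting is spelled out in configuration, assuming the usual ``concretizer:`` top-level key (the exact file layout is not shown in this diff):

```yaml
# Sketch only; assumes the standard concretizer config layout.
# "none" and "minimal" are the values mentioned in the text above.
concretizer:
  duplicates:
    strategy: minimal
```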
--------
Splicing
--------

The ``splice`` key covers config attributes for splicing specs in the solver.

"Splicing" is a method for replacing a dependency with another spec
that provides the same package or virtual. There are two types of
splices, referring to different behaviors for shared dependencies
between the root spec and the new spec replacing a dependency:
"transitive" and "intransitive". A "transitive" splice is one that
resolves all conflicts by taking the dependency from the new node. An
"intransitive" splice is one that resolves all conflicts by taking the
dependency from the original root. From a theory perspective, hybrid
splices are possible but are not modeled by Spack.

All spliced specs retain a ``build_spec`` attribute that points to the
original Spec before any splice occurred. The ``build_spec`` for a
non-spliced spec is itself.

The figure below shows examples of transitive and intransitive splices:

.. figure:: images/splices.png
   :align: center

The concretizer can be configured to explicitly splice particular
replacements for a target spec. Splicing will allow the user to make
use of generically built public binary caches, while swapping in
highly optimized local builds for performance critical components
and/or components that interact closely with the specific hardware
details of the system. The most prominent candidate for splicing is
MPI providers. MPI packages have relatively well-understood ABI
characteristics, and most High Performance Computing facilities deploy
highly optimized MPI packages tailored to their particular
hardware. The following config block configures Spack to replace
whatever MPI provider each spec was concretized to use with the
particular package of ``mpich`` with the hash that begins ``abcdef``.

.. code-block:: yaml

   concretizer:
     splice:
       explicit:
       - target: mpi
         replacement: mpich/abcdef
         transitive: false

.. warning::

   When configuring an explicit splice, you as the user take on the
   responsibility for ensuring ABI compatibility between the specs
   matched by the target and the replacement you provide. If they are
   not compatible, Spack will not warn you and your application will
   fail to run.

The ``target`` field of an explicit splice can be any abstract
spec. The ``replacement`` field must be a spec that includes the hash
of a concrete spec, and the replacement must either be the same
package as the target, provide the virtual that is the target, or
provide a virtual that the target provides. The ``transitive`` field
is optional -- by default, splices will be transitive.

.. note::

   With explicit splices configured, it is possible for Spack to
   concretize to a spec that does not satisfy the input. For example,
   with the config above ``hdf5 ^mvapich2`` will concretize to use
   ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
   will warn the user in this case, but will not fail the
   concretization.
@@ -130,14 +130,19 @@ before or after a particular phase. For example, in ``perl``, we see:

@run_after("install")
def install_cpanm(self):
spec = self.spec

if spec.satisfies("+cpanm"):
with working_dir(join_path("cpanm", "cpanm")):
perl = spec["perl"].command
perl("Makefile.PL")
make()
make("install")
spec = self.spec
maker = make
cpan_dir = join_path("cpanm", "cpanm")
if sys.platform == "win32":
maker = nmake
cpan_dir = join_path(self.stage.source_path, cpan_dir)
cpan_dir = windows_sfn(cpan_dir)
if "+cpanm" in spec:
with working_dir(cpan_dir):
perl = spec["perl"].command
perl("Makefile.PL")
maker()
maker("install")

This extra step automatically installs ``cpanm`` in addition to the
base Perl installation.

@@ -176,8 +181,14 @@ In the ``perl`` package, we can see:

@run_after("build")
@on_package_attributes(run_tests=True)
def test(self):
make("test")
def build_test(self):
if sys.platform == "win32":
win32_dir = os.path.join(self.stage.source_path, "win32")
win32_dir = windows_sfn(win32_dir)
with working_dir(win32_dir):
nmake("test", ignore_quotes=True)
else:
make("test")

As you can guess, this runs ``make test`` *after* building the package,
if and only if testing is requested. Again, this is not specific to
@@ -49,14 +49,14 @@ following phases:
#. ``install`` - install the package

Package developers often add unit tests that can be invoked with
``scons test`` or ``scons check``. Spack provides a ``test`` method
``scons test`` or ``scons check``. Spack provides a ``build_test`` method
to handle this. Since we don't know which one the package developer
chose, the ``test`` method does nothing by default, but can be easily
chose, the ``build_test`` method does nothing by default, but can be easily
overridden like so:

.. code-block:: python

   def test(self):
   def build_test(self):
       scons("check")
@@ -220,6 +220,8 @@ def setup(sphinx):
("py:class", "spack.filesystem_view.SimpleFilesystemView"),
("py:class", "spack.traverse.EdgeAndDepth"),
("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
# TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
higher-precedence scopes first. So, settings in a higher-precedence file
can override those with the same key in a lower-precedence one. For
list-valued settings, Spack *prepends* higher-precedence settings to
lower-precedence settings. Completely ignoring higher-level configuration
lower-precedence settings. Completely ignoring lower-precedence configuration
options is supported with the ``::`` notation for keys (see
:ref:`config-overrides` below).
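The hunk above corrects the wording around the ``::`` override notation. As a hedged illustration of that notation (the specific key used here is an example and does not come from this diff), a list key written with a trailing ``::`` in a higher-precedence scope replaces, rather than prepends to, the value from lower-precedence scopes:

```yaml
# Hypothetical packages.yaml fragment in a higher-precedence scope;
# the double colon makes this list replace lower-precedence values
# instead of being prepended to them.
packages:
  all:
    compiler:: [gcc@12.3.0]
```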
@@ -712,27 +712,27 @@ Release branches
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
There are currently two types of Spack releases: :ref:`major releases
|
||||
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
|
||||
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
|
||||
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
|
||||
diagram of how Spack release branches work::
|
||||
|
||||
o branch: develop (latest version, v0.19.0.dev0)
|
||||
o branch: develop (latest version, v0.23.0.dev0)
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.18, tag: v0.18.1
|
||||
| o branch: releases/v0.22, tag: v0.22.1
|
||||
o |
|
||||
| o tag: v0.18.0
|
||||
| o tag: v0.22.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
o
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.17, tag: v0.17.2
|
||||
| o branch: releases/v0.21, tag: v0.21.2
|
||||
o |
|
||||
| o tag: v0.17.1
|
||||
| o tag: v0.21.1
|
||||
o |
|
||||
| o tag: v0.17.0
|
||||
| o tag: v0.21.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
@@ -743,8 +743,8 @@ requests target ``develop``. The ``develop`` branch will report that its
|
||||
version is that of the next **major** release with a ``.dev0`` suffix.
|
||||
|
||||
Each Spack release series also has a corresponding branch, e.g.
|
||||
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
|
||||
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
|
||||
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
|
||||
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
|
||||
tagged version on a release branch. Minor releases are back-ported from
|
||||
develop onto release branches. This is typically done by cherry-picking
|
||||
bugfix commits off of ``develop``.
|
||||
@@ -774,27 +774,40 @@ for more details.
|
||||
Scheduling work for releases
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We schedule work for releases by creating `GitHub projects
|
||||
<https://github.com/spack/spack/projects>`_. At any time, there may be
|
||||
several open release projects. For example, below are two releases (from
|
||||
some past version of the page linked above):
|
||||
We schedule work for **major releases** through `milestones
|
||||
<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
|
||||
<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
|
||||
<https://github.com/spack/spack/labels>`_.
|
||||
|
||||
.. image:: images/projects.png
|
||||
There is only one milestone open at a time. Its name corresponds to the next major version, for
|
||||
example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
|
||||
core developers, so that they are not forgotten at the time of release. The milestone is closed
|
||||
when the release is made, and a new milestone is created for the next major release.
|
||||
|
||||
This image shows one release in progress for ``0.15.1`` and another for
|
||||
``0.16.0``. Each of these releases has a project board containing issues
|
||||
and pull requests. GitHub shows a status bar with completed work in
|
||||
green, work in progress in purple, and work not started yet in gray, so
|
||||
it's fairly easy to see progress.
|
||||
Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
|
||||
assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
|
||||
issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
|
||||
Important issues should be assigned to the next milestone as well, so they appear at the top of
|
||||
the project board.
|
||||
|
||||
Spack's project boards are not firm commitments so we move work between
|
||||
releases frequently. If we need to make a release and some tasks are not
|
||||
yet done, we will simply move them to the next minor or major release, rather
|
||||
than delaying the release to complete them.
|
||||
Spack's milestones are not firm commitments so we move work between releases frequently. If we
|
||||
need to make a release and some tasks are not yet done, we will simply move them to the next major
|
||||
release milestone, rather than delaying the release to complete them.
|
||||
|
||||
For more on using GitHub project boards, see `GitHub's documentation
|
||||
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Backporting bug fixes
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
|
||||
(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
|
||||
backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
|
||||
fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
|
||||
that the bug fix should be backported to.
|
||||
|
||||
Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
|
||||
This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
|
||||
branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
|
||||
Typically there are one or two backport pull requests open at any given time.
|
||||
|
||||
.. _major-releases:
|
||||
|
||||
@@ -802,25 +815,21 @@ For more on using GitHub project boards, see `GitHub's documentation
|
||||
Making major releases
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Assuming a project board has already been created and all required work
|
||||
completed, the steps to make the major release are:
|
||||
Assuming all required work from the milestone is completed, the steps to make the major release
|
||||
are:
|
||||
|
||||
#. Create two new project boards:
|
||||
#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
|
||||
release.
|
||||
|
||||
* One for the next major release
|
||||
* One for the next point release
|
||||
#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.
|
||||
|
||||
#. Move any optional tasks that are not done to one of the new project boards.
|
||||
|
||||
In general, small bugfixes should go to the next point release. Major
|
||||
features, refactors, and changes that could affect concretization should
|
||||
go in the next major release.
|
||||
#. Move any optional tasks that are not done to the next milestone.
|
||||
|
||||
#. Create a branch for the release, based on ``develop``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout -b releases/v0.15 develop
|
||||
$ git checkout -b releases/v0.23 develop
|
||||
|
||||
For a version ``vX.Y.Z``, the branch's name should be
|
||||
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
|
||||
@@ -856,8 +865,8 @@ completed, the steps to make the major release are:
|
||||
|
||||
Create a pull request targeting the ``develop`` branch, bumping the major
|
||||
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
|
||||
For instance when you have just released ``v0.15.0``, set the version
|
||||
to ``(0, 16, 0, 'dev0')`` on ``develop``.
|
||||
For instance when you have just released ``v0.23.0``, set the version
|
||||
to ``(0, 24, 0, 'dev0')`` on ``develop``.
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
@@ -866,82 +875,52 @@ completed, the steps to make the major release are:
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
|
||||
.. _point-releases:
|
||||
.. _patch-releases:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Making point releases
|
||||
Making patch releases
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Assuming a project board has already been created and all required work
|
||||
completed, the steps to make the point release are:
|
||||
To make the patch release process both efficient and transparent, we use a *backports pull request*
|
||||
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
|
||||
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
|
||||
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
|
||||
changes are fresh in the mind of the developer.
|
||||
|
||||
#. Create a new project board for the next point release.
|
||||
The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
|
||||
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
|
||||
|
||||
#. Move any optional tasks that are not done to the next project board.
|
||||
Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
|
||||
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
|
||||
squashed), cherry-pick each associated commit individually. Never force push to the
|
||||
``backports/vX.Y.Z`` branch.
|
||||
|
||||
#. Check out the release branch (it should already exist).
|
||||
.. warning::
|
||||
|
||||
For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
|
||||
For ``v0.15.1``, you would check out ``releases/v0.15``:
|
||||
Sometimes you may **still** get merge conflicts even if you have
|
||||
cherry-picked all the commits in order. This generally means there
|
||||
is some other intervening pull request that the one you're trying
|
||||
to pick depends on. In these cases, you'll need to make a judgment
|
||||
call regarding those pull requests. Consider the number of affected
|
||||
files and/or the resulting differences.
|
||||
|
||||
.. code-block:: console
|
||||
1. If the changes are small, you might just cherry-pick it.
|
||||
|
||||
$ git checkout releases/v0.15
|
||||
2. If the changes are large, then you may decide that this fix is not
|
||||
worth including in a patch release, in which case you should remove
|
||||
the label from the pull request. Remember that large, manual backports
|
||||
are seldom the right choice for a patch release.
|
||||
|
||||
#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
|
||||
in the project, create it. This pull request ought to be created as early as
|
||||
possible when working on a release project, so that we can build the release
|
||||
commits incrementally, and identify potential conflicts at an early stage.
|
||||
When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
|
||||
release as follows:
|
||||
|
||||
#. Cherry-pick each pull request in the ``Done`` column of the release
|
||||
project board onto the ``Backports vX.Y.Z`` pull request.
|
||||
#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
|
||||
release.
|
||||
|
||||
This is **usually** fairly simple since we squash the commits from the
|
||||
vast majority of pull requests. That means there is only one commit
|
||||
per pull request to cherry-pick. For example, `this pull request
|
||||
<https://github.com/spack/spack/pull/15777>`_ has three commits, but
|
||||
they were squashed into a single commit on merge. You can see the
|
||||
commit that was created here:
|
||||
#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
|
||||
|
||||
.. image:: images/pr-commit.png
|
||||
|
||||
You can easily cherry pick it like this (assuming you already have the
|
||||
release branch checked out):
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git cherry-pick 7e46da7
|
||||
|
||||
For pull requests that were rebased (or not squashed), you'll need to
|
||||
cherry-pick each associated commit individually.
|
||||
|
||||
.. warning::
|
||||
|
||||
It is important to cherry-pick commits in the order they happened,
|
||||
otherwise you can get conflicts while cherry-picking. When
|
||||
cherry-picking look at the merge date,
|
||||
**not** the number of the pull request or the date it was opened.
|
||||
|
   Sometimes you may **still** get merge conflicts even if you have
   cherry-picked all the commits in order. This generally means there
   is some other intervening pull request that the one you're trying
   to pick depends on. In these cases, you'll need to make a judgment
   call regarding those pull requests. Consider the number of affected
   files and/or the resulting differences.

   1. If the changes are small, you might just cherry-pick it.

   2. If the changes are large, then you may decide that this fix is not
      worth including in a patch release, in which case you should remove
      the label from the pull request. Remember that large, manual backports
      are seldom the right choice for a patch release.

   When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
   release as follows:

#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
   release.

#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.

#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:

   1. Bump the version in ``lib/spack/spack/__init__.py``.
   2. Update ``CHANGELOG.md`` with a list of the changes made on the
      release branch. See `the changelog from 0.14.1
      <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Make sure CI passes on the **backports pull request**, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

   If CI does not pass, you'll need to figure out why, and make changes
   to the release branch until it does. You can make more commits, modify
   or remove cherry-picked commits, or cherry-pick **more** from
   ``develop`` to make this happen.

#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
   is needed to keep track in the release branch of all the commits that were
   cherry-picked.

#. Make sure CI passes on the last commit of the **release branch**.

#. In the rare case you need to include additional commits in the patch release after the backports
   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
   repeat the process. Avoid repeated force pushes to the release branch.

#. Follow the steps in :ref:`publishing-releases`.
^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest`
^^^^^^^^^^^^^^^^^^^^^^^^^^

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.22.3``:

* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
  it with ``releases/latest``, as these are higher than ``0.22.3``.

* If you are making a new release of an **older** major version of
  Spack, e.g. ``0.21.4``, then you should not tag it as
  ``releases/latest`` (as there are newer major versions).

To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

.. code-block:: console

   $ git fetch --force git@github.com:spack/spack vX.Y.Z

Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.

.. code-block:: console

   $ git tag --force releases/latest vX.Y.Z
   $ git push --force git@github.com:spack/spack releases/latest

The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
local tags.


.. _announcing-releases:

.. _environments:

=====================================
Environments (spack.yaml, spack.lock)
=====================================

An environment is used to group a set of specs intended for some purpose
to be built, rebuilt, and deployed in a coherent fashion. Environments
define aspects of the installation of the software, such as:

#. *which* specs to install;
#. *how* those specs are configured; and
#. *where* the concretized software will be installed.

Aggregating this information into an environment for processing has advantages
over the *à la carte* approach of building and loading individual Spack modules.

With environments, you concretize, install, or load (activate) all of the
specs with a single command. Concretization fully configures the specs
and dependencies of the environment in preparation for installing the
software. This is a more robust solution than ad-hoc installation scripts.
And you can share an environment or even re-use it on a different computer.

Environment definitions, especially *how* specs are configured, allow the
software to remain stable and repeatable even when Spack packages are upgraded.
Changes are only picked up when the environment is explicitly re-concretized.

Defining *where* specs are installed supports a filesystem view of the
environment. Yet Spack maintains a single installation of the software that
can be re-used across multiple environments.

Activating an environment determines *when* all of the associated (and
installed) specs are loaded, so it limits the software loaded to those specs
actually needed by the environment. Spack can even generate a script to
load all modules related to an environment.

Other packaging systems also provide environments that are similar in
some ways to Spack environments; for example, `Conda environments
<https://conda.io/docs/user-guide/tasks/manage-environments.html>`_ or
`Python Virtual Environments
<https://docs.python.org/3/tutorial/venv.html>`_. Spack environments
provide some distinctive features though:

#. A spec installed "in" an environment is no different from the same
   spec installed anywhere else in Spack.
#. Spack environments may contain more than one spec of the same
   package.

Spack uses a "manifest and lock" model similar to `Bundler gemfiles
<https://bundler.io/man/gemfile.5.html>`_ and other package managers.
The environment's user input file (or manifest) is named ``spack.yaml``.
The lock file, which contains the fully configured and concretized specs,
is named ``spack.lock``.
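For orientation, a minimal manifest might look like the following sketch (the
spec list and the ``view`` setting are illustrative examples, not defaults):

.. code-block:: yaml

   spack:
     specs:
     - zlib@1.2.8
     - hdf5 +mpi
     view: true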

.. _environments-using:

An environment is created by:

.. code-block:: console

   $ spack env create myenv

The directory ``$SPACK_ROOT/var/spack/environments/myenv`` is created
to manage the environment.

.. note::

   All managed environments by default are stored in the
   ``$SPACK_ROOT/var/spack/environments`` folder. This location can be changed
   by setting the ``environments_root`` variable in ``config.yaml``.

Spack creates the file ``spack.yaml``, hidden directory ``.spack-env``, and
``spack.lock`` file under ``$SPACK_ROOT/var/spack/environments/myenv``. User
interaction occurs through the ``spack.yaml`` file and the Spack commands
that affect it. Metadata and, by default, the view are stored in the
``.spack-env`` directory. When the environment is concretized, Spack creates
the ``spack.lock`` file with the fully configured specs and dependencies for
the environment.

The ``.spack-env`` subdirectory also contains:

* ``repo/``: A subdirectory acting as the repo consisting of the Spack
  packages used in the environment. It allows the environment to build
  the same, in theory, even on different versions of Spack with different
  packages!
* ``logs/``: A subdirectory containing the build logs for the packages
  in this environment.

Spack Environments can also be created from either the user input (manifest)
file or the lockfile. Create an environment from a manifest using:

.. code-block:: console

   $ spack env create myenv spack.yaml

The resulting environment is guaranteed to have the same root specs as
the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).

Create an environment from a ``spack.lock`` file using:

.. code-block:: console

   $ spack env create myenv spack.lock

The resulting environment, when on the same or a compatible machine, is
guaranteed to initially have the same concrete specs as the original.

.. note::

   Environment creation also accepts a full path to the file.

   If the path is not under the ``$SPACK_ROOT/var/spack/environments``
   directory then the source is referred to as an
   :ref:`independent environment <independent_environments>`.

^^^^^^^^^^^^^^^^^^^^^^^^^
Activating an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^
To activate an environment, use the following command:

.. code-block:: console

   $ spack env activate myenv

By default, ``spack env activate`` will load the view associated
with the environment into the user environment. The ``-v,
--with-view`` argument ensures this behavior, and the ``-V,
--without-view`` argument activates the environment without changing
the user environment variables.

Adding the ``-p`` option causes the user's prompt to begin with the
environment name in brackets.

.. code-block:: console

   $ spack env activate -p myenv
   [myenv] $ ...

The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and independent
environments can both be created using the same flags that ``spack env create``
accepts. If an environment already exists then Spack will simply activate it
and ignore the create-specific flags.
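A sketch of creating-on-activate (the environment name is a placeholder):

.. code-block:: console

   $ spack env activate --create myenv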
If the environment was activated with its view, deactivating the
environment will remove the view from the user environment.

.. _independent_environments:

^^^^^^^^^^^^^^^^^^^^^^^^
Independent Environments
^^^^^^^^^^^^^^^^^^^^^^^^

Independent environments can be located in any directory outside of Spack.

.. note::

   When uninstalling packages, Spack asks the user to confirm the removal of packages
   that are still used in a managed environment. This is not the case for independent
   environments.

To create an independent environment, use one of the following commands:

.. code-block:: console

   $ spack env create --dir my_env
   $ spack env create ./my_env

As a shorthand, you can also create an independent environment upon activation if it does not
already exist:

.. code-block:: console

   $ spack env activate --create ./my_env

For convenience, Spack can also place an independent environment in a temporary directory for you:

.. code-block:: console

   $ spack env activate --temp


^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment-Aware Commands
^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack commands are environment-aware. For example, the ``find``
command shows only the specs in the active environment if an
environment has been activated. Otherwise it shows all specs in
the Spack instance. The same rule applies to the ``install`` and
``uninstall`` commands.
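A quick illustration of that behavior (the environment name is a placeholder,
and the comments describe what each command lists):

.. code-block:: console

   $ spack env activate myenv
   [myenv] $ spack install zlib@1.2.8
   [myenv] $ spack find          # only the specs in myenv, including zlib
   [myenv] $ spack env deactivate
   $ spack find                  # all specs installed in this Spack instance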
Note that when we installed the abstract spec ``zlib@1.2.8``, it was
presented as a root of the environment. All explicitly installed
packages will be listed as roots of the environment.

All of the Spack commands that act on the list of installed specs are
environment-aware in this way, including ``install``,
``uninstall``, ``find``, ``extensions``, etcetera. In the
:ref:`environment-configuration` section we will discuss
environment-aware commands further.

^^^^^^^^^^^^^^^^^^^^^
Adding Abstract Specs
^^^^^^^^^^^^^^^^^^^^^

An abstract spec is the user-specified spec before Spack applies
defaults or dependency information.

Users can add abstract specs to an environment using the ``spack add``
command. The most important component of an environment is a list of
abstract specs.

Adding a spec adds it as a root spec of the environment in the user
input file (``spack.yaml``). It does not affect the concrete specs
in the lock file (``spack.lock``) and it does not install the spec.

The ``spack add`` command is environment-aware. It adds the spec to the
currently active environment. An error is generated if there isn't an
active environment. All environment-aware commands can also
be called using the ``spack -e`` flag to specify the environment.
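For example (``mpileaks`` is a stand-in package name):

.. code-block:: console

   [myenv]$ spack add mpileaks

or, without activating the environment first:

.. code-block:: console

   $ spack -e myenv add mpileaks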
^^^^^^^^^^^^
Concretizing
^^^^^^^^^^^^

Once user specs have been added to an environment, they can be concretized.
There are three different modes of operation to concretize an environment,
explained in detail in :ref:`environments_concretization_config`.
Regardless of which mode of operation is chosen, the following
command will ensure all of the root specs are concretized according to the
constraints that are prescribed in the configuration:

.. code-block:: console

   [myenv]$ spack concretize

In the case of specs that are not concretized together, the command
above will concretize only the specs that were added and not yet
concretized. Forcing a re-concretization of all of the specs can be done
by adding the ``-f`` option:

.. code-block:: console

   [myenv]$ spack concretize -f

Without the option, Spack guarantees that already concretized specs are
unchanged in the environment.

The ``concretize`` command does not install any packages. For packages
that have already been installed outside of the environment, the
^^^^^^^^^^^^^^^^^^^^^^^^^
Installing an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding individual specs to an environment, one
can install the entire environment at once using the command

.. code-block:: console

   [myenv]$ spack install

If the environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will concretize
the environment before installing the concretized specs.

As it installs, ``spack install`` creates symbolic links in the
``logs/`` directory in the environment, allowing for easy inspection
of build logs related to that environment. The ``spack install``
command also stores a Spack repo containing the ``package.py`` file
used at install time for each package in the ``repos/`` directory in
the environment.

The ``--no-add`` option can be used in a concrete environment to tell
Spack to install specs already present in the environment but not to
add any new root specs to the environment. For root specs provided
to ``spack install`` on the command line, ``--no-add`` is the default,
while for dependency specs, it is optional. In other
words, if there is an unambiguous match in the active concrete environment
for a root spec provided to ``spack install`` on the command line, Spack
does not require you to specify the ``--no-add`` option to prevent the spec
from being added again.
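A short sketch (``hdf5`` stands in for a spec that is already a root of the
active concrete environment):

.. code-block:: console

   [myenv]$ spack install --no-add hdf5   # install the existing spec; never add a new root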
The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then Spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.

By default, ``spack develop`` will also clone the package to a subdirectory in the
environment for the local source. This package will have a special variant ``dev_path``
set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications

also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.
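A sketch of that workflow (``foo`` stands in for a real package name):

.. code-block:: console

   [myenv]$ spack develop foo@git.main
   [myenv]$ spack install
   # ... edit the cloned sources under the environment directory ...
   [myenv]$ spack install        # rebuilds foo and its dependents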

If the package being developed supports out-of-source builds then users can use the
^^^^^^^^^^^^^^^^^^^^^
Inline configurations
^^^^^^^^^^^^^^^^^^^^^

Inline environment-scope configuration is done using the same yaml
format as standard Spack configuration scopes, covered in the
:ref:`configuration` section. Each section is contained under a
top-level yaml object with its name. For example, a ``spack.yaml``
manifest can embed such sections directly, as in the sketch below.
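(A minimal sketch; the ``config`` and ``packages`` values are arbitrary examples.)

.. code-block:: yaml

   spack:
     specs:
     - hdf5
     config:
       build_jobs: 8
     packages:
       all:
         providers:
           mpi: [openmpi]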
^^^^^^^^^^^^^^^^^^^^^^^
Included configurations
^^^^^^^^^^^^^^^^^^^^^^^

Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
them to the environment.
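A sketch of an ``include`` list (the paths and URL are placeholders):

.. code-block:: yaml

   spack:
     include:
     - relative/path/to/config.yaml
     - /absolute/path/to/packages.yaml
     - https://raw.githubusercontent.com/user/repo/main/compilers.yaml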
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.

^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence
^^^^^^^^^^^^^^^^^^^^^^^^

have higher precedence, as the included configs are applied in reverse order.

-------------------------------
Manually Editing the Specs List
-------------------------------

The list of abstract/root specs in the environment is maintained in
the ``spack.yaml`` manifest under the heading ``specs``.
A spec matrix evaluates to the cross-product of those specs. Spec matrices also
contain an ``excludes`` directive, which eliminates certain
combinations from the evaluated result.

The following two environment manifests are identical:
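The original pair of manifests is not reproduced here; the following sketch shows
the idea (package and compiler names are placeholders). A factored manifest using
``definitions`` and a ``matrix``:

.. code-block:: yaml

   spack:
     definitions:
     - packages: [zlib, libelf]
     - compilers: ['%gcc', '%clang']
     specs:
     - matrix:
       - [$packages]
       - [$compilers]

expands to the same spec list as writing out the cross-product by hand:

.. code-block:: yaml

   spack:
     specs:
     - zlib%gcc
     - zlib%clang
     - libelf%gcc
     - libelf%clang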
In short files like the example, it may be easier to simply list the
included specs. However for more complicated examples involving many
packages across many toolchains, separately factored lists make
environments substantially more manageable.

Additionally, the ``-l`` option to the ``spack add`` command allows
one to add to named lists in the definitions section of the manifest
The argument ``--without-view`` can be used to create an
environment without any view configured.

The ``spack env view`` command can be used to change the managed views
of an environment. The subcommand ``spack env view enable`` will add a
view named ``default`` to an environment. It takes an optional
argument to specify the path for the new default view. The subcommand
``spack env view disable`` will remove the view named ``default`` from

gets installed and is available for use in the ``env`` target.

    $(SPACK) -e . env depfile -o $@ --make-prefix spack

    env: spack/env
            $(info environment installed!)

    clean:
            rm -rf spack.lock env.mk spack/
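The ``Makefile`` fragment above wraps ``spack env depfile``; the basic flow it
automates is, in sketch form:

.. code-block:: console

   $ spack -e . env depfile -o Makefile
   $ make -j8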
(Binary image files not shown: two existing documentation images were updated or removed; a new image ``lib/spack/docs/images/splices.png`` was added.)
For instance, the following config options,

   tcl:
     all:
       suffixes:
         ^python@3: 'python{^python.version}'
         ^openblas: 'openblas'

will add a ``python-3.12.1`` version string to any packages compiled with
Python matching the spec, ``python@3``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.
the attributes will be merged starting from the bottom match going up to the top.

In the case that no match is found in a submapping section, no additional attributes will be applied.

^^^^^^^^^^^^^^^^^^^^^^^^
Dynamic Mapping Sections
^^^^^^^^^^^^^^^^^^^^^^^^

For large scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
behavior, but it does follow the rest of the merge rules for configurations.

The dynamic mapping service needs to implement a single REST API interface for getting
requests ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.

An example request:

.. code-block::

   https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0

Here is an example response that updates the Kubernetes request variables, overrides the max retries for GitLab,
and prepends a note about the modifications made by the my-dyn-mapping.spack.io service.

.. code-block::

   200 OK

   {
       "variables":
       {
           "KUBERNETES_CPU_REQUEST": "500m",
           "KUBERNETES_MEMORY_REQUEST": "2G"
       },
       "retry": { "max": "1" },
       "script+:":
       [
           "echo \"Job modified by my-dyn-mapping.spack.io\""
       ]
   }

The ci.yaml configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.

It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``
respectively. It is also possible to configure required attributes under the ``required`` section.

The client timeout and SSL verification can be configured using the ``timeout`` and ``verify_ssl`` options.
By default, ``timeout`` is set to the option in ``config:timeout`` and ``verify_ssl`` is set to the option in ``config:verify_ssl``.

Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
header may be environment variables that are expanded at runtime, such as a private token configured on the runner.

Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``.

.. code-block:: yaml

   ci:
   - dynamic-mapping:
       endpoint: my-dyn-mapping.spack.io/allocation
       timeout: 10
       verify_ssl: True
       header:
         PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
         MY_CONFIG: "fuzz_allocation:false"
       allow:
       - variables
       ignore:
       - script
       require: []
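For manually exercising such a service, a request of this shape mirrors what the
CI side sends (the endpoint and spec are the placeholders used above):

.. code-block:: console

   $ curl -G "https://my-dyn-mapping.spack.io/allocation" \
          --data-urlencode "spec=zlib-ng@2.1.6 +compat+opt+shared+pic arch=linux-ubuntu20.04-x86_64_v3 %gcc@12.0.0"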

^^^^^^^^^^^^^
Bootstrapping
^^^^^^^^^^^^^
sphinx==7.4.7
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.1
python-levenshtein==0.26.0
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1
lib/spack/env/cc (vendored): 276 lines changed
@@ -101,9 +101,10 @@ setsep() {
|
||||
esac
|
||||
}
|
||||
|
||||
# prepend LISTNAME ELEMENT
|
||||
# prepend LISTNAME ELEMENT [SEP]
|
||||
#
|
||||
# Prepend ELEMENT to the list stored in the variable LISTNAME.
|
||||
# Prepend ELEMENT to the list stored in the variable LISTNAME,
|
||||
# assuming the list is separated by SEP.
|
||||
# Handles empty lists and single-element lists.
|
||||
prepend() {
|
||||
varname="$1"
|
||||
@@ -118,39 +119,18 @@ prepend() {
|
||||
fi
|
||||
}
|
||||
|
||||
# contains LISTNAME ELEMENT
|
||||
# append LISTNAME ELEMENT [SEP]
|
||||
#
|
||||
# Test whether LISTNAME contains ELEMENT.
|
||||
# Set $? to 1 if LISTNAME does not contain ELEMENT.
|
||||
# Set $? to 0 if LISTNAME contains ELEMENT.
|
||||
contains() {
|
||||
varname="$1"
|
||||
elt="$2"
|
||||
|
||||
setsep "$varname"
|
||||
|
||||
# the list may: 1) only contain the element, 2) start with the element,
|
||||
# 3) contain the element in the middle, or 4) end with the element.
|
||||
eval "[ \"\${$varname}\" = \"$elt\" ]" \
|
||||
|| eval "[ \"\${$varname#${elt}${sep}}\" != \"\${$varname}\" ]" \
|
||||
|| eval "[ \"\${$varname#*${sep}${elt}${sep}}\" != \"\${$varname}\" ]" \
|
||||
|| eval "[ \"\${$varname%${sep}${elt}}\" != \"\${$varname}\" ]"
|
||||
}
|
||||
|
||||
# append LISTNAME ELEMENT [unique]
|
||||
#
|
||||
# Append ELEMENT to the list stored in the variable LISTNAME.
|
||||
# Append ELEMENT to the list stored in the variable LISTNAME,
|
||||
# assuming the list is separated by SEP.
|
||||
# Handles empty lists and single-element lists.
|
||||
#
|
||||
# If the third argument is provided and if it is the string 'unique',
|
||||
# this will not append if ELEMENT is already in the list LISTNAME.
|
||||
append() {
|
||||
varname="$1"
|
||||
elt="$2"
|
||||
|
||||
if empty "$varname"; then
|
||||
eval "$varname=\"\${elt}\""
|
||||
elif [ "$3" != "unique" ] || ! contains "$varname" "$elt" ; then
|
||||
else
|
||||
# Get the appropriate separator for the list we're appending to.
|
||||
setsep "$varname"
|
||||
eval "$varname=\"\${$varname}${sep}\${elt}\""
|
||||
@@ -168,21 +148,10 @@ extend() {
|
||||
if [ "$sep" != " " ]; then
|
||||
IFS="$sep"
|
||||
fi
|
||||
eval "for elt in \${$2}; do append $1 \"$3\${elt}\" ${_append_args}; done"
|
||||
eval "for elt in \${$2}; do append $1 \"$3\${elt}\"; done"
|
||||
unset IFS
|
||||
}
|
||||
|
||||
# extend_unique LISTNAME1 LISTNAME2 [PREFIX]
|
||||
#
|
||||
# Append the elements stored in the variable LISTNAME2 to the list
|
||||
# stored in LISTNAME1, if they are not already present.
|
||||
# If PREFIX is provided, prepend it to each element.
|
||||
extend_unique() {
|
||||
_append_args="unique"
|
||||
extend "$@"
|
||||
unset _append_args
|
||||
}
|
||||
|
||||
# preextend LISTNAME1 LISTNAME2 [PREFIX]
|
||||
#
|
||||
# Prepend the elements stored in the list at LISTNAME2
|
||||
@@ -269,36 +238,6 @@ esac
|
||||
}
|
||||
"
|
||||
|
||||
# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
|
||||
# which are used to prioritize paths when assembling the final command line.
|
||||
|
||||
# init_path_lists LISTNAME
|
||||
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
|
||||
init_path_lists() {
|
||||
eval "spack_store_$1=\"\""
|
||||
eval "$1=\"\""
|
||||
eval "system_$1=\"\""
|
||||
}
|
||||
|
||||
# assign_path_lists LISTNAME1 LISTNAME2
|
||||
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
|
||||
assign_path_lists() {
|
||||
eval "spack_store_$1=\"\${spack_store_$2}\""
|
||||
eval "$1=\"\${$2}\""
|
||||
eval "system_$1=\"\${system_$2}\""
|
||||
}
|
||||
|
||||
# append_path_lists LISTNAME ELT
|
||||
# Append the provided ELT to the appropriate list, based on the result of path_order().
|
||||
append_path_lists() {
|
||||
path_order "$2"
|
||||
case $? in
|
||||
0) eval "append spack_store_$1 \"\$2\"" ;;
|
||||
1) eval "append $1 \"\$2\"" ;;
|
||||
2) eval "append system_$1 \"\$2\"" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Check if optional parameters are defined
|
||||
# If we aren't asking for debug flags, don't add them
|
||||
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
|
||||
@@ -531,7 +470,12 @@ input_command="$*"
|
||||
parse_Wl() {
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
@@ -540,14 +484,24 @@ parse_Wl() {
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
fi
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
fi
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
wl_expect_rpath=yes
|
||||
@@ -555,7 +509,8 @@ parse_Wl() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
-Wl)
|
||||
# Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
|
||||
# Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
|
||||
# it.
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
@@ -574,10 +529,21 @@ categorize_arguments() {
|
||||
return_other_args_list=""
|
||||
return_isystem_was_used=""
|
||||
|
||||
init_path_lists return_isystem_include_dirs_list
|
||||
init_path_lists return_include_dirs_list
|
||||
init_path_lists return_lib_dirs_list
|
||||
init_path_lists return_rpath_dirs_list
|
||||
return_isystem_spack_store_include_dirs_list=""
|
||||
return_isystem_system_include_dirs_list=""
|
||||
return_isystem_include_dirs_list=""
|
||||
|
||||
return_spack_store_include_dirs_list=""
|
||||
return_system_include_dirs_list=""
|
||||
return_include_dirs_list=""
|
||||
|
||||
return_spack_store_lib_dirs_list=""
|
||||
return_system_lib_dirs_list=""
|
||||
return_lib_dirs_list=""
|
||||
|
||||
return_spack_store_rpath_dirs_list=""
|
||||
return_system_rpath_dirs_list=""
|
||||
return_rpath_dirs_list=""
|
||||
|
||||
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||
wl_expect_rpath=no
|
||||
@@ -643,17 +609,32 @@ categorize_arguments() {
|
||||
arg="${1#-isystem}"
|
||||
return_isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_isystem_include_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_isystem_include_dirs_list "$arg" ;;
|
||||
2) append return_isystem_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_include_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_include_dirs_list "$arg" ;;
|
||||
2) append return_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_lib_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_lib_dirs_list "$arg" ;;
|
||||
1) append return_lib_dirs_list "$arg" ;;
|
||||
2) append return_system_lib_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
@@ -686,17 +667,32 @@ categorize_arguments() {
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
@@ -713,32 +709,7 @@ categorize_arguments() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
# if mode is not ld, we can just add to other args
|
||||
if [ "$mode" != "ld" ]; then
|
||||
append return_other_args_list "$1"
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
# if we're in linker mode, we need to parse raw RPATH args
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
shift
|
||||
[ $# -eq 0 ] && break # ignore -rpath without value
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
@@ -760,10 +731,21 @@ categorize_arguments() {
|
||||
|
||||
categorize_arguments "$@"
|
||||
|
||||
assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists include_dirs_list return_include_dirs_list
|
||||
assign_path_lists lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists rpath_dirs_list return_rpath_dirs_list
|
||||
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
other_args_list="$return_other_args_list"
|
||||
@@ -839,10 +821,21 @@ IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
|
||||
assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
|
||||
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
|
||||
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
@@ -901,7 +894,7 @@ esac
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
|
||||
extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
|
||||
else
|
||||
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
@@ -917,63 +910,64 @@ args_list="$flags_list"
|
||||
|
||||
# Include search paths partitioned by (in store, non-system, system)
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_store_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_flags_spack_store_include_dirs_list -I
|
||||
extend args_list spack_store_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_include_dirs_list -I
|
||||
extend args_list include_dirs_list -I
|
||||
|
||||
extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list system_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
|
||||
extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths partitioned by (in store, non-system, system)
|
||||
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
|
||||
extend args_list spack_store_lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
|
||||
extend args_list system_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
# RPATHs arguments
|
||||
rpath_prefix=""
|
||||
case "$mode" in
|
||||
ccld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
rpath_prefix="$rpath"
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
;;
|
||||
ld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
rpath_prefix="-rpath${lsep}"
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
;;
|
||||
esac
|
||||
|
||||
# if mode is ccld or ld, extend RPATH lists with the prefix determined above
|
||||
if [ -n "$rpath_prefix" ]; then
|
||||
extend_unique args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend_unique args_list spack_store_rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend_unique args_list spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend_unique args_list rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend_unique args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend_unique args_list system_rpath_dirs_list "$rpath_prefix"
|
||||
fi
|
||||
|
||||
# Other arguments from the input command
|
||||
extend args_list other_args_list
|
||||
extend args_list spack_flags_other_args_list
|
||||
|
lib/spack/external/__init__.py (vendored): 2 lines changed
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.5-dev (commit bceb39528ac49dd0c876b2e9bf3e7482e9c2be4a)
|
||||
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
@@ -81,8 +81,13 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
|
||||
self.generation = generation
|
||||
# Only relevant for AArch64
|
||||
self.cpu_part = cpu_part
|
||||
# Cache the ancestor computation
|
||||
|
||||
# Cache the "ancestor" computation
|
||||
self._ancestors = None
|
||||
# Cache the "generic" computation
|
||||
self._generic = None
|
||||
# Cache the "family" computation
|
||||
self._family = None
|
||||
|
||||
@property
|
||||
def ancestors(self):
|
||||
@@ -174,18 +179,22 @@ def __contains__(self, feature):
|
||||
@property
|
||||
def family(self):
|
||||
"""Returns the architecture family a given target belongs to"""
|
||||
roots = [x for x in [self] + self.ancestors if not x.ancestors]
|
||||
msg = "a target is expected to belong to just one architecture family"
|
||||
msg += f"[found {', '.join(str(x) for x in roots)}]"
|
||||
assert len(roots) == 1, msg
|
||||
if self._family is None:
|
||||
roots = [x for x in [self] + self.ancestors if not x.ancestors]
|
||||
msg = "a target is expected to belong to just one architecture family"
|
||||
msg += f"[found {', '.join(str(x) for x in roots)}]"
|
||||
assert len(roots) == 1, msg
|
||||
self._family = roots.pop()
|
||||
|
||||
return roots.pop()
|
||||
return self._family
|
||||
|
||||
@property
|
||||
def generic(self):
|
||||
"""Returns the best generic architecture that is compatible with self"""
|
||||
generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
|
||||
return max(generics, key=lambda x: len(x.ancestors))
|
||||
if self._generic is None:
|
||||
generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
|
||||
self._generic = max(generics, key=lambda x: len(x.ancestors))
|
||||
return self._generic
|
||||
|
||||
def to_dict(self):
|
||||
"""Returns a dictionary representation of this object."""
|
||||
|
@@ -1482,7 +1482,6 @@
|
||||
"cldemote",
|
||||
"movdir64b",
|
||||
"movdiri",
|
||||
"pdcm",
|
||||
"serialize",
|
||||
"waitpkg"
|
||||
],
|
||||
@@ -2237,6 +2236,84 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"zen5": {
|
||||
"from": ["zen4"],
|
||||
"vendor": "AuthenticAMD",
|
||||
"features": [
|
||||
"abm",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"avx512_bf16",
|
||||
"avx512_bitalg",
|
||||
"avx512bw",
|
||||
"avx512cd",
|
||||
"avx512dq",
|
||||
"avx512f",
|
||||
"avx512ifma",
|
||||
"avx512vbmi",
|
||||
"avx512_vbmi2",
|
||||
"avx512vl",
|
||||
"avx512_vnni",
|
||||
"avx512_vp2intersect",
|
||||
"avx512_vpopcntdq",
|
||||
"avx_vnni",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"clwb",
|
||||
"clzero",
|
||||
"cppc",
|
||||
"cx16",
|
||||
"f16c",
|
||||
"flush_l1d",
|
||||
"fma",
|
||||
"fsgsbase",
|
||||
"gfni",
|
||||
"ibrs_enhanced",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"movdir64b",
|
||||
"movdiri",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"sse4a",
|
||||
"ssse3",
|
||||
"tsc_adjust",
|
||||
"vaes",
|
||||
"vpclmulqdq",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "14.1:",
|
||||
"name": "znver5",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"aocc": [
|
||||
{
|
||||
"versions": "5.0:",
|
||||
"name": "znver5",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "19.1:",
|
||||
"name": "znver5",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"ppc64": {
|
||||
"from": [],
|
||||
"vendor": "generic",
|
||||
|
@@ -41,6 +41,20 @@ def comma_and(sequence: List[str]) -> str:
|
||||
return comma_list(sequence, "and")
|
||||
|
||||
|
||||
def ordinal(number: int) -> str:
|
||||
"""Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.
|
||||
|
||||
Args:
|
||||
number: int to convert to ordinal number
|
||||
|
||||
Returns: number's corresponding ordinal
|
||||
"""
|
||||
idx = (number % 10) << 1
|
||||
tens = number % 100 // 10
|
||||
suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
|
||||
return f"{number}{suffix}"
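
# A quick, illustrative check of ordinal() above (not part of the module itself):
assert [ordinal(n) for n in (1, 2, 3, 11, 12, 21, 102)] == [
    "1st", "2nd", "3rd", "11th", "12th", "21st", "102nd"
]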
|
||||
|
||||
|
||||
def quote(sequence: List[str], q: str = "'") -> List[str]:
|
||||
"""Quotes each item in the input list with the quote character passed as second argument."""
|
||||
return [f"{q}{e}{q}" for e in sequence]
|
||||
|
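A few spot checks make the compact suffix-table trick above easier to trust; they assume only the ordinal helper added in this hunk and are illustrative, not part of the diff:

assert ordinal(1) == "1st"
assert ordinal(2) == "2nd"
assert ordinal(3) == "3rd"
assert ordinal(4) == "4th"
assert ordinal(11) == "11th"  # 11-13 take "th", not "st"/"nd"/"rd"
assert ordinal(22) == "22nd"
assert ordinal(103) == "103rd"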
@@ -47,6 +47,7 @@
    "copy_mode",
    "filter_file",
    "find",
    "find_first",
    "find_headers",
    "find_all_headers",
    "find_libraries",
@@ -12,7 +12,7 @@
 import sys
 import traceback
 from datetime import datetime, timedelta
-from typing import Any, Callable, Iterable, List, Tuple
+from typing import Callable, Iterable, List, Tuple, TypeVar

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -879,9 +879,12 @@ def enum(**kwargs):
     return type("Enum", (object,), kwargs)


+T = TypeVar("T")
+
+
 def stable_partition(
-    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
-) -> Tuple[List[Any], List[Any]]:
+    input_iterable: Iterable[T], predicate_fn: Callable[[T], bool]
+) -> Tuple[List[T], List[T]]:
     """Partition the input iterable according to a custom predicate.

     Args:
@@ -893,12 +896,13 @@ def stable_partition(
         Tuple of the list of elements evaluating to True, and
         list of elements evaluating to False.
     """
-    true_items, false_items = [], []
+    true_items: List[T] = []
+    false_items: List[T] = []
     for item in input_iterable:
         if predicate_fn(item):
             true_items.append(item)
-            continue
-        false_items.append(item)
+        else:
+            false_items.append(item)
     return true_items, false_items
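A minimal usage sketch of stable_partition as typed above (illustrative only); the single pass keeps the relative order within both partitions:

evens, odds = stable_partition([3, 1, 4, 1, 5, 9, 2, 6], lambda n: n % 2 == 0)
# evens == [4, 2, 6], odds == [3, 1, 1, 5, 9]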
@@ -348,7 +348,19 @@ def close(self):
 class MultiProcessFd:
     """Return an object which stores a file descriptor and can be passed as an
     argument to a function run with ``multiprocessing.Process``, such that
-    the file descriptor is available in the subprocess."""
+    the file descriptor is available in the subprocess. It provides access via
+    the `fd` property.
+
+    This object takes control over the associated FD: files opened from this
+    using `fdopen` need to use `closefd=False`.
+    """
+
+    # As for why you have to fdopen(..., closefd=False): when a
+    # multiprocessing.connection.Connection object stores an fd, it assumes
+    # control over it, and will attempt to close it when gc'ed during __del__;
+    # if you fdopen(multiprocessfd.fd, closefd=True) then the resulting file
+    # will also assume control, and you can see warnings when there is an
+    # attempted double close.

     def __init__(self, fd):
         self._connection = None
@@ -361,33 +373,20 @@ def __init__(self, fd):
     @property
     def fd(self):
         if self._connection:
-            return self._connection._handle
+            return self._connection.fileno()
         else:
             return self._fd

     def close(self):
+        """Rather than `.close()`ing any file opened from the associated
+        `.fd`, the `MultiProcessFd` should be closed with this.
+        """
         if self._connection:
             self._connection.close()
         else:
             os.close(self._fd)


-def close_connection_and_file(multiprocess_fd, file):
-    # MultiprocessFd is intended to transmit a FD
-    # to a child process, this FD is then opened to a Python File object
-    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
-    # multiprocessing.connection.Connection; Connection closes the FD
-    # when it is deleted, and prints a warning about duplicate closure if
-    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
-    # simple FD; closing the FD here appears to conflict with
-    # closure of the File object (in < 3.8 that is). Therefore this needs
-    # to choose whether to close the File or the Connection.
-    if sys.version_info >= (3, 8):
-        multiprocess_fd.close()
-    else:
-        file.close()
-
-
 @contextmanager
 def replace_environment(env):
     """Replace the current environment (`os.environ`) with `env`.
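The pattern the new docstring and comment describe, sketched with a plain pipe (illustrative, not part of the diff): the MultiProcessFd owns the descriptor, and any file object opened from it must be non-owning.

read_fd, write_fd = os.pipe()
mp_fd = MultiProcessFd(read_fd)                    # takes ownership of read_fd
stream = os.fdopen(mp_fd.fd, "r", closefd=False)   # non-owning view of the same fd
# ... hand mp_fd to a multiprocessing.Process and read from stream there ...
stream.close()                                     # closes the file object only
mp_fd.close()                                      # the single, authoritative close of the fd
os.close(write_fd)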
@@ -932,10 +931,10 @@ def _writer_daemon(
     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
     # that prevents unbuffered text I/O.
     # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
-    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
+    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8", closefd=False)

     if stdin_multiprocess_fd:
-        stdin = os.fdopen(stdin_multiprocess_fd.fd)
+        stdin = os.fdopen(stdin_multiprocess_fd.fd, closefd=False)
     else:
         stdin = None

@@ -1025,9 +1024,9 @@ def _writer_daemon(
     if isinstance(log_file, io.StringIO):
         control_pipe.send(log_file.getvalue())
     log_file_wrapper.close()
-    close_connection_and_file(read_multiprocess_fd, in_pipe)
+    read_multiprocess_fd.close()
     if stdin_multiprocess_fd:
-        close_connection_and_file(stdin_multiprocess_fd, stdin)
+        stdin_multiprocess_fd.close()

     # send echo value back to the parent so it can be preserved.
     control_pipe.send(echo)
@@ -39,6 +39,7 @@ def _search_duplicate_compilers(error_cls):
|
||||
import collections
|
||||
import collections.abc
|
||||
import glob
|
||||
import inspect
|
||||
import io
|
||||
import itertools
|
||||
import os
|
||||
@@ -50,6 +51,7 @@ def _search_duplicate_compilers(error_cls):
|
||||
from urllib.request import urlopen
|
||||
|
||||
import llnl.util.lang
|
||||
from llnl.string import plural
|
||||
|
||||
import spack.builder
|
||||
import spack.config
|
||||
@@ -386,6 +388,14 @@ def _make_config_error(config_data, summary, error_cls):
|
||||
)
|
||||
|
||||
|
||||
package_deprecated_attributes = AuditClass(
|
||||
group="packages",
|
||||
tag="PKG-DEPRECATED-ATTRIBUTES",
|
||||
description="Sanity checks to preclude use of deprecated package attributes",
|
||||
kwargs=("pkgs",),
|
||||
)
|
||||
|
||||
|
||||
package_properties = AuditClass(
|
||||
group="packages",
|
||||
tag="PKG-PROPERTIES",
|
||||
@@ -404,22 +414,23 @@ def _make_config_error(config_data, summary, error_cls):
 )


-@package_directives
+@package_properties
 def _check_build_test_callbacks(pkgs, error_cls):
-    """Ensure stand-alone test method is not included in build-time callbacks"""
+    """Ensure stand-alone test methods are not included in build-time callbacks.
+
+    Test methods are for checking the installed software as stand-alone tests.
+    They could also be called during the post-install phase of a build.
+    """
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

-        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
-        has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
+        # TODO (post-34236): "test"->"test_" once remove deprecated methods
+        has_test_method = test_callbacks and any([m.startswith("test_") for m in test_callbacks])
         if has_test_method:
-            msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
-            instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
-                ", ".join(test_callbacks)
-            )
+            msg = f"Package {pkg_name} includes stand-alone test methods in build-time checks."
+            callbacks = ", ".join(test_callbacks)
+            instr = f"Remove the following from 'build_time_test_callbacks': {callbacks}"
             errors.append(error_cls(msg.format(pkg_name), [instr]))

     return errors
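For packages flagged by this audit, the remedy is a stand-alone method with the test_ prefix (and a docstring, see the docstring audit below) that is not listed in build_time_test_callbacks. A hypothetical package sketch, assuming the usual names from spack.package (Package, Executable, join_path); it is illustrative only and not taken from the diff:

class Mylib(Package):  # hypothetical package, for illustration only
    # build-time callbacks may keep conventional checks such as "check"
    build_time_test_callbacks = ["check"]

    def test_version_reported(self):
        """run the installed mylib-config and verify it prints a version"""
        exe = Executable(join_path(self.prefix.bin, "mylib-config"))
        out = exe("--version", output=str)
        assert "mylib" in out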
@@ -517,6 +528,46 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
|
||||
return errors
|
||||
|
||||
|
||||
@package_deprecated_attributes
|
||||
def _search_for_deprecated_package_methods(pkgs, error_cls):
|
||||
"""Ensure the package doesn't define or use deprecated methods"""
|
||||
DEPRECATED_METHOD = (("test", "a name starting with 'test_'"),)
|
||||
DEPRECATED_USE = (
|
||||
("self.cache_extra_test_sources(", "cache_extra_test_sources(self, ..)"),
|
||||
("self.install_test_root(", "install_test_root(self, ..)"),
|
||||
("self.run_test(", "test_part(self, ..)"),
|
||||
)
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||
method_errors = collections.defaultdict(list)
|
||||
for name, function in methods:
|
||||
for deprecated_name, alternate in DEPRECATED_METHOD:
|
||||
if name == deprecated_name:
|
||||
msg = f"Rename '{deprecated_name}' method to {alternate} instead."
|
||||
method_errors[name].append(msg)
|
||||
|
||||
source = inspect.getsource(function)
|
||||
for deprecated_name, alternate in DEPRECATED_USE:
|
||||
if deprecated_name in source:
|
||||
msg = f"Change '{deprecated_name}' to '{alternate}' in '{name}' method."
|
||||
method_errors[name].append(msg)
|
||||
|
||||
num_methods = len(method_errors)
|
||||
if num_methods > 0:
|
||||
methods = plural(num_methods, "method", show_n=False)
|
||||
error_msg = (
|
||||
f"Package '{pkg_name}' implements or uses unsupported deprecated {methods}."
|
||||
)
|
||||
instr = [f"Make changes to '{pkg_cls.__module__}':"]
|
||||
for name in sorted(method_errors):
|
||||
instr.extend([f" {msg}" for msg in method_errors[name]])
|
||||
errors.append(error_cls(error_msg, instr))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
|
||||
"""Ensure package names are lowercase and consistent"""
|
||||
@@ -771,6 +822,89 @@ def _uses_deprecated_globals(pkgs, error_cls):
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_test_docstring(pkgs, error_cls):
|
||||
"""Ensure stand-alone test methods have a docstring.
|
||||
|
||||
The docstring of a test method is implicitly used as the description of
|
||||
the corresponding test part during test results reporting.
|
||||
"""
|
||||
doc_regex = r'\s+("""[^"]+""")'
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||
method_names = []
|
||||
for name, test_fn in methods:
|
||||
if not name.startswith("test_"):
|
||||
continue
|
||||
|
||||
# Ensure the test method has a docstring
|
||||
source = inspect.getsource(test_fn)
|
||||
match = re.search(doc_regex, source)
|
||||
if match is None or len(match.group(0).replace('"', "").strip()) == 0:
|
||||
method_names.append(name)
|
||||
|
||||
num_methods = len(method_names)
|
||||
if num_methods > 0:
|
||||
methods = plural(num_methods, "method", show_n=False)
|
||||
docstrings = plural(num_methods, "docstring", show_n=False)
|
||||
msg = f"Package {pkg_name} has test {methods} with empty or missing {docstrings}."
|
||||
names = ", ".join(method_names)
|
||||
instr = [
|
||||
"Docstrings are used as descriptions in test outputs.",
|
||||
f"Add a concise summary to the following {methods} in '{pkg_cls.__module__}':",
|
||||
f"{names}",
|
||||
]
|
||||
errors.append(error_cls(msg, instr))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_test_implemented(pkgs, error_cls):
|
||||
"""Ensure stand-alone test methods are implemented.
|
||||
|
||||
The test method is also required to be non-empty.
|
||||
"""
|
||||
|
||||
def skip(line):
|
||||
ln = line.strip()
|
||||
return ln.startswith("#") or "pass" in ln
|
||||
|
||||
doc_regex = r'\s+("""[^"]+""")'
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||
method_names = []
|
||||
for name, test_fn in methods:
|
||||
if not name.startswith("test_"):
|
||||
continue
|
||||
|
||||
source = inspect.getsource(test_fn)
|
||||
|
||||
# Attempt to ensure the test method is implemented.
|
||||
impl = re.sub(doc_regex, r"", source).splitlines()[1:]
|
||||
lines = [ln.strip() for ln in impl if not skip(ln)]
|
||||
if not lines:
|
||||
method_names.append(name)
|
||||
|
||||
num_methods = len(method_names)
|
||||
if num_methods > 0:
|
||||
methods = plural(num_methods, "method", show_n=False)
|
||||
msg = f"Package {pkg_name} has empty or missing test {methods}."
|
||||
names = ", ".join(method_names)
|
||||
instr = [
|
||||
f"Implement or remove the following {methods} from '{pkg_cls.__module__}': {names}"
|
||||
]
|
||||
errors.append(error_cls(msg, instr))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_https_directives
|
||||
def _linting_package_file(pkgs, error_cls):
|
||||
"""Check for correctness of links"""
|
||||
|
@@ -35,6 +35,7 @@
|
||||
import spack.caches
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.hooks
|
||||
@@ -712,15 +713,32 @@ def get_buildfile_manifest(spec):
     return data


-def hashes_to_prefixes(spec):
-    """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
-    return {
-        s.dag_hash(): str(s.prefix)
+def deps_to_relocate(spec):
+    """Return the transitive link and direct run dependencies of the spec.
+
+    This is a special traversal for dependencies we need to consider when relocating a package.
+
+    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
+    we need to rewrite those locations when dependencies are in a different place at install time
+    than they were at build time.
+
+    This traversal covers transitive link dependencies and direct run dependencies because:
+
+    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
+       dependency libraries.
+    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
+       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
+
+    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
+    """
+    deps = [
+        s
         for s in itertools.chain(
             spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
         if not s.external
-    }
+    ]
+    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
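The ordering and de-duplication the new helper relies on come from llnl.util.lang.dedupe, which keeps the first occurrence per key. A toy illustration with stand-in objects (not real specs):

class FakeSpec:
    def __init__(self, name, hash_):
        self.name, self._hash = name, hash_

    def dag_hash(self):
        return self._hash

deps = [FakeSpec("zlib", "aaa"), FakeSpec("zlib", "aaa"), FakeSpec("cmake", "bbb")]
unique = list(llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash()))
# unique keeps one "zlib" and one "cmake", in first-seen order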
def get_buildinfo_dict(spec):
|
||||
@@ -736,7 +754,7 @@ def get_buildinfo_dict(spec):
|
||||
"relocate_binaries": manifest["binary_to_relocate"],
|
||||
"relocate_links": manifest["link_to_relocate"],
|
||||
"hardlinks_deduped": manifest["hardlinks_deduped"],
|
||||
"hash_to_prefix": hashes_to_prefixes(spec),
|
||||
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
|
||||
}
|
||||
|
||||
|
||||
@@ -1631,7 +1649,6 @@ def _oci_push(
|
||||
Dict[str, spack.oci.oci.Blob],
|
||||
List[Tuple[Spec, BaseException]],
|
||||
]:
|
||||
|
||||
# Spec dag hash -> blob
|
||||
checksums: Dict[str, spack.oci.oci.Blob] = {}
|
||||
|
||||
@@ -2201,11 +2218,36 @@ def relocate_package(spec):
|
||||
# First match specific prefix paths. Possibly the *local* install prefix
|
||||
# of some dependency is in an upstream, so we cannot assume the original
|
||||
# spack store root can be mapped uniformly to the new spack store root.
|
||||
for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
|
||||
if dag_hash in hash_to_old_prefix:
|
||||
old_dep_prefix = hash_to_old_prefix[dag_hash]
|
||||
prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
|
||||
prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix
|
||||
#
|
||||
# If the spec is spliced, we need to handle the simultaneous mapping
|
||||
# from the old install_tree to the new install_tree and from the build_spec
|
||||
# to the spliced spec.
|
||||
# Because foo.build_spec is foo for any non-spliced spec, we can simplify
|
||||
# by checking for spliced-in nodes by checking for nodes not in the build_spec
|
||||
# without any explicit check for whether the spec is spliced.
|
||||
# An analog in this algorithm is any spec that shares a name or provides the same virtuals
|
||||
# in the context of the relevant root spec. This ensures that the analog for a spec s
|
||||
# is the spec that s replaced when we spliced.
|
||||
relocation_specs = deps_to_relocate(spec)
|
||||
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
|
||||
for s in relocation_specs:
|
||||
analog = s
|
||||
if id(s) not in build_spec_ids:
|
||||
analogs = [
|
||||
d
|
||||
for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
|
||||
if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
|
||||
]
|
||||
if analogs:
|
||||
# Prefer same-name analogs and prefer higher versions
|
||||
# This matches the preferences in Spec.splice, so we will find same node
|
||||
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
|
||||
|
||||
lookup_dag_hash = analog.dag_hash()
|
||||
if lookup_dag_hash in hash_to_old_prefix:
|
||||
old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
|
||||
prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
|
||||
prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
|
||||
|
||||
# Only then add the generic fallback of install prefix -> install prefix.
|
||||
prefix_to_prefix_text[old_prefix] = new_prefix
|
||||
@@ -2520,7 +2562,13 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
|
||||
return pkg_prefix
|
||||
|
||||
|
||||
def install_root_node(spec, unsigned=False, force=False, sha256=None):
|
||||
def install_root_node(
|
||||
spec: spack.spec.Spec,
|
||||
unsigned=False,
|
||||
force: bool = False,
|
||||
sha256: Optional[str] = None,
|
||||
allow_missing: bool = False,
|
||||
) -> None:
|
||||
"""Install the root node of a concrete spec from a buildcache.
|
||||
|
||||
Checking the sha256 sum of a node before installation is usually needed only
|
||||
@@ -2529,11 +2577,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
|
||||
|
||||
Args:
|
||||
spec: spec to be installed (note that only the root node will be installed)
|
||||
unsigned (bool): if True allows installing unsigned binaries
|
||||
force (bool): force installation if the spec is already present in the
|
||||
local store
|
||||
sha256 (str): optional sha256 of the binary package, to be checked
|
||||
before installation
|
||||
unsigned: if True allows installing unsigned binaries
|
||||
force: force installation if the spec is already present in the local store
|
||||
sha256: optional sha256 of the binary package, to be checked before installation
|
||||
allow_missing: when true, allows installing a node with missing dependencies
|
||||
"""
|
||||
# Early termination
|
||||
if spec.external or spec.virtual:
|
||||
@@ -2543,10 +2590,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
|
||||
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
|
||||
return
|
||||
|
||||
download_result = download_tarball(spec, unsigned)
|
||||
download_result = download_tarball(spec.build_spec, unsigned)
|
||||
if not download_result:
|
||||
msg = 'download of binary cache file for spec "{0}" failed'
|
||||
raise RuntimeError(msg.format(spec.format()))
|
||||
raise RuntimeError(msg.format(spec.build_spec.format()))
|
||||
|
||||
if sha256:
|
||||
checker = spack.util.crypto.Checker(sha256)
|
||||
@@ -2565,8 +2612,13 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
|
||||
with spack.util.path.filter_padding():
|
||||
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
|
||||
extract_tarball(spec, download_result, force)
|
||||
spec.package.windows_establish_runtime_linkage()
|
||||
if spec.spliced: # overwrite old metadata with new
|
||||
spack.store.STORE.layout.write_spec(
|
||||
spec, spack.store.STORE.layout.spec_file_path(spec)
|
||||
)
|
||||
spack.hooks.post_install(spec, False)
|
||||
spack.store.STORE.db.add(spec)
|
||||
spack.store.STORE.db.add(spec, allow_missing=allow_missing)
|
||||
|
||||
|
||||
def install_single_spec(spec, unsigned=False, force=False):
|
||||
|
@@ -4,6 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Common basic functions used through the spack.bootstrap package"""
|
||||
import fnmatch
|
||||
import glob
|
||||
import importlib
|
||||
import os.path
|
||||
import re
|
||||
@@ -60,10 +61,19 @@ def _try_import_from_store(
|
||||
python, *_ = candidate_spec.dependencies("python-venv")
|
||||
else:
|
||||
python, *_ = candidate_spec.dependencies("python")
|
||||
module_paths = [
|
||||
os.path.join(candidate_spec.prefix, python.package.purelib),
|
||||
os.path.join(candidate_spec.prefix, python.package.platlib),
|
||||
]
|
||||
|
||||
# if python is installed, ask it for the layout
|
||||
if python.installed:
|
||||
module_paths = [
|
||||
os.path.join(candidate_spec.prefix, python.package.purelib),
|
||||
os.path.join(candidate_spec.prefix, python.package.platlib),
|
||||
]
|
||||
# otherwise search for the site-packages directory
|
||||
# (clingo from binaries with truncated python-venv runtime)
|
||||
else:
|
||||
module_paths = glob.glob(
|
||||
os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
|
||||
)
|
||||
path_before = list(sys.path)
|
||||
|
||||
# NOTE: try module_paths first and last, last allows an existing version in path
|
||||
|
@@ -175,7 +175,15 @@ def _install_by_hash(
|
||||
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
|
||||
for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
|
||||
spack.binary_distribution.install_root_node(
|
||||
match, unsigned=True, force=True, sha256=pkg_sha256
|
||||
# allow_missing is true since when bootstrapping clingo we truncate runtime
|
||||
# deps such as gcc-runtime, since we link libstdc++ statically, and the other
|
||||
# further runtime deps are loaded by the Python interpreter. This just silences
|
||||
# warnings about missing dependencies.
|
||||
match,
|
||||
unsigned=True,
|
||||
force=True,
|
||||
sha256=pkg_sha256,
|
||||
allow_missing=True,
|
||||
)
|
||||
|
||||
def _install_and_test(
|
||||
|
Diffs for seven files suppressed because one or more lines are too long.
@@ -37,13 +37,14 @@
|
||||
import multiprocessing
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
from collections import defaultdict
|
||||
from enum import Flag, auto
|
||||
from itertools import chain
|
||||
from typing import Dict, List, Set, Tuple
|
||||
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -74,6 +75,7 @@
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
import spack.util.executable
|
||||
import spack.util.libc
|
||||
from spack import traverse
|
||||
from spack.context import Context
|
||||
from spack.error import InstallError, NoHeadersError, NoLibrariesError
|
||||
@@ -89,7 +91,7 @@
|
||||
)
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.log_parse import make_log_context, parse_log_events
|
||||
from spack.util.module_cmd import load_module, path_from_modules
|
||||
from spack.util.module_cmd import load_module
|
||||
|
||||
#
|
||||
# This can be set by the user to globally disable parallel builds.
|
||||
@@ -435,6 +437,35 @@ def optimization_flags(compiler, target):
    return result


class FilterDefaultDynamicLinkerSearchPaths:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        # Identify directories by (inode, device) tuple, which handles symlinks too.
        self.default_path_identifiers: Set[Tuple[int, int]] = set()
        if not dynamic_linker:
            return
        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
            try:
                s = os.stat(path)
                if stat.S_ISDIR(s.st_mode):
                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
            except OSError:
                continue

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        try:
            s = os.stat(p)
            return (s.st_ino, s.st_dev) in self.default_path_identifiers
        except OSError:
            return False

    def __call__(self, dirs: List[str]) -> List[str]:
        if not self.default_path_identifiers:
            return dirs
        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]

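Used on its own, the filter acts as a callable over candidate rpath directories; a hedged sketch, where the loader path and prefixes are example values only:

filter_rpaths = FilterDefaultDynamicLinkerSearchPaths("/lib64/ld-linux-x86-64.so.2")
candidate_rpaths = ["/opt/spack/opt/zlib-1.3.1/lib", "/usr/lib64", "/opt/spack/opt/ncurses-6.5/lib"]
kept = filter_rpaths(candidate_rpaths)
# "/usr/lib64" is dropped when it resolves to one of the loader's default search
# directories; Spack-owned prefixes are kept.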
def set_wrapper_variables(pkg, env):
|
||||
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
|
||||
`SPACK_`) and also add the compiler wrappers to PATH.
|
||||
@@ -492,69 +523,71 @@ def set_wrapper_variables(pkg, env):
|
||||
env.set("CCACHE_DISABLE", "1")
|
||||
|
||||
# Gather information about various types of dependencies
|
||||
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
|
||||
rpath_deps = get_rpath_deps(pkg)
|
||||
rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))
|
||||
link_deps = pkg.spec.traverse(root=False, order="topo", deptype=dt.LINK)
|
||||
external_link_deps, nonexternal_link_deps = stable_partition(link_deps, lambda d: d.external)
|
||||
|
||||
link_dirs = []
|
||||
include_dirs = []
|
||||
rpath_dirs = []
|
||||
|
||||
def _prepend_all(list_to_modify, items_to_add):
|
||||
# Update the original list (creating a new list would be faster but
|
||||
# may not be convenient)
|
||||
for item in reversed(list(items_to_add)):
|
||||
list_to_modify.insert(0, item)
|
||||
for dep in chain(external_link_deps, nonexternal_link_deps):
|
||||
# TODO: is_system_path is wrong, but even if we knew default -L, -I flags from the compiler
|
||||
# and default search dirs from the dynamic linker, it's not obvious how to avoid a possibly
|
||||
# expensive search in `query.libs.directories` and `query.headers.directories`, which is
|
||||
# what this branch is trying to avoid.
|
||||
if is_system_path(dep.prefix):
|
||||
continue
|
||||
# TODO: as of Spack 0.22, multiple instances of the same package may occur among the link
|
||||
# deps, so keying by name is wrong. In practice it is not problematic: we obtain the same
|
||||
# gcc-runtime / glibc here, and repeatedly add the same dirs that are later deduped.
|
||||
query = pkg.spec[dep.name]
|
||||
dep_link_dirs = []
|
||||
try:
|
||||
# Locating libraries can be time consuming, so log start and finish.
|
||||
tty.debug(f"Collecting libraries for {dep.name}")
|
||||
dep_link_dirs.extend(query.libs.directories)
|
||||
tty.debug(f"Libraries for {dep.name} have been collected.")
|
||||
except NoLibrariesError:
|
||||
tty.debug(f"No libraries found for {dep.name}")
|
||||
|
||||
def update_compiler_args_for_dep(dep):
|
||||
if dep in link_deps and (not is_system_path(dep.prefix)):
|
||||
query = pkg.spec[dep.name]
|
||||
dep_link_dirs = list()
|
||||
try:
|
||||
# In some circumstances (particularly for externals) finding
|
||||
# libraries packages can be time consuming, so indicate that
|
||||
# we are performing this operation (and also report when it
|
||||
# finishes).
|
||||
tty.debug("Collecting libraries for {0}".format(dep.name))
|
||||
dep_link_dirs.extend(query.libs.directories)
|
||||
tty.debug("Libraries for {0} have been collected.".format(dep.name))
|
||||
except NoLibrariesError:
|
||||
tty.debug("No libraries found for {0}".format(dep.name))
|
||||
for default_lib_dir in ("lib", "lib64"):
|
||||
default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
|
||||
if os.path.isdir(default_lib_prefix):
|
||||
dep_link_dirs.append(default_lib_prefix)
|
||||
|
||||
for default_lib_dir in ["lib", "lib64"]:
|
||||
default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
|
||||
if os.path.isdir(default_lib_prefix):
|
||||
dep_link_dirs.append(default_lib_prefix)
|
||||
link_dirs[:0] = dep_link_dirs
|
||||
if dep.dag_hash() in rpath_hashes:
|
||||
rpath_dirs[:0] = dep_link_dirs
|
||||
|
||||
_prepend_all(link_dirs, dep_link_dirs)
|
||||
if dep in rpath_deps:
|
||||
_prepend_all(rpath_dirs, dep_link_dirs)
|
||||
try:
|
||||
tty.debug(f"Collecting headers for {dep.name}")
|
||||
include_dirs[:0] = query.headers.directories
|
||||
tty.debug(f"Headers for {dep.name} have been collected.")
|
||||
except NoHeadersError:
|
||||
tty.debug(f"No headers found for {dep.name}")
|
||||
|
||||
try:
|
||||
_prepend_all(include_dirs, query.headers.directories)
|
||||
except NoHeadersError:
|
||||
tty.debug("No headers found for {0}".format(dep.name))
|
||||
|
||||
for dspec in pkg.spec.traverse(root=False, order="post"):
|
||||
if dspec.external:
|
||||
update_compiler_args_for_dep(dspec)
|
||||
|
||||
# Just above, we prepended entries for -L/-rpath for externals. We
|
||||
# now do this for non-external packages so that Spack-built packages
|
||||
# are searched first for libraries etc.
|
||||
for dspec in pkg.spec.traverse(root=False, order="post"):
|
||||
if not dspec.external:
|
||||
update_compiler_args_for_dep(dspec)
|
||||
|
||||
# The top-level package is always RPATHed. It hasn't been installed yet
|
||||
# so the RPATHs are added unconditionally (e.g. even though lib64/ may
|
||||
# not be created for the install).
|
||||
for libdir in ["lib64", "lib"]:
|
||||
# The top-level package is heuristically rpath'ed.
|
||||
for libdir in ("lib64", "lib"):
|
||||
lib_path = os.path.join(pkg.prefix, libdir)
|
||||
rpath_dirs.insert(0, lib_path)
|
||||
|
||||
filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
|
||||
pkg.compiler.default_dynamic_linker
|
||||
)
|
||||
|
||||
# TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
|
||||
# branch above). link_dirs should be filtered with entries from _parse_link_paths.
|
||||
link_dirs = list(dedupe(filter_system_paths(link_dirs)))
|
||||
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
|
||||
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
|
||||
rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
|
||||
|
||||
# TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
|
||||
# just this filter.
|
||||
implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
|
||||
if implicit_rpaths:
|
||||
env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
|
||||
|
||||
# Spack managed directories include the stage, store and upstream stores. We extend this with
|
||||
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
|
||||
@@ -584,13 +617,11 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
"""
|
||||
module = ModuleChangePropagator(pkg)
|
||||
|
||||
if context == Context.BUILD:
|
||||
module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
|
||||
|
||||
jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
|
||||
module.make_jobs = jobs
|
||||
if context == Context.BUILD:
|
||||
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
module.make = MakeExecutable("make", jobs)
|
||||
@@ -759,21 +790,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
|
||||
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
|
||||
|
||||
|
||||
def get_rpaths(pkg):
|
||||
"""Get a list of all the rpaths for a package."""
|
||||
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
|
||||
deps = get_rpath_deps(pkg)
|
||||
rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
|
||||
rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
|
||||
# Second module is our compiler mod name. We use that to get rpaths from
|
||||
# module show output.
|
||||
if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
|
||||
mod_rpath = path_from_modules([pkg.compiler.modules[1]])
|
||||
if mod_rpath:
|
||||
rpaths.append(mod_rpath)
|
||||
return list(dedupe(filter_system_paths(rpaths)))
|
||||
|
||||
|
||||
def load_external_modules(pkg):
|
||||
"""Traverse a package's spec DAG and load any external modules.
|
||||
|
||||
@@ -841,10 +857,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
|
||||
|
||||
load_external_modules(pkg)
|
||||
|
||||
implicit_rpaths = pkg.compiler.implicit_rpaths()
|
||||
if implicit_rpaths:
|
||||
env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
|
||||
|
||||
# Make sure nothing's strange about the Spack environment.
|
||||
validate(env_mods, tty.warn)
|
||||
env_mods.apply_modifications()
|
||||
@@ -1034,6 +1046,12 @@ def set_all_package_py_globals(self):
|
||||
# This includes runtime dependencies, also runtime deps of direct build deps.
|
||||
set_package_py_globals(pkg, context=Context.RUN)
|
||||
|
||||
# Looping over the set of packages a second time
|
||||
# ensures all globals are loaded into the module space prior to
|
||||
# any package setup. This guarantees package setup methods have
|
||||
# access to expected module level definitions such as "spack_cc"
|
||||
for dspec, flag in chain(self.external, self.nonexternal):
|
||||
pkg = dspec.package
|
||||
for spec in dspec.dependents():
|
||||
# Note: some specs have dependents that are unreachable from the root, so avoid
|
||||
# setting globals for those.
|
||||
@@ -1043,6 +1061,15 @@ def set_all_package_py_globals(self):
|
||||
pkg.setup_dependent_package(dependent_module, spec)
|
||||
dependent_module.propagate_changes_to_mro()
|
||||
|
||||
pkg = self.specs[0].package
|
||||
if self.context == Context.BUILD:
|
||||
module = ModuleChangePropagator(pkg)
|
||||
# std_cmake_args is not sufficiently static to be defined
|
||||
# in set_package_py_globals and is deprecated so its handled
|
||||
# here as a special case
|
||||
module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||
module.propagate_changes_to_mro()
|
||||
|
||||
def get_env_modifications(self) -> EnvironmentModifications:
|
||||
"""Returns the environment variable modifications for the given input specs and context.
|
||||
Environment modifications include:
|
||||
@@ -1112,35 +1139,52 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
|
||||
env.prepend_path("PATH", bin_dir)
|
||||
|
||||
|
||||
def get_cmake_prefix_path(pkg):
|
||||
# Note that unlike modifications_from_dependencies, this does not include
|
||||
# any edits to CMAKE_PREFIX_PATH defined in custom
|
||||
# setup_dependent_build_environment implementations of dependency packages
|
||||
build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
|
||||
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
|
||||
build_link_deps = build_deps | link_deps
|
||||
spack_built = []
|
||||
externals = []
|
||||
# modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
|
||||
# prepending all externals and then all non-externals
|
||||
for dspec in pkg.spec.traverse(root=False, order="post"):
|
||||
if dspec in build_link_deps:
|
||||
if dspec.external:
|
||||
externals.insert(0, dspec)
|
||||
else:
|
||||
spack_built.insert(0, dspec)
|
||||
|
||||
ordered_build_link_deps = spack_built + externals
|
||||
cmake_prefix_path_entries = []
|
||||
for spec in ordered_build_link_deps:
|
||||
cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)
|
||||
|
||||
return filter_system_paths(cmake_prefix_path_entries)
|
||||
|
||||
|
||||
def _setup_pkg_and_run(
|
||||
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
||||
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
||||
function: Callable,
|
||||
kwargs: Dict,
|
||||
write_pipe: multiprocessing.connection.Connection,
|
||||
input_multiprocess_fd: Optional[MultiProcessFd],
|
||||
jsfd1: Optional[MultiProcessFd],
|
||||
jsfd2: Optional[MultiProcessFd],
|
||||
):
|
||||
"""Main entry point in the child process for Spack builds.
|
||||
|
||||
``_setup_pkg_and_run`` is called by the child process created in
|
||||
``start_build_process()``, and its main job is to run ``function()`` on behalf of
|
||||
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
|
||||
|
||||
The child process is passed a ``write_pipe``, on which it's expected to send one of
|
||||
the following:
|
||||
|
||||
* ``StopPhase``: error raised by a build process indicating it's stopping at a
|
||||
particular build phase.
|
||||
|
||||
* ``BaseException``: any exception raised by a child build process, which will be
|
||||
wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
|
||||
raised in the parent.
|
||||
|
||||
* The return value of ``function()``, which can be anything (except an exception).
|
||||
This is returned to the caller.
|
||||
|
||||
Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
|
||||
does not close these file descriptors. Some ``multiprocessing`` backends will close
|
||||
them automatically in the child if they are not passed at process creation time.
|
||||
|
||||
Arguments:
|
||||
serialized_pkg: Spack package install context object (serialized form of the
|
||||
package that we'll build in the child process).
|
||||
function: function to call in the child process; serialized_pkg is passed to
|
||||
this as the first argument.
|
||||
kwargs: additional keyword arguments to pass to ``function()``.
|
||||
write_pipe: multiprocessing ``Connection`` to the parent process, to which the
|
||||
child *must* send a result (or an error) back to parent on.
|
||||
input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
|
||||
jsfd1: gmake Jobserver file descriptor 1.
|
||||
jsfd2: gmake Jobserver file descriptor 2.
|
||||
|
||||
"""
|
||||
|
||||
context: str = kwargs.get("context", "build")
|
||||
|
||||
try:
|
||||
@@ -1150,7 +1194,7 @@ def _setup_pkg_and_run(
|
||||
# that the parent process is not going to read from it till we
|
||||
# are done with the child, so we undo Python's precaution.
|
||||
if input_multiprocess_fd is not None:
|
||||
sys.stdin = os.fdopen(input_multiprocess_fd.fd)
|
||||
sys.stdin = os.fdopen(input_multiprocess_fd.fd, closefd=False)
|
||||
|
||||
pkg = serialized_pkg.restore()
|
||||
|
||||
@@ -1166,13 +1210,14 @@ def _setup_pkg_and_run(
|
||||
# Do not create a full ChildError from this, it's not an error
|
||||
# it's a control statement.
|
||||
write_pipe.send(e)
|
||||
except BaseException:
|
||||
except BaseException as e:
|
||||
# catch ANYTHING that goes wrong in the child process
|
||||
exc_type, exc, tb = sys.exc_info()
|
||||
|
||||
# Need to unwind the traceback in the child because traceback
|
||||
# objects can't be sent to the parent.
|
||||
tb_string = traceback.format_exc()
|
||||
exc_type = type(e)
|
||||
tb = e.__traceback__
|
||||
tb_string = "".join(traceback.format_exception(exc_type, e, tb))
|
||||
|
||||
# build up some context from the offending package so we can
|
||||
# show that, too.
|
||||
@@ -1189,8 +1234,8 @@ def _setup_pkg_and_run(
|
||||
elif context == "test":
|
||||
logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))
|
||||
|
||||
error_msg = str(exc)
|
||||
if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
|
||||
error_msg = str(e)
|
||||
if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
|
||||
process = "test the installation" if context == "test" else "build from sources"
|
||||
error_msg = (
|
||||
"The '{}' package cannot find an attribute while trying to {}. "
|
||||
@@ -1200,7 +1245,7 @@ def _setup_pkg_and_run(
|
||||
"More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
|
||||
).format(pkg.name, process, context)
|
||||
error_msg = colorize("@*R{{{}}}".format(error_msg))
|
||||
error_msg = "{}\n\n{}".format(str(exc), error_msg)
|
||||
error_msg = "{}\n\n{}".format(str(e), error_msg)
|
||||
|
||||
# make a pickleable exception to send to parent.
|
||||
msg = "%s: %s" % (exc_type.__name__, error_msg)
|
||||
|
@@ -7,7 +7,6 @@
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.directives
|
||||
import spack.package_base
|
||||
import spack.util.executable
|
||||
|
||||
from .autotools import AutotoolsBuilder, AutotoolsPackage
|
||||
|
@@ -10,7 +10,6 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
@@ -297,18 +296,6 @@ def initconfig_hardware_entries(self):
|
||||
def std_initconfig_entries(self):
|
||||
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
|
||||
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
|
||||
cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
|
||||
cmake_rpaths_path = ";".join(cmake_rpaths_env)
|
||||
complete_rpath_list = cmake_rpaths_path
|
||||
if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
|
||||
spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
|
||||
spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
|
||||
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
|
||||
|
||||
if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
|
||||
spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
|
||||
spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
|
||||
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
|
||||
|
||||
return [
|
||||
"#------------------{0}".format("-" * 60),
|
||||
@@ -318,8 +305,6 @@ def std_initconfig_entries(self):
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
|
||||
cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
|
||||
cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
|
||||
cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
|
||||
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
|
||||
]
|
||||
|
||||
|
@@ -8,17 +8,19 @@
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Optional, Tuple
|
||||
from itertools import chain
|
||||
from typing import List, Optional, Set, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util.lang import stable_partition
|
||||
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
from spack.util.environment import filter_system_paths
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
|
||||
@@ -152,6 +154,24 @@ def _values(x):
|
||||
conflicts(f"generator={x}")
|
||||
|
||||
|
||||
def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
|
||||
"""Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
|
||||
attribute of direct build/test and transitive link dependencies."""
|
||||
# Add direct build/test deps
|
||||
selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
|
||||
# Add transitive link deps
|
||||
selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
|
||||
# Separate out externals so they do not shadow Spack prefixes
|
||||
externals, spack_built = stable_partition(
|
||||
(s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
|
||||
lambda x: x.external,
|
||||
)
|
||||
|
||||
return filter_system_paths(
|
||||
path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
|
||||
)
|
||||
|
||||
|
||||
class CMakePackage(spack.package_base.PackageBase):
|
||||
"""Specialized class for packages built using CMake
|
||||
|
||||
@@ -358,6 +378,16 @@ def std_args(pkg, generator=None):
|
||||
"-G",
|
||||
generator,
|
||||
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
|
||||
define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
|
||||
# only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
|
||||
define(
|
||||
"CMAKE_INSTALL_RPATH",
|
||||
[
|
||||
pathlib.Path(pkg.prefix, "lib").as_posix(),
|
||||
pathlib.Path(pkg.prefix, "lib64").as_posix(),
|
||||
],
|
||||
),
|
||||
define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
|
||||
define("CMAKE_BUILD_TYPE", build_type),
|
||||
]
|
||||
|
||||
@@ -372,15 +402,6 @@ def std_args(pkg, generator=None):
|
||||
_conditional_cmake_defaults(pkg, args)
|
||||
_maybe_set_python_hints(pkg, args)
|
||||
|
||||
# Set up CMake rpath
|
||||
args.extend(
|
||||
[
|
||||
define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
|
||||
define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
|
||||
define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
|
||||
]
|
||||
)
|
||||
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
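For reference, the define helper renders list values as semicolon-joined CMake cache strings, so the two entries added above come out roughly as follows (prefix paths are illustrative):

define("CMAKE_INSTALL_RPATH", ["/opt/spack/foo-1.0/lib", "/opt/spack/foo-1.0/lib64"])
# -> "-DCMAKE_INSTALL_RPATH:STRING=/opt/spack/foo-1.0/lib;/opt/spack/foo-1.0/lib64"
define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg))
# -> "-DCMAKE_PREFIX_PATH:STRING=<spack-built prefixes first, then externals>"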
@@ -541,6 +562,13 @@ def cmake_args(self):
|
||||
|
||||
def cmake(self, pkg, spec, prefix):
|
||||
"""Runs ``cmake`` in the build directory"""
|
||||
|
||||
# skip cmake phase if it is an incremental develop build
|
||||
if spec.is_develop and os.path.isfile(
|
||||
os.path.join(self.build_directory, "CMakeCache.txt")
|
||||
):
|
||||
return
|
||||
|
||||
options = self.std_cmake_args
|
||||
options += self.cmake_args()
|
||||
options.append(os.path.abspath(self.root_cmakelists_dir))
|
||||
|
@@ -110,8 +110,8 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
|
||||
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
|
||||
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
|
||||
depends_on("cuda@5.0:", when="cuda_arch=35")
|
||||
depends_on("cuda@6.5:", when="cuda_arch=37")
|
||||
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
|
||||
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
|
||||
|
||||
depends_on("cuda@6.0:", when="cuda_arch=50")
|
||||
depends_on("cuda@6.5:", when="cuda_arch=52")
|
||||
@@ -131,6 +131,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
depends_on("cuda@11.8:", when="cuda_arch=89")
|
||||
|
||||
depends_on("cuda@12.0:", when="cuda_arch=90")
|
||||
depends_on("cuda@12.0:", when="cuda_arch=90a")
|
||||
|
||||
# From the NVIDIA install guide we know of conflicts for particular
|
||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||
@@ -149,7 +150,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
# minimum supported versions
|
||||
conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
|
||||
conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
|
||||
conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
|
||||
conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
|
||||
|
||||
# maximum supported version
|
||||
@@ -241,6 +241,11 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
|
||||
conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
|
||||
|
||||
# ARM
|
||||
# https://github.com/spack/spack/pull/39666#issuecomment-2377609263
|
||||
# Might need to be expanded to other gcc versions
|
||||
conflicts("%gcc@13.2.0", when="+cuda ^cuda@:12.4 target=aarch64:")
|
||||
|
||||
# XL is mostly relevant for ppc64le Linux
|
||||
conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
|
||||
conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
|
||||
|
@@ -44,16 +44,27 @@ class GoBuilder(BaseBuilder):
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:meth:`~.GoBuilder.build_args` | Specify arguments |
|
||||
| :py:attr:`~.GoBuilder.build_args` | Specify arguments |
|
||||
| | to ``go build`` |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.GoBuilder.check_args` | Specify arguments |
|
||||
| :py:attr:`~.GoBuilder.check_args` | Specify arguments |
|
||||
| | to ``go test`` |
|
||||
+-----------------------------------------------+--------------------+
|
||||
"""
|
||||
|
||||
phases = ("build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("check", "installcheck")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = (
|
||||
"build_args",
|
||||
"check_args",
|
||||
"build_directory",
|
||||
"install_time_test_callbacks",
|
||||
)
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["check"]
|
||||
|
||||
|
@@ -339,7 +339,7 @@ class PythonPackage(PythonExtension):
|
||||
legacy_buildsystem = "python_pip"
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["test"]
|
||||
install_time_test_callbacks = ["test_imports"]
|
||||
|
||||
build_system("python_pip")
|
||||
|
||||
@@ -429,7 +429,7 @@ class PythonPipBuilder(BaseBuilder):
|
||||
phases = ("install",)
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("test",)
|
||||
legacy_methods = ("test_imports",)
|
||||
|
||||
#: Same as legacy_methods, but the signature is different
|
||||
legacy_long_methods = ("install_options", "global_options", "config_settings")
|
||||
@@ -438,7 +438,7 @@ class PythonPipBuilder(BaseBuilder):
|
||||
legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["test"]
|
||||
install_time_test_callbacks = ["test_imports"]
|
||||
|
||||
@staticmethod
|
||||
def std_args(cls) -> List[str]:
|
||||
|
@@ -521,10 +521,6 @@ def stage(self):
|
||||
def prefix(self):
|
||||
return self.pkg.prefix
|
||||
|
||||
def test(self):
|
||||
# Defer tests to virtual and concrete packages
|
||||
pass
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""Sets up the build environment for a package.
|
||||
|
||||
|
@@ -10,6 +10,7 @@
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import ssl
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
@@ -19,14 +20,14 @@
|
||||
from collections import defaultdict, namedtuple
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
from urllib.error import HTTPError, URLError
|
||||
from urllib.parse import urlencode
|
||||
from urllib.request import HTTPHandler, Request, build_opener
|
||||
from urllib.parse import quote, urlencode, urlparse
|
||||
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
|
||||
|
||||
import ruamel.yaml
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
from llnl.util.lang import Singleton, memoized
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
|
||||
import spack
|
||||
@@ -50,6 +51,31 @@
|
||||
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||
|
||||
|
||||
def _urlopen():
|
||||
error_handler = web_util.SpackHTTPDefaultErrorHandler()
|
||||
|
||||
# One opener with HTTPS ssl enabled
|
||||
with_ssl = build_opener(
|
||||
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
|
||||
)
|
||||
|
||||
# One opener with HTTPS ssl disabled
|
||||
without_ssl = build_opener(
|
||||
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
|
||||
)
|
||||
|
||||
# And dynamically dispatch based on the config:verify_ssl.
|
||||
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
|
||||
opener = with_ssl if verify_ssl else without_ssl
|
||||
timeout = timeout or spack.config.get("config:connect_timeout", 1)
|
||||
return opener.open(fullurl, data, timeout)
|
||||
|
||||
return dispatch_open
|
||||
|
||||
|
||||
_dyn_mapping_urlopener = Singleton(_urlopen)
|
||||
|
||||
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
|
||||
JOB_RETRY_CONDITIONS = [
|
||||
# "always",
|
||||
@@ -405,9 +431,20 @@ def __init__(self, ci_config, spec_labels, stages):
|
||||
if name not in ["any", "build"]:
|
||||
jobs[name] = self.__init_job("")
|
||||
|
||||
def __init_job(self, spec):
|
||||
def __init_job(self, release_spec):
|
||||
"""Initialize job object"""
|
||||
return {"spec": spec, "attributes": {}}
|
||||
job_object = {"spec": release_spec, "attributes": {}}
|
||||
if release_spec:
|
||||
job_vars = job_object["attributes"].setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
|
||||
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
|
||||
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
|
||||
|
||||
return job_object
|
||||
|
||||
    def __is_named(self, section):
        """Check if a pipeline-gen configuration section is for a named job,
@@ -500,6 +537,7 @@ def generate_ir(self):
        for section in reversed(pipeline_gen):
            name = self.__is_named(section)
            has_submapping = "submapping" in section
            has_dynmapping = "dynamic-mapping" in section
            section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)

            if name:
@@ -542,6 +580,108 @@ def _apply_section(dest, src):
                    job["attributes"] = self.__apply_submapping(
                        job["attributes"], job["spec"], section
                    )
            elif has_dynmapping:
                mapping = section["dynamic-mapping"]

                dynmap_name = mapping.get("name")

                # Check if this section should be skipped
                dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
                if dynmap_name and dynmap_skip:
                    if re.match(dynmap_skip, dynmap_name):
                        continue

                # Get the endpoint
                endpoint = mapping["endpoint"]
                endpoint_url = urlparse(endpoint)

                # Configure the request header
                header = {"User-Agent": web_util.SPACK_USER_AGENT}
                header.update(mapping.get("header", {}))

                # Expand header environment variables
                # i.e. if tokens are passed
                for value in header.values():
                    value = os.path.expandvars(value)

                verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
                timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))

                required = mapping.get("require", [])
                allowed = mapping.get("allow", [])
                ignored = mapping.get("ignore", [])

                # required keys are implicitly allowed
                allowed = sorted(set(allowed + required))
                ignored = sorted(set(ignored))
                required = sorted(set(required))

                # Make sure required things are not also ignored
                assert not any([ikey in required for ikey in ignored])

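The mapping consumed here comes from a dynamic-mapping entry in the ci:pipeline-gen configuration. A hypothetical entry, written as the Python dict the code sees after YAML parsing (endpoint, header, and key values are invented, loosely modeled on the spack-gantry allocation API referenced below):

mapping = {
    "name": "gantry-allocation",  # hypothetical section name
    "endpoint": "https://gantry.example.com/v1/allocation",
    "header": {"Authorization": "Bearer $GANTRY_TOKEN"},  # env vars expanded above
    "verify_ssl": True,
    "timeout": 5,
    "require": ["variables"],
    "allow": ["variables", "tags"],
    "ignore": ["retry"],
}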
                def job_query(job):
                    job_vars = job["attributes"]["variables"]
                    query = (
                        "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
                        # The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
                        " {SPACK_JOB_SPEC_VARIANTS}"
                        " arch={SPACK_JOB_SPEC_ARCH}"
                        "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
                    ).format_map(job_vars)
                    return f"spec={quote(query)}"

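Given the variables set by __init_job, job_query renders a URL-encoded spec string. For the hypothetical zlib job sketched earlier, the result would look roughly like this:

from urllib.parse import quote

query = quote("zlib@1.3.1 +shared arch=linux-ubuntu22.04-x86_64_v3%gcc@12.3.0")
# roughly: zlib%401.3.1%20%2Bshared%20arch%3Dlinux-ubuntu22.04-x86_64_v3%25gcc%4012.3.0
request_target = f"spec={query}"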
                for job in jobs.values():
                    if not job["spec"]:
                        continue

                    # Create request for this job
                    query = job_query(job)
                    request = Request(
                        endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
                    )
                    try:
                        response = _dyn_mapping_urlopener(
                            request, verify_ssl=verify_ssl, timeout=timeout
                        )
                    except Exception as e:
                        # For now just ignore any errors from dynamic mapping and continue
                        # This is still experimental, and failures should not stop CI
                        # from running normally
                        tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
                        tty.warn(f"{e}")
                        continue

                    config = json.load(codecs.getreader("utf-8")(response))

                    # Strip ignore keys
                    if ignored:
                        for key in ignored:
                            if key in config:
                                config.pop(key)

                    # Only keep allowed keys
                    clean_config = {}
                    if allowed:
                        for key in allowed:
                            if key in config:
                                clean_config[key] = config[key]
                    else:
                        clean_config = config

                    # Verify all of the required keys are present
                    if required:
                        missing_keys = []
                        for key in required:
                            if key not in clean_config.keys():
                                missing_keys.append(key)

                        if missing_keys:
                            tty.warn(f"Response missing required keys: {missing_keys}")

                    if clean_config:
                        job["attributes"] = spack.config.merge_yaml(
                            job.get("attributes", {}), clean_config
                        )

        for _, job in jobs.items():
            if job["spec"]:
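The allow/ignore/require handling above can be read as a small filter over the JSON response. A self-contained sketch (the function name is ours, not Spack's) with a made-up response:

def filter_response(config: dict, allowed, ignored, required):
    # Drop ignored keys, keep only allowed keys when an allow-list is given,
    # and report any required keys the endpoint failed to return.
    config = {k: v for k, v in config.items() if k not in set(ignored)}
    if allowed:
        config = {k: v for k, v in config.items() if k in set(allowed)}
    missing = [k for k in required if k not in config]
    return config, missing


clean, missing = filter_response(
    {"variables": {"KUBERNETES_CPU_REQUEST": "2"}, "retry": {"max": 2}},
    allowed=["variables", "tags"],
    ignored=["retry"],
    required=["variables"],
)
assert clean == {"variables": {"KUBERNETES_CPU_REQUEST": "2"}} and missing == []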
@@ -952,15 +1092,6 @@ def main_script_replacements(cmd):
|
||||
|
||||
job_name = get_job_name(release_spec, build_group)
|
||||
|
||||
job_vars = job_object.setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
|
||||
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
|
||||
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
if enable_artifacts_buildcache:
|
||||
@@ -1038,6 +1169,7 @@ def main_script_replacements(cmd):
|
||||
|
||||
# Let downstream jobs know whether the spec needed rebuilding, regardless
|
||||
# whether DAG pruning was enabled or not.
|
||||
job_vars = job_object["variables"]
|
||||
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
|
||||
|
||||
if cdash_handler:
|
||||
@@ -1272,7 +1404,9 @@ def main_script_replacements(cmd):
|
||||
else:
|
||||
# No jobs were generated
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
noop_job["retry"] = service_job_retries
|
||||
# If this job fails ignore the status and carry on
|
||||
noop_job["retry"] = 0
|
||||
noop_job["allow_failure"] = True
|
||||
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.debug("Generating no-op job as copy-only is unsupported here.")
|
||||
|
@@ -19,12 +19,23 @@
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
# DEPRECATED: equivalent to --generic --target
|
||||
subparser.add_argument(
|
||||
"-g", "--generic-target", action="store_true", help="show the best generic target"
|
||||
"-g",
|
||||
"--generic-target",
|
||||
action="store_true",
|
||||
help="show the best generic target (deprecated)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--known-targets", action="store_true", help="show a list of all known targets and exit"
|
||||
)
|
||||
target_type = subparser.add_mutually_exclusive_group()
|
||||
target_type.add_argument(
|
||||
"--family", action="store_true", help="print generic ISA (x86_64, aarch64, ppc64le, ...)"
|
||||
)
|
||||
target_type.add_argument(
|
||||
"--generic", action="store_true", help="print feature level (x86_64_v3, armv8.4a, ...)"
|
||||
)
|
||||
parts = subparser.add_mutually_exclusive_group()
|
||||
parts2 = subparser.add_mutually_exclusive_group()
|
||||
parts.add_argument(
|
||||
@@ -80,6 +91,7 @@ def display_target_group(header, target_group):
|
||||
|
||||
def arch(parser, args):
|
||||
if args.generic_target:
|
||||
# TODO: add deprecation warning in 0.24
|
||||
print(archspec.cpu.host().generic)
|
||||
return
|
||||
|
||||
@@ -96,6 +108,10 @@ def arch(parser, args):
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.operating_system(os_args)
|
||||
host_target = host_platform.target(target_args)
|
||||
if args.family:
|
||||
host_target = host_target.family
|
||||
elif args.generic:
|
||||
host_target = host_target.generic
|
||||
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
|
||||
|
||||
if args.platform:
|
||||
|
@@ -660,34 +660,32 @@ def mirror_name_or_url(m):
    # accidentally to a dir in the current working directory.

    # If there's a \ or / in the name, it's interpreted as a path or url.
    if "/" in m or "\\" in m:
    if "/" in m or "\\" in m or m in (".", ".."):
        return spack.mirror.Mirror(m)

    # Otherwise, the named mirror is required to exist.
    try:
        return spack.mirror.require_mirror_name(m)
    except ValueError as e:
        raise argparse.ArgumentTypeError(
            str(e) + ". Did you mean {}?".format(os.path.join(".", m))
        )
        raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e


def mirror_url(url):
    try:
        return spack.mirror.Mirror.from_url(url)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
        raise argparse.ArgumentTypeError(str(e)) from e


def mirror_directory(path):
    try:
        return spack.mirror.Mirror.from_local_path(path)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
        raise argparse.ArgumentTypeError(str(e)) from e


def mirror_name(name):
    try:
        return spack.mirror.require_mirror_name(name)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
        raise argparse.ArgumentTypeError(str(e)) from e

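The change from raise argparse.ArgumentTypeError(str(e)) to raise ... from e keeps the original exception attached as explicit chained context instead of an implicit one. A minimal illustration of the idiom outside Spack:

import argparse


def positive_int(value: str) -> int:
    try:
        number = int(value)
    except ValueError as e:
        # "from e" records the ValueError as __cause__, so tracebacks show
        # where the failure actually originated rather than just the re-raise.
        raise argparse.ArgumentTypeError(f"{value!r} is not an integer") from e
    if number <= 0:
        raise argparse.ArgumentTypeError(f"{value!r} must be positive")
    return number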
@@ -85,8 +85,14 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
|
||||
|
||||
|
||||
def develop(parser, args):
|
||||
# Note: we could put develop specs in any scope, but I assume
|
||||
# users would only ever want to do this for either (a) an active
|
||||
# env or (b) a specified config file (e.g. that is included by
|
||||
# an environment)
|
||||
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
|
||||
# an active env is not required if a scope is specified
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
if not args.spec:
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
if args.clone is False:
|
||||
raise SpackError("No spec provided to spack develop command")
|
||||
|
||||
@@ -116,16 +122,18 @@ def develop(parser, args):
|
||||
raise SpackError("spack develop requires at most one named spec")
|
||||
|
||||
    spec = specs[0]

    version = spec.versions.concrete_range_as_version
    if not version:
        raise SpackError("Packages to develop must have a concrete version")
        # look up the maximum version so infinity versions are preferred for develop
        version = max(spec.package_class.versions.keys())
        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])
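As the comment above notes, Spack's "infinity" versions (develop, main, master, ...) sort above any numeric release, so taking max() over a package's known versions prefers the development version when one exists. A small sketch, assuming spack.version.Version comparison behaves as described:

from spack.version import Version

versions = [Version("2.1.0"), Version("develop"), Version("2.2.0")]
assert max(versions) == Version("develop")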
|
||||
# If user does not specify --path, we choose to create a directory in the
|
||||
# active environment's directory, named after the spec
|
||||
path = args.path or spec.name
|
||||
if not os.path.isabs(path):
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
|
||||
else:
|
||||
abspath = path
|
||||
@@ -149,13 +157,6 @@ def develop(parser, args):
|
||||
|
||||
_retrieve_develop_source(spec, abspath)
|
||||
|
||||
# Note: we could put develop specs in any scope, but I assume
|
||||
# users would only ever want to do this for either (a) an active
|
||||
# env or (b) a specified config file (e.g. that is included by
|
||||
# an environment)
|
||||
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
|
||||
# an active env is not required if a scope is specified
|
||||
env = spack.cmd.require_active_env(cmd_name="develop")
|
||||
tty.debug("Updating develop config for {0} transactionally".format(env.name))
|
||||
with env.write_transaction():
|
||||
if args.build_directory is not None:
|
||||
|
@@ -270,7 +270,7 @@ def env_activate_setup_parser(subparser):
|
||||
nargs="?",
|
||||
default=None,
|
||||
help=(
|
||||
"name of managed environment or directory of the anonymous env"
|
||||
"name of managed environment or directory of the independent env"
|
||||
" (when using --dir/-d) to activate"
|
||||
),
|
||||
)
|
||||
@@ -540,7 +540,7 @@ def env_rename_setup_parser(subparser):
|
||||
def env_rename(args):
|
||||
"""Rename an environment.
|
||||
|
||||
This renames a managed environment or moves an anonymous environment.
|
||||
This renames a managed environment or moves an independent environment.
|
||||
"""
|
||||
|
||||
# Directory option has been specified
|
||||
|
@@ -174,9 +174,9 @@ def query_arguments(args):
    if (args.missing or args.only_missing) and not args.only_deprecated:
        installed.append(InstallStatuses.MISSING)

    known = any
    predicate_fn = None
    if args.unknown:
        known = False
        predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)

    explicit = any
    if args.explicit:
@@ -184,7 +184,7 @@ def query_arguments(args):
    if args.implicit:
        explicit = False

    q_args = {"installed": installed, "known": known, "explicit": explicit}
    q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}

    install_tree = args.install_tree
    upstreams = spack.config.get("upstreams", {})

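predicate_fn generalizes the old boolean known argument: any callable over an InstallRecord can now narrow the query instead of one special-cased flag. A hedged sketch of a direct caller, assuming the public query wrapper forwards keyword arguments to _query as the diff suggests:

import spack.repo
import spack.store

# Keep only installed records whose package recipe no longer exists in any
# configured repository -- the same criterion `spack find --unknown` uses.
unknown_only = lambda record: not spack.repo.PATH.exists(record.spec.name)
unknown_specs = spack.store.STORE.db.query(predicate_fn=unknown_only)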
@@ -41,7 +41,7 @@ def setup_parser(subparser):
|
||||
help="do not remove installed build-only dependencies of roots\n"
|
||||
"(default is to keep only link & run dependencies)",
|
||||
)
|
||||
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
|
||||
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all", "constraint"])
|
||||
|
||||
|
||||
def roots_from_environments(args, active_env):
|
||||
@@ -97,6 +97,12 @@ def gc(parser, args):
|
||||
root_hashes = None
|
||||
|
||||
specs = spack.store.STORE.db.unused_specs(root_hashes=root_hashes, deptype=deptype)
|
||||
|
||||
# limit search to constraint specs if provided
|
||||
if args.constraint:
|
||||
hashes = set(spec.dag_hash() for spec in args.specs())
|
||||
specs = [spec for spec in specs if spec.dag_hash() in hashes]
|
||||
|
||||
if not specs:
|
||||
tty.msg("There are no unused specs. Spack's store is clean.")
|
||||
return
|
||||
|
@@ -378,7 +378,10 @@ def refresh(module_type, specs, args):
|
||||
def modules_cmd(parser, args, module_type, callbacks=callbacks):
|
||||
# Qualifiers to be used when querying the db for specs
|
||||
constraint_qualifiers = {
|
||||
"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
|
||||
"refresh": {
|
||||
"installed": True,
|
||||
"predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
|
||||
}
|
||||
}
|
||||
query_args = constraint_qualifiers.get(args.subparser_name, {})
|
||||
|
||||
|
@@ -33,6 +33,8 @@
|
||||
YamlFilesystemView.
|
||||
|
||||
"""
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.link_tree import MergeConflictError
|
||||
|
||||
@@ -178,7 +180,12 @@ def setup_parser(sp):
|
||||
|
||||
|
||||
def view(parser, args):
|
||||
"Produce a view of a set of packages."
|
||||
"""Produce a view of a set of packages."""
|
||||
|
||||
if sys.platform == "win32" and args.action in ("hardlink", "hard"):
|
||||
# Hard-linked views are not yet allowed on Windows.
|
||||
# See https://github.com/spack/spack/pull/46335#discussion_r1757411915
|
||||
tty.die("Hard linking is not supported on Windows. Please use symlinks or copy methods.")
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs)
|
||||
path = args.path[0]
|
||||
|
@@ -415,14 +415,19 @@ def implicit_rpaths(self) -> List[str]:
|
||||
return list(paths_containing_libs(link_dirs, all_required_libs))
|
||||
|
||||
@property
|
||||
def default_libc(self) -> Optional["spack.spec.Spec"]:
|
||||
"""Determine libc targeted by the compiler from link line"""
|
||||
def default_dynamic_linker(self) -> Optional[str]:
|
||||
"""Determine default dynamic linker from compiler link line"""
|
||||
output = self.compiler_verbose_output
|
||||
|
||||
if not output:
|
||||
return None
|
||||
|
||||
dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
|
||||
return spack.util.libc.parse_dynamic_linker(output)
|
||||
|
||||
@property
|
||||
def default_libc(self) -> Optional["spack.spec.Spec"]:
|
||||
"""Determine libc targeted by the compiler from link line"""
|
||||
dynamic_linker = self.default_dynamic_linker
|
||||
|
||||
if not dynamic_linker:
|
||||
return None
|
||||
|
@@ -92,6 +92,14 @@ def c11_flag(self):
|
||||
else:
|
||||
return "-std=c1x"
|
||||
|
||||
@property
|
||||
def c18_flag(self):
|
||||
# c18 supported since oneapi 2022, which is classic version 2021.5.0
|
||||
if self.real_version < Version("21.5.0"):
|
||||
raise UnsupportedCompilerFlag(self, "the C18 standard", "c18_flag", "< 21.5.0")
|
||||
else:
|
||||
return "-std=c18"
|
||||
|
||||
@property
|
||||
def cc_pic_flag(self):
|
||||
return "-fPIC"
|
||||
|
@@ -293,6 +293,17 @@ def platform_toolset_ver(self):
|
||||
vs22_toolset = Version(toolset_ver) > Version("142")
|
||||
return toolset_ver if not vs22_toolset else "143"
|
||||
|
||||
@property
|
||||
def visual_studio_version(self):
|
||||
"""The four digit Visual Studio version (i.e. 2019 or 2022)
|
||||
|
||||
Note: This differs from the msvc version or toolset version as
|
||||
those properties track the compiler and build tools version
|
||||
respectively, whereas this tracks the VS release associated
|
||||
with a given MSVC compiler.
|
||||
"""
|
||||
return re.search(r"[0-9]{4}", self.cc).group(0)
|
||||
|
||||
def _compiler_version(self, compiler):
|
||||
"""Returns version object for given compiler"""
|
||||
# ignore_errors below is true here due to ifx's
|
||||
|
@@ -7,7 +7,9 @@
|
||||
from os.path import dirname, join
|
||||
|
||||
from llnl.util import tty
|
||||
from llnl.util.filesystem import ancestor
|
||||
|
||||
import spack.util.executable
|
||||
from spack.compiler import Compiler
|
||||
from spack.version import Version
|
||||
|
||||
@@ -116,6 +118,24 @@ def fc_pic_flag(self):
|
||||
def stdcxx_libs(self):
|
||||
return ("-cxxlib",)
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
# OneAPI reports its install prefix when running ``--version``
|
||||
# on the line ``InstalledDir: <prefix>/bin/compiler``.
|
||||
cc = spack.util.executable.Executable(self.cc)
|
||||
with self.compiler_environment():
|
||||
oneapi_output = cc("--version", output=str, error=str)
|
||||
|
||||
for line in oneapi_output.splitlines():
|
||||
if line.startswith("InstalledDir:"):
|
||||
oneapi_prefix = line.split(":")[1].strip()
|
||||
# Go from <prefix>/bin/compiler to <prefix>
|
||||
return ancestor(oneapi_prefix, 2)
|
||||
|
||||
raise RuntimeError(
|
||||
"could not find install prefix of OneAPI from output:\n\t{}".format(oneapi_output)
|
||||
)
|
||||
|
||||
def setup_custom_environment(self, pkg, env):
|
||||
# workaround bug in icpx driver where it requires sycl-post-link is on the PATH
|
||||
# It is located in the same directory as the driver. Error message:
|
||||
|
@@ -299,12 +299,9 @@ def __reduce__(self):
|
||||
database. If it is a spec, we'll evaluate
|
||||
``spec.satisfies(query_spec)``
|
||||
|
||||
known (bool or None): Specs that are "known" are those
|
||||
for which Spack can locate a ``package.py`` file -- i.e.,
|
||||
Spack "knows" how to install them. Specs that are unknown may
|
||||
represent packages that existed in a previous version of
|
||||
Spack, but have since either changed their name or
|
||||
been removed
|
||||
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||
whether that record is selected for the query. It can be used to craft criteria
|
||||
that need some data for selection not provided by the Database itself.
|
||||
|
||||
installed (bool or InstallStatus or typing.Iterable or None):
|
||||
if ``True``, includes only installed
|
||||
@@ -604,6 +601,9 @@ def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
|
||||
return self.dir / f"{spec.name}-{spec.dag_hash()}"
|
||||
|
||||
|
||||
SelectType = Callable[[InstallRecord], bool]
|
||||
|
||||
|
||||
class Database:
|
||||
#: Fields written for each install record
|
||||
record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS
|
||||
@@ -1245,7 +1245,7 @@ def _add(
|
||||
self._data[key].explicit = explicit
|
||||
|
||||
@_autospec
|
||||
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
|
||||
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False, allow_missing=False) -> None:
|
||||
"""Add spec at path to database, locking and reading DB to sync.
|
||||
|
||||
``add()`` will lock and read from the DB on disk.
|
||||
@@ -1254,7 +1254,7 @@ def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
|
||||
# TODO: ensure that spec is concrete?
|
||||
# Entire add is transactional.
|
||||
with self.write_transaction():
|
||||
self._add(spec, explicit=explicit)
|
||||
self._add(spec, explicit=explicit, allow_missing=allow_missing)
|
||||
|
||||
def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
|
||||
"""Get the exact spec OR get a single spec that matches."""
|
||||
@@ -1526,7 +1526,7 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
|
||||
def _query(
|
||||
self,
|
||||
query_spec=any,
|
||||
known=any,
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed=True,
|
||||
explicit=any,
|
||||
start_date=None,
|
||||
@@ -1534,7 +1534,7 @@ def _query(
|
||||
hashes=None,
|
||||
in_buildcache=any,
|
||||
origin=None,
|
||||
):
|
||||
) -> List["spack.spec.Spec"]:
|
||||
"""Run a query on the database."""
|
||||
|
||||
# TODO: Specs are a lot like queries. Should there be a
|
||||
@@ -1580,7 +1580,7 @@ def _query(
|
||||
if explicit is not any and rec.explicit != explicit:
|
||||
continue
|
||||
|
||||
if known is not any and known(rec.spec.name):
|
||||
if predicate_fn is not None and not predicate_fn(rec):
|
||||
continue
|
||||
|
||||
if start_date or end_date:
|
||||
@@ -1665,14 +1665,14 @@ def query(self, *args, **kwargs):
|
||||
query.__doc__ = ""
|
||||
query.__doc__ += _QUERY_DOCSTRING
|
||||
|
||||
def query_one(self, query_spec, known=any, installed=True):
|
||||
def query_one(self, query_spec, predicate_fn=None, installed=True):
|
||||
"""Query for exactly one spec that matches the query spec.
|
||||
|
||||
Raises an assertion error if more than one spec matches the
|
||||
query. Returns None if no installed package matches.
|
||||
|
||||
"""
|
||||
concrete_specs = self.query(query_spec, known=known, installed=installed)
|
||||
concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
|
||||
assert len(concrete_specs) <= 1
|
||||
return concrete_specs[0] if concrete_specs else None
|
||||
|
||||
|
@@ -11,6 +11,7 @@
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import warnings
|
||||
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type
|
||||
|
||||
@@ -18,6 +19,7 @@
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty
|
||||
|
||||
import spack.error
|
||||
import spack.spec
|
||||
import spack.util.elf as elf_utils
|
||||
import spack.util.environment
|
||||
@@ -66,6 +68,21 @@ def file_identifier(path):
    return s.st_dev, s.st_ino


def dedupe_paths(paths: List[str]) -> List[str]:
    """Deduplicate paths based on inode and device number. In case the list contains first a
    symlink and then the directory it points to, the symlink is replaced with the directory path.
    This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
    the former."""
    seen: Dict[Tuple[int, int], str] = {}
    for path in paths:
        identifier = file_identifier(path)
        if identifier not in seen:
            seen[identifier] = path
        elif not os.path.islink(path):
            seen[identifier] = path
    return list(seen.values())


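A quick usage sketch of the dedupe behavior, assuming a merged-usr layout where /bin is a symlink to /usr/bin so both resolve to the same device/inode pair:

paths = ["/bin", "/usr/bin", "/usr/local/bin"]
# /bin is seen first but is a symlink, so the later real directory /usr/bin
# replaces it in the same slot; overall order is otherwise preserved.
assert dedupe_paths(paths) == ["/usr/bin", "/usr/local/bin"]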
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
|
||||
"""Get the paths of all executables available from the current PATH.
|
||||
|
||||
@@ -82,8 +99,7 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
|
||||
"""
|
||||
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
|
||||
# Make sure we don't doubly list /usr/lib and /lib etc
|
||||
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
|
||||
return path_to_dict(search_paths)
|
||||
return path_to_dict(dedupe_paths(search_paths))
|
||||
|
||||
|
||||
def accept_elf(path, host_compat):
|
||||
@@ -144,7 +160,7 @@ def libraries_in_ld_and_system_library_path(
|
||||
search_paths = list(filter(os.path.isdir, search_paths))
|
||||
|
||||
# Make use we don't doubly list /usr/lib and /lib etc
|
||||
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
|
||||
search_paths = dedupe_paths(search_paths)
|
||||
|
||||
try:
|
||||
host_compat = elf_utils.get_elf_compat(sys.executable)
|
||||
@@ -260,8 +276,12 @@ def detect_specs(
|
||||
)
|
||||
except Exception as e:
|
||||
specs = []
|
||||
if spack.error.SHOW_BACKTRACE:
|
||||
details = traceback.format_exc()
|
||||
else:
|
||||
details = f"[{e.__class__.__name__}: {e}]"
|
||||
warnings.warn(
|
||||
f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
|
||||
f'error detecting "{pkg.name}" from prefix {candidate_path}: {details}'
|
||||
)
|
||||
|
||||
if not specs:
|
||||
@@ -435,9 +455,9 @@ def by_path(
|
||||
llnl.util.tty.debug(
|
||||
f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
|
||||
)
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
llnl.util.tty.debug(
|
||||
f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}"
|
||||
f"[EXTERNAL DETECTION] Skipping {pkg_name}: {traceback.format_exc()}"
|
||||
)
|
||||
|
||||
return result
|
||||
|
@@ -9,11 +9,13 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.environment.environment as ev
|
||||
import spack.paths
|
||||
import spack.spec
|
||||
import spack.traverse as traverse
|
||||
|
||||
@@ -226,6 +228,7 @@ def to_dict(self):
|
||||
"install_deps_target": self._target("install-deps"),
|
||||
"any_hash_target": self._target("%"),
|
||||
"jobserver_support": self.jobserver_support,
|
||||
"spack_script": shlex.quote(spack.paths.spack_script),
|
||||
"adjacency_list": self.make_adjacency_list,
|
||||
"phony_convenience_targets": " ".join(self.phony_convenience_targets),
|
||||
"pkg_ids_variable": self.pkg_identifier_variable,
|
||||
|
@@ -1159,6 +1159,8 @@ def clear(self, re_read=False):
|
||||
# things that cannot be recreated from file
|
||||
self.new_specs = [] # write packages for these on write()
|
||||
|
||||
self.manifest.clear()
|
||||
|
||||
@property
|
||||
def active(self):
|
||||
"""True if this environment is currently active."""
|
||||
@@ -2163,6 +2165,13 @@ def _concrete_specs_dict(self):
|
||||
# Assumes no legacy formats, since this was just created.
|
||||
spec_dict[ht.dag_hash.name] = s.dag_hash()
|
||||
concrete_specs[s.dag_hash()] = spec_dict
|
||||
|
||||
if s.build_spec is not s:
|
||||
for d in s.build_spec.traverse():
|
||||
build_spec_dict = d.node_dict_with_hashes(hash=ht.dag_hash)
|
||||
build_spec_dict[ht.dag_hash.name] = d.dag_hash()
|
||||
concrete_specs[d.dag_hash()] = build_spec_dict
|
||||
|
||||
return concrete_specs
|
||||
|
||||
def _concrete_roots_dict(self):
|
||||
@@ -2322,7 +2331,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
|
||||
specs_by_hash[lockfile_key] = spec
|
||||
|
||||
# Second pass: For each spec, get its dependencies from the node dict
|
||||
# and add them to the spec
|
||||
# and add them to the spec, including build specs
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
name, data = reader.name_and_data(node_dict)
|
||||
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
|
||||
@@ -2330,6 +2339,10 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
|
||||
specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
|
||||
)
|
||||
|
||||
if "build_spec" in node_dict:
|
||||
_, bhash, _ = reader.extract_build_spec_info_from_node_dict(node_dict)
|
||||
specs_by_hash[lockfile_key]._build_spec = specs_by_hash[bhash]
|
||||
|
||||
# Traverse the root specs one at a time in the order they appear.
|
||||
# The first time we see each DAG hash, that's the one we want to
|
||||
# keep. This is only required as long as we support older lockfile
|
||||
@@ -2789,6 +2802,11 @@ def remove_user_spec(self, user_spec: str) -> None:
|
||||
raise SpackEnvironmentError(msg) from e
|
||||
self.changed = True
|
||||
|
||||
def clear(self) -> None:
|
||||
"""Clear all user specs from the list of root specs"""
|
||||
self.configuration["specs"] = []
|
||||
self.changed = True
|
||||
|
||||
def override_user_spec(self, user_spec: str, idx: int) -> None:
|
||||
"""Overrides the user spec at index idx with the one passed as input.
|
||||
|
||||
|
@@ -12,6 +12,9 @@
|
||||
#: this is module-scoped because it needs to be set very early
|
||||
debug = 0
|
||||
|
||||
#: whether to show a backtrace when an error is printed, enabled with --backtrace.
|
||||
SHOW_BACKTRACE = False
|
||||
|
||||
|
||||
class SpackError(Exception):
|
||||
"""This is the superclass for all Spack errors.
|
||||
|
@@ -100,10 +100,12 @@ def view_copy(
|
||||
|
||||
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
|
||||
|
||||
try:
|
||||
os.chown(dst, src_stat.st_uid, src_stat.st_gid)
|
||||
except OSError:
|
||||
tty.debug(f"Can't change the permissions for {dst}")
|
||||
# The os module on Windows does not have a chown function.
|
||||
if sys.platform != "win32":
|
||||
try:
|
||||
os.chown(dst, src_stat.st_uid, src_stat.st_gid)
|
||||
except OSError:
|
||||
tty.debug(f"Can't change the permissions for {dst}")
|
||||
|
||||
|
||||
#: supported string values for `link_type` in an env, mapped to canonical values
|
||||
|
@@ -372,8 +372,7 @@ def phase_tests(
|
||||
builder.pkg.test_suite.current_test_spec = builder.pkg.spec
|
||||
builder.pkg.test_suite.current_base_spec = builder.pkg.spec
|
||||
|
||||
# TODO (post-34236): "test"->"test_" once remove deprecated methods
|
||||
have_tests = any(name.startswith("test") for name in method_names)
|
||||
have_tests = any(name.startswith("test_") for name in method_names)
|
||||
if have_tests:
|
||||
copy_test_files(builder.pkg, builder.pkg.spec)
|
||||
|
||||
@@ -477,16 +476,9 @@ def write_tested_status(self):
|
||||
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
|
||||
wdir = "." if work_dir is None else work_dir
|
||||
tester = pkg.tester
|
||||
# TODO (post-34236): "test"->"test_" once remove deprecated methods
|
||||
assert test_name and test_name.startswith(
|
||||
"test"
|
||||
), f"Test name must start with 'test' but {test_name} was provided"
|
||||
|
||||
if test_name == "test":
|
||||
tty.warn(
|
||||
"{}: the 'test' method is deprecated. Convert stand-alone "
|
||||
"test(s) to methods with names starting 'test_'.".format(pkg.name)
|
||||
)
|
||||
"test_"
|
||||
), f"Test name must start with 'test_' but {test_name} was provided"
|
||||
|
||||
title = "test: {}: {}".format(test_name, purpose or "unspecified purpose")
|
||||
with fs.working_dir(wdir, create=True):
|
||||
@@ -646,28 +638,11 @@ def test_functions(
|
||||
except spack.repo.UnknownPackageError:
|
||||
tty.debug(f"{vname}: virtual does not appear to have a package file")
|
||||
|
||||
# TODO (post-34236): Remove if removing empty test method check
|
||||
def skip(line):
|
||||
# This should match the lines in the deprecated test() method
|
||||
ln = line.strip()
|
||||
return ln.startswith("#") or ("warn" in ln and "deprecated" in ln)
|
||||
|
||||
doc_regex = r'\s+("""[\w\s\(\)\-\,\;\:]+""")'
|
||||
tests = []
|
||||
for clss in classes:
|
||||
methods = inspect.getmembers(clss, predicate=lambda x: inspect.isfunction(x))
|
||||
for name, test_fn in methods:
|
||||
# TODO (post-34236): "test"->"test_" once remove deprecated methods
|
||||
if not name.startswith("test"):
|
||||
continue
|
||||
|
||||
# TODO (post-34236): Could remove empty method check once remove
|
||||
# TODO (post-34236): deprecated methods though some use cases,
|
||||
# TODO (post-34236): such as checking packages have actual, non-
|
||||
# TODO (post-34236): empty tests, may want this check to remain.
|
||||
source = re.sub(doc_regex, r"", inspect.getsource(test_fn)).splitlines()[1:]
|
||||
lines = [ln.strip() for ln in source if not skip(ln)]
|
||||
if not lines:
|
||||
if not name.startswith("test_"):
|
||||
continue
|
||||
|
||||
tests.append((clss.__name__, test_fn)) # type: ignore[union-attr]
|
||||
|
@@ -2,8 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""
|
||||
This module encapsulates package installation functionality.
|
||||
"""This module encapsulates package installation functionality.
|
||||
|
||||
The PackageInstaller coordinates concurrent builds of packages for the same
|
||||
Spack instance by leveraging the dependency DAG and file system locks. It
|
||||
@@ -17,16 +16,18 @@
|
||||
File system locks enable coordination such that no two processes attempt to
|
||||
build the same or a failed dependency package.
|
||||
|
||||
Failures to install dependency packages result in removal of their dependents'
|
||||
build tasks from the current process. A failure file is also written (and
|
||||
locked) so that other processes can detect the failure and adjust their build
|
||||
tasks accordingly.
|
||||
If a dependency package fails to install, its dependents' tasks will be
|
||||
removed from the installing process's queue. A failure file is also written
|
||||
and locked. Other processes use this file to detect the failure and dequeue
|
||||
its dependents.
|
||||
|
||||
This module supports the coordination of local and distributed concurrent
|
||||
installations of packages in a Spack instance.
|
||||
|
||||
"""
|
||||
|
||||
import copy
|
||||
import enum
|
||||
import glob
|
||||
import heapq
|
||||
import io
|
||||
@@ -35,6 +36,7 @@
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
from collections import defaultdict
|
||||
from gzip import GzipFile
|
||||
from typing import Dict, Iterator, List, Optional, Set, Tuple, Union
|
||||
@@ -42,6 +44,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lock as lk
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import ordinal
|
||||
from llnl.util.lang import pretty_seconds
|
||||
from llnl.util.tty.color import colorize
|
||||
from llnl.util.tty.log import log_output
|
||||
@@ -57,6 +60,7 @@
|
||||
import spack.package_base
|
||||
import spack.package_prefs as prefs
|
||||
import spack.repo
|
||||
import spack.rewiring
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.executable
|
||||
@@ -70,25 +74,32 @@
|
||||
#: were added (see https://docs.python.org/2/library/heapq.html).
|
||||
_counter = itertools.count(0)
|
||||
|
||||
#: Build status indicating task has been added.
|
||||
STATUS_ADDED = "queued"
|
||||
|
||||
#: Build status indicating the spec failed to install
|
||||
STATUS_FAILED = "failed"
|
||||
class BuildStatus(enum.Enum):
|
||||
"""Different build (task) states."""
|
||||
|
||||
#: Build status indicating the spec is being installed (possibly by another
|
||||
#: process)
|
||||
STATUS_INSTALLING = "installing"
|
||||
#: Build status indicating task has been added/queued.
|
||||
QUEUED = enum.auto()
|
||||
|
||||
#: Build status indicating the spec was successfully installed
|
||||
STATUS_INSTALLED = "installed"
|
||||
#: Build status indicating the spec failed to install
|
||||
FAILED = enum.auto()
|
||||
|
||||
#: Build status indicating the task has been popped from the queue
|
||||
STATUS_DEQUEUED = "dequeued"
|
||||
#: Build status indicating the spec is being installed (possibly by another
|
||||
#: process)
|
||||
INSTALLING = enum.auto()
|
||||
|
||||
#: Build status indicating task has been removed (to maintain priority
|
||||
#: queue invariants).
|
||||
STATUS_REMOVED = "removed"
|
||||
#: Build status indicating the spec was successfully installed
|
||||
INSTALLED = enum.auto()
|
||||
|
||||
#: Build status indicating the task has been popped from the queue
|
||||
DEQUEUED = enum.auto()
|
||||
|
||||
#: Build status indicating task has been removed (to maintain priority
|
||||
#: queue invariants).
|
||||
REMOVED = enum.auto()
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.name.lower()}"
|
||||
|
||||
|
||||
def _write_timer_json(pkg, timer, cache):
|
||||
@@ -101,13 +112,22 @@ def _write_timer_json(pkg, timer, cache):
|
||||
        return


class InstallAction:
class ExecuteResult(enum.Enum):
    # Task succeeded
    SUCCESS = enum.auto()
    # Task failed
    FAILED = enum.auto()
    # Task is missing build spec and will be requeued
    MISSING_BUILD_SPEC = enum.auto()


class InstallAction(enum.Enum):
    #: Don't perform an install
    NONE = 0
    NONE = enum.auto()
    #: Do a standard install
    INSTALL = 1
    INSTALL = enum.auto()
    #: Do an overwrite install
    OVERWRITE = 2
    OVERWRITE = enum.auto()


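Moving from integer class attributes to enum.Enum members gives identity-safe comparisons and readable reprs, and ExecuteResult gives _install_task a third outcome beyond success/failure. A small sketch of how dispatch on the new members reads:

import enum


class ExecuteResult(enum.Enum):
    SUCCESS = enum.auto()
    FAILED = enum.auto()
    MISSING_BUILD_SPEC = enum.auto()


def handle(rc: ExecuteResult) -> str:
    # Members compare by identity; no accidental equality with plain ints.
    if rc is ExecuteResult.MISSING_BUILD_SPEC:
        return "requeue with a dependency on the build spec"
    return "record the result"


assert handle(ExecuteResult.MISSING_BUILD_SPEC).startswith("requeue")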
class InstallStatus:
|
||||
@@ -431,7 +451,7 @@ def _process_binary_cache_tarball(
|
||||
"""
|
||||
with timer.measure("fetch"):
|
||||
download_result = binary_distribution.download_tarball(
|
||||
pkg.spec, unsigned, mirrors_for_spec
|
||||
pkg.spec.build_spec, unsigned, mirrors_for_spec
|
||||
)
|
||||
|
||||
if download_result is None:
|
||||
@@ -442,6 +462,11 @@ def _process_binary_cache_tarball(
|
||||
with timer.measure("install"), spack.util.path.filter_padding():
|
||||
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
|
||||
|
||||
if pkg.spec.spliced: # overwrite old metadata with new
|
||||
spack.store.STORE.layout.write_spec(
|
||||
pkg.spec, spack.store.STORE.layout.spec_file_path(pkg.spec)
|
||||
)
|
||||
|
||||
if hasattr(pkg, "_post_buildcache_install_hook"):
|
||||
pkg._post_buildcache_install_hook()
|
||||
|
||||
@@ -677,7 +702,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
|
||||
def package_id(spec: "spack.spec.Spec") -> str:
|
||||
"""A "unique" package identifier for installation purposes
|
||||
|
||||
The identifier is used to track build tasks, locks, install, and
|
||||
The identifier is used to track tasks, locks, install, and
|
||||
failure statuses.
|
||||
|
||||
The identifier needs to distinguish between combinations of compilers
|
||||
@@ -736,14 +761,14 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Returns a formal representation of the build request."""
|
||||
"""Return a formal representation of the build request."""
|
||||
rep = f"{self.__class__.__name__}("
|
||||
for attr, value in self.__dict__.items():
|
||||
rep += f"{attr}={value.__repr__()}, "
|
||||
return f"{rep.strip(', ')})"
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Returns a printable version of the build request."""
|
||||
"""Return a printable version of the build request."""
|
||||
return f"package={self.pkg.name}, install_args={self.install_args}"
|
||||
|
||||
def _add_default_args(self) -> None:
|
||||
@@ -840,37 +865,42 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
|
||||
yield dep
|
||||
|
||||
|
||||
class BuildTask:
|
||||
"""Class for representing the build task for a package."""
|
||||
class Task:
|
||||
"""Base class for representing a task for a package."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
pkg: "spack.package_base.PackageBase",
|
||||
request: Optional[BuildRequest],
|
||||
compiler: bool,
|
||||
start: float,
|
||||
attempts: int,
|
||||
status: str,
|
||||
installed: Set[str],
|
||||
request: BuildRequest,
|
||||
*,
|
||||
compiler: bool = False,
|
||||
start: float = 0.0,
|
||||
attempts: int = 0,
|
||||
status: BuildStatus = BuildStatus.QUEUED,
|
||||
installed: Set[str] = set(),
|
||||
):
|
||||
"""
|
||||
Instantiate a build task for a package.
|
||||
Instantiate a task for a package.
|
||||
|
||||
Args:
|
||||
pkg: the package to be built and installed
|
||||
request: the associated install request where ``None`` can be
|
||||
used to indicate the package was explicitly requested by the user
|
||||
compiler: whether task is for a bootstrap compiler
|
||||
request: the associated install request
|
||||
start: the initial start time for the package, in seconds
|
||||
attempts: the number of attempts to install the package
|
||||
attempts: the number of attempts to install the package, which
|
||||
should be 0 when the task is initially instantiated
|
||||
status: the installation status
|
||||
installed: the identifiers of packages that have
|
||||
installed: the (string) identifiers of packages that have
|
||||
been installed so far
|
||||
|
||||
Raises:
|
||||
``InstallError`` if the build status is incompatible with the task
|
||||
``TypeError`` if provided an argument of the wrong type
|
||||
``ValueError`` if provided an argument with the wrong value or state
|
||||
"""
|
||||
|
||||
# Ensure dealing with a package that has a concrete spec
|
||||
if not isinstance(pkg, spack.package_base.PackageBase):
|
||||
raise ValueError(f"{str(pkg)} must be a package")
|
||||
raise TypeError(f"{str(pkg)} must be a package")
|
||||
|
||||
self.pkg = pkg
|
||||
if not self.pkg.spec.concrete:
|
||||
@@ -881,26 +911,34 @@ def __init__(
|
||||
|
||||
# The explicit build request associated with the package
|
||||
if not isinstance(request, BuildRequest):
|
||||
raise ValueError(f"{str(pkg)} must have a build request")
|
||||
|
||||
raise TypeError(f"{request} is not a valid build request")
|
||||
self.request = request
|
||||
|
||||
# Initialize the status to an active state. The status is used to
|
||||
# ensure priority queue invariants when tasks are "removed" from the
|
||||
# queue.
|
||||
if status == STATUS_REMOVED:
|
||||
raise spack.error.InstallError(
|
||||
f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg
|
||||
)
|
||||
if not isinstance(status, BuildStatus):
|
||||
raise TypeError(f"{status} is not a valid build status")
|
||||
|
||||
# The initial build task cannot have status "removed".
|
||||
if attempts == 0 and status == BuildStatus.REMOVED:
|
||||
raise spack.error.InstallError(
|
||||
f"Cannot create a task for {self.pkg_id} with status '{status}'", pkg=pkg
|
||||
)
|
||||
self.status = status
|
||||
|
||||
# Package is associated with a bootstrap compiler
|
||||
self.compiler = compiler
|
||||
# cache the PID, which is used for distributed build messages in self.execute
|
||||
self.pid = os.getpid()
|
||||
|
||||
# The initial start time for processing the spec
|
||||
self.start = start
|
||||
|
||||
if not isinstance(installed, set):
|
||||
raise TypeError(
|
||||
f"BuildTask constructor requires 'installed' be a 'set', "
|
||||
f"not '{installed.__class__.__name__}'."
|
||||
)
|
||||
|
||||
# Set of dependents, which needs to include the requesting package
|
||||
# to support tracking of parallel, multi-spec, environment installs.
|
||||
self.dependents = set(get_dependent_ids(self.pkg.spec))
|
||||
@@ -921,16 +959,22 @@ def __init__(
|
||||
)
|
||||
|
||||
# List of uninstalled dependencies, which is used to establish
|
||||
# the priority of the build task.
|
||||
#
|
||||
# the priority of the task.
|
||||
self.uninstalled_deps = set(
|
||||
pkg_id for pkg_id in self.dependencies if pkg_id not in installed
|
||||
)
|
||||
|
||||
# Ensure key sequence-related properties are updated accordingly.
|
||||
self.attempts = 0
|
||||
self.attempts = attempts
|
||||
self._update()
|
||||
|
||||
def execute(self, install_status: InstallStatus) -> ExecuteResult:
|
||||
"""Execute the work of this task.
|
||||
|
||||
The ``install_status`` is an ``InstallStatus`` object used to format progress reporting for
|
||||
this task in the context of the full ``BuildRequest``."""
|
||||
raise NotImplementedError
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.key == other.key
|
||||
|
||||
@@ -950,14 +994,14 @@ def __ne__(self, other):
|
||||
return self.key != other.key
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Returns a formal representation of the build task."""
|
||||
"""Returns a formal representation of the task."""
|
||||
rep = f"{self.__class__.__name__}("
|
||||
for attr, value in self.__dict__.items():
|
||||
rep += f"{attr}={value.__repr__()}, "
|
||||
return f"{rep.strip(', ')})"
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Returns a printable version of the build task."""
|
||||
"""Returns a printable version of the task."""
|
||||
dependencies = f"#dependencies={len(self.dependencies)}"
|
||||
return "priority={0}, status={1}, start={2}, {3}".format(
|
||||
self.priority, self.status, self.start, dependencies
|
||||
@@ -974,8 +1018,7 @@ def _update(self) -> None:
|
||||
|
||||
def add_dependent(self, pkg_id: str) -> None:
|
||||
"""
|
||||
Ensure the dependent package id is in the task's list so it will be
|
||||
properly updated when this package is installed.
|
||||
Ensure the package is in this task's ``dependents`` list.
|
||||
|
||||
Args:
|
||||
pkg_id: package identifier of the dependent package
|
||||
@@ -984,6 +1027,20 @@ def add_dependent(self, pkg_id: str) -> None:
|
||||
tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}")
|
||||
self.dependents.add(pkg_id)
|
||||
|
||||
def add_dependency(self, pkg_id, installed=False):
|
||||
"""
|
||||
Ensure the package is in this task's ``dependencies`` list.
|
||||
|
||||
Args:
|
||||
pkg_id (str): package identifier of the dependency package
|
||||
installed (bool): install status of the dependency package
|
||||
"""
|
||||
if pkg_id != self.pkg_id and pkg_id not in self.dependencies:
|
||||
tty.debug(f"Adding {pkg_id} as a depencency of {self.pkg_id}")
|
||||
self.dependencies.add(pkg_id)
|
||||
if not installed:
|
||||
self.uninstalled_deps.add(pkg_id)
|
||||
|
||||
def flag_installed(self, installed: List[str]) -> None:
|
||||
"""
|
||||
Ensure the dependency is not considered to still be uninstalled.
|
||||
@@ -1000,6 +1057,39 @@ def flag_installed(self, installed: List[str]) -> None:
|
||||
level=2,
|
||||
)
|
||||
|
||||
def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
|
||||
"""
|
||||
Create and ensure proper access controls for the install directory.
|
||||
Write a small metadata file with the current spack environment.
|
||||
|
||||
Args:
|
||||
pkg: the package to be built and installed
|
||||
"""
|
||||
# Move to a module level method.
|
||||
if not os.path.exists(pkg.spec.prefix):
|
||||
path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
|
||||
tty.debug(f"Creating the installation directory {path}")
|
||||
spack.store.STORE.layout.create_install_directory(pkg.spec)
|
||||
else:
|
||||
# Set the proper group for the prefix
|
||||
group = prefs.get_package_group(pkg.spec)
|
||||
if group:
|
||||
fs.chgrp(pkg.spec.prefix, group)
|
||||
|
||||
# Set the proper permissions.
|
||||
# This has to be done after group because changing groups blows
|
||||
# away the sticky group bit on the directory
|
||||
mode = os.stat(pkg.spec.prefix).st_mode
|
||||
perms = prefs.get_package_dir_permissions(pkg.spec)
|
||||
if mode != perms:
|
||||
os.chmod(pkg.spec.prefix, perms)
|
||||
|
||||
# Ensure the metadata path exists as well
|
||||
fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)
|
||||
|
||||
# Always write host environment - we assume this can change
|
||||
spack.store.STORE.layout.write_host_environment(pkg.spec)
|
||||
|
||||
@property
|
||||
def explicit(self) -> bool:
|
||||
return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])
|
||||
@@ -1030,7 +1120,7 @@ def key(self) -> Tuple[int, int]:
|
||||
"""The key is the tuple (# uninstalled dependencies, sequence)."""
|
||||
return (self.priority, self.sequence)
|
||||
|
||||
def next_attempt(self, installed) -> "BuildTask":
|
||||
def next_attempt(self, installed) -> "Task":
|
||||
"""Create a new, updated task for the next installation attempt."""
|
||||
task = copy.copy(self)
|
||||
task._update()
|
||||
@@ -1044,6 +1134,100 @@ def priority(self):
|
||||
return len(self.uninstalled_deps)
|
||||
|
||||
|
||||
class BuildTask(Task):
|
||||
"""Class for representing a build task for a package."""
|
||||
|
||||
def execute(self, install_status):
|
||||
"""
|
||||
Perform the installation of the requested spec and/or dependency
|
||||
represented by the build task.
|
||||
"""
|
||||
install_args = self.request.install_args
|
||||
tests = install_args.get("tests")
|
||||
unsigned = install_args.get("unsigned")
|
||||
|
||||
pkg, pkg_id = self.pkg, self.pkg_id
|
||||
|
||||
tty.msg(install_msg(pkg_id, self.pid, install_status))
|
||||
self.start = self.start or time.time()
|
||||
self.status = BuildStatus.INSTALLING
|
||||
|
||||
# Use the binary cache if requested
|
||||
if self.use_cache:
|
||||
if _install_from_cache(pkg, self.explicit, unsigned):
|
||||
return ExecuteResult.SUCCESS
|
||||
elif self.cache_only:
|
||||
raise spack.error.InstallError(
|
||||
"No binary found when cache-only was specified", pkg=pkg
|
||||
)
|
||||
else:
|
||||
tty.msg(f"No binary for {pkg_id} found: installing from source")
|
||||
|
||||
pkg.run_tests = tests is True or tests and pkg.name in tests
|
||||
|
||||
# hook that allows tests to inspect the Package before installation
|
||||
# see unit_test_check() docs.
|
||||
if not pkg.unit_test_check():
|
||||
return ExecuteResult.FAILED
|
||||
|
||||
try:
|
||||
# Create stage object now and let it be serialized for the child process. That
|
||||
# way monkeypatch in tests works correctly.
|
||||
pkg.stage
|
||||
|
||||
self._setup_install_dir(pkg)
|
||||
|
||||
# Create a child process to do the actual installation.
|
||||
# Preserve verbosity settings across installs.
|
||||
spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
|
||||
pkg, build_process, install_args
|
||||
)
|
||||
|
||||
# Note: PARENT of the build process adds the new package to
|
||||
# the database, so that we don't need to re-read from file.
|
||||
spack.store.STORE.db.add(pkg.spec, explicit=self.explicit)
|
||||
except spack.error.StopPhase as e:
|
||||
# A StopPhase exception means that do_install was asked to
|
||||
# stop early from clients, and is not an error at this point
|
||||
pid = f"{self.pid}: " if tty.show_pid() else ""
|
||||
tty.debug(f"{pid}{str(e)}")
|
||||
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
|
||||
return ExecuteResult.SUCCESS
|
||||
|
||||
|
||||
class RewireTask(Task):
|
||||
"""Class for representing a rewire task for a package."""
|
||||
|
||||
def execute(self, install_status):
|
||||
"""Execute rewire task
|
||||
|
||||
Rewire tasks are executed by either rewiring self.package.spec.build_spec that is already
|
||||
installed or downloading and rewiring a binary for it.
|
||||
|
||||
If not available installed or as binary, return ExecuteResult.MISSING_BUILD_SPEC.
|
||||
This will prompt the Installer to requeue the task with a dependency on the BuildTask
|
||||
to install self.pkg.spec.build_spec
|
||||
"""
|
||||
oldstatus = self.status
|
||||
self.status = BuildStatus.INSTALLING
|
||||
tty.msg(install_msg(self.pkg_id, self.pid, install_status))
|
||||
self.start = self.start or time.time()
|
||||
if not self.pkg.spec.build_spec.installed:
|
||||
try:
|
||||
install_args = self.request.install_args
|
||||
unsigned = install_args.get("unsigned")
|
||||
_process_binary_cache_tarball(self.pkg, explicit=self.explicit, unsigned=unsigned)
|
||||
_print_installed_pkg(self.pkg.prefix)
|
||||
return ExecuteResult.SUCCESS
|
||||
except BaseException as e:
|
||||
tty.error(f"Failed to rewire {self.pkg.spec} from binary. {e}")
|
||||
self.status = oldstatus
|
||||
return ExecuteResult.MISSING_BUILD_SPEC
|
||||
spack.rewiring.rewire_node(self.pkg.spec, self.explicit)
|
||||
_print_installed_pkg(self.pkg.prefix)
|
||||
return ExecuteResult.SUCCESS
|
||||
|
||||
|
||||
class PackageInstaller:
|
||||
"""
|
||||
Class for managing the install process for a Spack instance based on a bottom-up DAG approach.
|
||||
@@ -1137,11 +1321,11 @@ def __init__(
|
||||
# List of build requests
|
||||
self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]
|
||||
|
||||
# Priority queue of build tasks
|
||||
self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
|
||||
# Priority queue of tasks
|
||||
self.build_pq: List[Tuple[Tuple[int, int], Task]] = []
|
||||
|
||||
# Mapping of unique package ids to build task
|
||||
self.build_tasks: Dict[str, BuildTask] = {}
|
||||
# Mapping of unique package ids to task
|
||||
self.build_tasks: Dict[str, Task] = {}
|
||||
|
||||
# Cache of package locks for failed packages, keyed on package's ids
|
||||
self.failed: Dict[str, Optional[lk.Lock]] = {}
|
||||
@@ -1162,6 +1346,9 @@ def __init__(
|
||||
# fast then that option applies to all build requests.
|
||||
self.fail_fast = False
|
||||
|
||||
# Initializing all_dependencies to empty. This will be set later in _init_queue.
|
||||
self.all_dependencies: Dict[str, Set[str]] = {}
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Returns a formal representation of the package installer."""
|
||||
rep = f"{self.__class__.__name__}("
|
||||
@@ -1180,23 +1367,19 @@ def __str__(self) -> str:
|
||||
def _add_init_task(
|
||||
self,
|
||||
pkg: "spack.package_base.PackageBase",
|
||||
request: Optional[BuildRequest],
|
||||
is_compiler: bool,
|
||||
request: BuildRequest,
|
||||
all_deps: Dict[str, Set[str]],
|
||||
) -> None:
|
||||
"""
|
||||
Creates and queus the initial build task for the package.
|
||||
Creates and queues the initial task for the package.
|
||||
|
||||
Args:
|
||||
pkg: the package to be built and installed
|
||||
request (BuildRequest or None): the associated install request
|
||||
where ``None`` can be used to indicate the package was
|
||||
explicitly requested by the user
|
||||
is_compiler (bool): whether task is for a bootstrap compiler
|
||||
all_deps (defaultdict(set)): dictionary of all dependencies and
|
||||
associated dependents
|
||||
request: the associated install request
|
||||
all_deps: dictionary of all dependencies and associated dependents
|
||||
"""
|
||||
task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
|
||||
cls = RewireTask if pkg.spec.spliced else BuildTask
|
||||
task = cls(pkg, request=request, status=BuildStatus.QUEUED, installed=self.installed)
|
||||
for dep_id in task.dependencies:
|
||||
all_deps[dep_id].add(package_id(pkg.spec))
|
||||
|
||||
@@ -1270,7 +1453,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
|
||||
else:
|
||||
lock.release_read()
|
||||
|
||||
def _prepare_for_install(self, task: BuildTask) -> None:
|
||||
def _prepare_for_install(self, task: Task) -> None:
|
||||
"""
|
||||
Check the database and leftover installation directories/files and
|
||||
prepare for a new install attempt for an uninstalled package.
|
||||
@@ -1278,7 +1461,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
|
||||
and ensuring the database is up-to-date.
|
||||
|
||||
Args:
|
||||
task (BuildTask): the build task whose associated package is
|
||||
task: the task whose associated package is
|
||||
being checked
|
||||
"""
|
||||
install_args = task.request.install_args
|
||||
@@ -1329,7 +1512,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
|
||||
spack.store.STORE.db.update_explicit(task.pkg.spec, True)
|
||||
|
||||
def _cleanup_all_tasks(self) -> None:
|
||||
"""Cleanup all build tasks to include releasing their locks."""
|
||||
"""Cleanup all tasks to include releasing their locks."""
|
||||
for pkg_id in self.locks:
|
||||
self._release_lock(pkg_id)
|
||||
|
||||
@@ -1361,7 +1544,7 @@ def _cleanup_failed(self, pkg_id: str) -> None:
|
||||
|
||||
def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
|
||||
"""
|
||||
Cleanup the build task for the spec
|
||||
Cleanup the task for the spec
|
||||
|
||||
Args:
|
||||
pkg: the package being installed
|
||||
@@ -1433,7 +1616,7 @@ def _ensure_locked(
|
||||
|
||||
if lock_type == "read":
|
||||
# Wait until the other process finishes if there are no more
|
||||
# build tasks with priority 0 (i.e., with no uninstalled
|
||||
# tasks with priority 0 (i.e., with no uninstalled
|
||||
# dependencies).
|
||||
no_p0 = len(self.build_tasks) == 0 or not self._next_is_pri0()
|
||||
timeout = None if no_p0 else 3.0
|
||||
@@ -1485,6 +1668,33 @@ def _ensure_locked(
|
||||
self.locks[pkg_id] = (lock_type, lock)
|
||||
return self.locks[pkg_id]
|
||||
|
||||
    def _requeue_with_build_spec_tasks(self, task):
        """Requeue the task and its missing build spec dependencies"""
        # Full install of the build_spec is necessary because it didn't already exist somewhere
        spec = task.pkg.spec
        for dep in spec.build_spec.traverse():
            dep_pkg = dep.package

            dep_id = package_id(dep)
            if dep_id not in self.build_tasks:
                self._add_init_task(dep_pkg, task.request, self.all_dependencies)

            # Clear any persistent failure markings _unless_ they are
            # associated with another process in this parallel build
            # of the spec.
            spack.store.STORE.failure_tracker.clear(dep, force=False)

        # Queue the build spec.
        build_pkg_id = package_id(spec.build_spec)
        build_spec_task = self.build_tasks[build_pkg_id]
        spec_pkg_id = package_id(spec)
        spec_task = task.next_attempt(self.installed)
        spec_task.status = BuildStatus.QUEUED
        # Convey a build spec as a dependency of a deployed spec.
        build_spec_task.add_dependent(spec_pkg_id)
        spec_task.add_dependency(build_pkg_id)
        self._push_task(spec_task)

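The requeue above effectively places the spliced spec's task behind a task for its build spec. A toy picture of the queue state it produces (identifiers and structure are illustrative only; real Task objects carry much more):

# Hypothetical state after requeueing spliced spec S whose build spec is B:
build_tasks = {
    "B-<hash>": {"dependents": {"S-<hash>"}, "uninstalled_deps": set()},  # priority 0, pops first
    "S-<hash>": {"dependents": set(), "uninstalled_deps": {"B-<hash>"}},  # waits on B
}
# Task priority is len(uninstalled_deps), so B installs before S is retried.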
def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
"""Add tasks to the priority queue for the given build request.
|
||||
|
||||
@@ -1514,7 +1724,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
|
||||
dep_id = package_id(dep)
|
||||
if dep_id not in self.build_tasks:
|
||||
self._add_init_task(dep_pkg, request, False, all_deps)
|
||||
self._add_init_task(dep_pkg, request, all_deps=all_deps)
|
||||
|
||||
# Clear any persistent failure markings _unless_ they are
|
||||
# associated with another process in this parallel build
|
||||
@@ -1532,80 +1742,29 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
self._check_deps_status(request)
|
||||
|
||||
# Now add the package itself, if appropriate
|
||||
self._add_init_task(request.pkg, request, False, all_deps)
|
||||
self._add_init_task(request.pkg, request, all_deps=all_deps)
|
||||
|
||||
# Ensure if one request is to fail fast then all requests will.
|
||||
fail_fast = bool(request.install_args.get("fail_fast"))
|
||||
self.fail_fast = self.fail_fast or fail_fast
|
||||
|
||||
def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
def _install_task(self, task: Task, install_status: InstallStatus) -> None:
|
||||
"""
|
||||
Perform the installation of the requested spec and/or dependency
|
||||
represented by the build task.
|
||||
represented by the task.
|
||||
|
||||
Args:
|
||||
task: the installation build task for a package
|
||||
task: the installation task for a package
|
||||
install_status: the installation status for the package"""
|
||||
|
||||
explicit = task.explicit
|
||||
install_args = task.request.install_args
|
||||
cache_only = task.cache_only
|
||||
use_cache = task.use_cache
|
||||
tests = install_args.get("tests", False)
|
||||
assert isinstance(tests, (bool, list)) # make mypy happy.
|
||||
unsigned: Optional[bool] = install_args.get("unsigned")
|
||||
|
||||
pkg, pkg_id = task.pkg, task.pkg_id
|
||||
|
||||
tty.msg(install_msg(pkg_id, self.pid, install_status))
|
||||
task.start = task.start or time.time()
|
||||
task.status = STATUS_INSTALLING
|
||||
|
||||
# Use the binary cache if requested
|
||||
if use_cache:
|
||||
if _install_from_cache(pkg, explicit, unsigned):
|
||||
self._update_installed(task)
|
||||
return
|
||||
elif cache_only:
|
||||
raise spack.error.InstallError(
|
||||
"No binary found when cache-only was specified", pkg=pkg
|
||||
)
|
||||
else:
|
||||
tty.msg(f"No binary for {pkg_id} found: installing from source")
|
||||
|
||||
pkg.run_tests = tests if isinstance(tests, bool) else pkg.name in tests
|
||||
|
||||
# hook that allows tests to inspect the Package before installation
|
||||
# see unit_test_check() docs.
|
||||
if not pkg.unit_test_check():
|
||||
return
|
||||
|
||||
try:
|
||||
self._setup_install_dir(pkg)
|
||||
|
||||
# Create stage object now and let it be serialized for the child process. That
|
||||
# way monkeypatch in tests works correctly.
|
||||
pkg.stage
|
||||
|
||||
# Create a child process to do the actual installation.
|
||||
# Preserve verbosity settings across installs.
|
||||
spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
|
||||
pkg, build_process, install_args
|
||||
)
|
||||
# Note: PARENT of the build process adds the new package to
|
||||
# the database, so that we don't need to re-read from file.
|
||||
spack.store.STORE.db.add(pkg.spec, explicit=explicit)
|
||||
|
||||
except spack.error.StopPhase as e:
|
||||
# A StopPhase exception means that the installer was asked to stop early from clients,
|
||||
# and is not an error at this point
|
||||
pid = f"{self.pid}: " if tty.show_pid() else ""
|
||||
tty.debug(f"{pid}{str(e)}")
|
||||
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
|
||||
rc = task.execute(install_status)
|
||||
if rc == ExecuteResult.MISSING_BUILD_SPEC:
|
||||
self._requeue_with_build_spec_tasks(task)
|
||||
else: # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED
|
||||
self._update_installed(task)
|
||||
|
||||
def _next_is_pri0(self) -> bool:
"""
Determine if the next build task has priority 0
Determine if the next task has priority 0

Return:
True if it does, False otherwise
@@ -1615,31 +1774,31 @@ def _next_is_pri0(self) -> bool:
task = self.build_pq[0][1]
return task.priority == 0

def _pop_task(self) -> Optional[BuildTask]:
def _pop_task(self) -> Optional[Task]:
"""
Remove and return the lowest priority build task.
Remove and return the lowest priority task.

Source: Variant of function at docs.python.org/2/library/heapq.html
"""
while self.build_pq:
task = heapq.heappop(self.build_pq)[1]
if task.status != STATUS_REMOVED:
if task.status != BuildStatus.REMOVED:
del self.build_tasks[task.pkg_id]
task.status = STATUS_DEQUEUED
task.status = BuildStatus.DEQUEUED
return task
return None
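The _pop_task/_push_task pair above follows the standard "priority queue with lazy removal" recipe referenced in the docstrings (docs.python.org heapq): removed entries are only marked, and discarded the next time they surface at the top of the heap. A minimal standalone sketch of that pattern, using hypothetical names rather than Spack's Task/BuildStatus classes:

    import heapq
    import itertools

    REMOVED = "<removed>"  # sentinel marking an invalidated entry

    class LazyPriorityQueue:
        """Priority queue where entries are invalidated in place instead of re-heapified."""

        def __init__(self):
            self._heap = []                     # [priority, sequence, item] entries
            self._entries = {}                  # item -> entry, for O(1) invalidation
            self._counter = itertools.count()   # tie-breaker so equal priorities stay stable

        def push(self, item, priority):
            # Invalidate any previous entry for the same item, then add a fresh one
            if item in self._entries:
                self.remove(item)
            entry = [priority, next(self._counter), item]
            self._entries[item] = entry
            heapq.heappush(self._heap, entry)

        def remove(self, item):
            # Mark the entry as removed; pop() discards it lazily later
            entry = self._entries.pop(item)
            entry[-1] = REMOVED

        def pop(self):
            while self._heap:
                _, _, item = heapq.heappop(self._heap)
                if item is not REMOVED:
                    del self._entries[item]
                    return item
            return None

    q = LazyPriorityQueue()
    q.push("pkg-a", 2)
    q.push("pkg-b", 0)
    assert q.pop() == "pkg-b"   # lowest priority value comes out first

The requeue logic in the diff works the same way: a new attempt is pushed with a fresh sequence number while the stale entry stays in the heap marked as removed.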
|
||||
def _push_task(self, task: BuildTask) -> None:
|
||||
def _push_task(self, task: Task) -> None:
|
||||
"""
|
||||
Push (or queue) the specified build task for the package.
|
||||
Push (or queue) the specified task for the package.
|
||||
|
||||
Source: Customization of "add_task" function at
|
||||
docs.python.org/2/library/heapq.html
|
||||
|
||||
Args:
|
||||
task: the installation build task for a package
|
||||
task: the installation task for a package
|
||||
"""
|
||||
msg = "{0} a build task for {1} with status '{2}'"
|
||||
msg = "{0} a task for {1} with status '{2}'"
|
||||
skip = "Skipping requeue of task for {0}: {1}"
|
||||
|
||||
# Ensure do not (re-)queue installed or failed packages whose status
|
||||
@@ -1652,9 +1811,11 @@ def _push_task(self, task: BuildTask) -> None:
|
||||
tty.debug(skip.format(task.pkg_id, "failed"))
|
||||
return
|
||||
|
||||
# Remove any associated build task since its sequence will change
|
||||
# Remove any associated task since its sequence will change
|
||||
self._remove_task(task.pkg_id)
|
||||
desc = "Queueing" if task.attempts == 0 else "Requeueing"
|
||||
desc = (
|
||||
"Queueing" if task.attempts == 1 else f"Requeueing ({ordinal(task.attempts)} attempt)"
|
||||
)
|
||||
tty.debug(msg.format(desc, task.pkg_id, task.status))
|
||||
|
||||
# Now add the new task to the queue with a new sequence number to
|
||||
@@ -1685,9 +1846,9 @@ def _release_lock(self, pkg_id: str) -> None:
|
||||
except Exception as exc:
|
||||
tty.warn(err.format(exc.__class__.__name__, ltype, pkg_id, str(exc)))
|
||||
|
||||
def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
|
||||
def _remove_task(self, pkg_id: str) -> Optional[Task]:
|
||||
"""
|
||||
Mark the existing package build task as being removed and return it.
|
||||
Mark the existing package task as being removed and return it.
|
||||
Raises KeyError if not found.
|
||||
|
||||
Source: Variant of function at docs.python.org/2/library/heapq.html
|
||||
@@ -1696,71 +1857,39 @@ def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
|
||||
pkg_id: identifier for the package to be removed
|
||||
"""
|
||||
if pkg_id in self.build_tasks:
|
||||
tty.debug(f"Removing build task for {pkg_id} from list")
|
||||
tty.debug(f"Removing task for {pkg_id} from list")
|
||||
task = self.build_tasks.pop(pkg_id)
|
||||
task.status = STATUS_REMOVED
|
||||
task.status = BuildStatus.REMOVED
|
||||
return task
|
||||
else:
|
||||
return None
|
||||
|
||||
def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
|
||||
"""
|
||||
Requeues a task that appears to be in progress by another process.
|
||||
|
||||
Args:
|
||||
task (BuildTask): the installation build task for a package
|
||||
task (Task): the installation task for a package
|
||||
"""
|
||||
if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]:
|
||||
if task.status not in [BuildStatus.INSTALLED, BuildStatus.INSTALLING]:
|
||||
tty.debug(
|
||||
f"{install_msg(task.pkg_id, self.pid, install_status)} "
|
||||
"in progress by another process"
|
||||
)
|
||||
|
||||
new_task = task.next_attempt(self.installed)
|
||||
new_task.status = STATUS_INSTALLING
|
||||
new_task.status = BuildStatus.INSTALLING
|
||||
self._push_task(new_task)
|
||||
|
||||
def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
|
||||
"""
|
||||
Create and ensure proper access controls for the install directory.
|
||||
Write a small metadata file with the current spack environment.
|
||||
|
||||
Args:
|
||||
pkg: the package to be built and installed
|
||||
"""
|
||||
if not os.path.exists(pkg.spec.prefix):
|
||||
path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
|
||||
tty.debug(f"Creating the installation directory {path}")
|
||||
spack.store.STORE.layout.create_install_directory(pkg.spec)
|
||||
else:
|
||||
# Set the proper group for the prefix
|
||||
group = prefs.get_package_group(pkg.spec)
|
||||
if group:
|
||||
fs.chgrp(pkg.spec.prefix, group)
|
||||
|
||||
# Set the proper permissions.
|
||||
# This has to be done after group because changing groups blows
|
||||
# away the sticky group bit on the directory
|
||||
mode = os.stat(pkg.spec.prefix).st_mode
|
||||
perms = prefs.get_package_dir_permissions(pkg.spec)
|
||||
if mode != perms:
|
||||
os.chmod(pkg.spec.prefix, perms)
|
||||
|
||||
# Ensure the metadata path exists as well
|
||||
fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)
|
||||
|
||||
# Always write host environment - we assume this can change
|
||||
spack.store.STORE.layout.write_host_environment(pkg.spec)
|
||||
|
||||
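The prefix-permission logic in _setup_install_dir deliberately sets the group before the mode, because changing the group can clear the setgid ("sticky group") bit that the directory permissions are meant to carry. A minimal standalone sketch of the same ordering; the path, group name, and mode below are hypothetical, not values from the diff:

    import os
    import shutil

    def set_prefix_ownership(prefix: str, group: str, perms: int) -> None:
        # 1) group first: changing the group may drop the setgid bit on the directory
        shutil.chown(prefix, group=group)
        # 2) then (re)apply the mode so setgid and friends end up as requested
        if os.stat(prefix).st_mode & 0o7777 != perms:
            os.chmod(prefix, perms)

    # e.g. a group-writable, setgid prefix so new files inherit the group:
    # set_prefix_ownership("/opt/spack/prefix", "spack", 0o2775)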
def _update_failed(
|
||||
self, task: BuildTask, mark: bool = False, exc: Optional[BaseException] = None
|
||||
self, task: Task, mark: bool = False, exc: Optional[BaseException] = None
|
||||
) -> None:
|
||||
"""
|
||||
Update the task and transitive dependents as failed; optionally mark
|
||||
externally as failed; and remove associated build tasks.
|
||||
externally as failed; and remove associated tasks.
|
||||
|
||||
Args:
|
||||
task: the build task for the failed package
|
||||
task: the task for the failed package
|
||||
mark: ``True`` if the package and its dependencies are to
|
||||
be marked as "failed", otherwise, ``False``
|
||||
exc: optional exception if associated with the failure
|
||||
@@ -1772,34 +1901,34 @@ def _update_failed(
|
||||
self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
|
||||
else:
|
||||
self.failed[pkg_id] = None
|
||||
task.status = STATUS_FAILED
|
||||
task.status = BuildStatus.FAILED
|
||||
|
||||
for dep_id in task.dependents:
|
||||
if dep_id in self.build_tasks:
|
||||
tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed")
|
||||
# Ensure the dependent's uninstalled dependents are
|
||||
# up-to-date and their build tasks removed.
|
||||
# up-to-date and their tasks removed.
|
||||
dep_task = self.build_tasks[dep_id]
|
||||
self._update_failed(dep_task, mark)
|
||||
self._remove_task(dep_id)
|
||||
else:
|
||||
tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed")
|
||||
tty.debug(f"No task for {dep_id} to skip since {pkg_id} failed")
|
||||
|
||||
def _update_installed(self, task: BuildTask) -> None:
|
||||
def _update_installed(self, task: Task) -> None:
|
||||
"""
|
||||
Mark the task as installed and ensure dependent build tasks are aware.
|
||||
Mark the task as installed and ensure dependent tasks are aware.
|
||||
|
||||
Args:
|
||||
task (BuildTask): the build task for the installed package
|
||||
task: the task for the installed package
|
||||
"""
|
||||
task.status = STATUS_INSTALLED
|
||||
task.status = BuildStatus.INSTALLED
|
||||
self._flag_installed(task.pkg, task.dependents)
|
||||
|
||||
def _flag_installed(
|
||||
self, pkg: "spack.package_base.PackageBase", dependent_ids: Optional[Set[str]] = None
|
||||
) -> None:
|
||||
"""
|
||||
Flag the package as installed and ensure known by all build tasks of
|
||||
Flag the package as installed and ensure known by all tasks of
|
||||
known dependents.
|
||||
|
||||
Args:
|
||||
@@ -1827,7 +1956,7 @@ def _flag_installed(
|
||||
dep_task = self.build_tasks[dep_id]
|
||||
self._push_task(dep_task.next_attempt(self.installed))
|
||||
else:
|
||||
tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success")
|
||||
tty.debug(f"{dep_id} has no task to update for {pkg_id}'s success")
|
||||
|
||||
def _init_queue(self) -> None:
|
||||
"""Initialize the build queue from the list of build requests."""
|
||||
@@ -1846,8 +1975,9 @@ def _init_queue(self) -> None:
|
||||
task = self.build_tasks[dep_id]
|
||||
for dependent_id in dependents.difference(task.dependents):
|
||||
task.add_dependent(dependent_id)
|
||||
self.all_dependencies = all_dependencies
|
||||
|
||||
def _install_action(self, task: BuildTask) -> int:
|
||||
def _install_action(self, task: Task) -> InstallAction:
|
||||
"""
|
||||
Determine whether the installation should be overwritten (if it already
|
||||
exists) or skipped (if has been handled by another process).
|
||||
@@ -1995,7 +2125,6 @@ def install(self) -> None:
|
||||
self._update_installed(task)
|
||||
path = spack.util.path.debug_padded_filter(pkg.prefix)
|
||||
_print_installed_pkg(path)
|
||||
|
||||
else:
|
||||
# At this point we've failed to get a write or a read
|
||||
# lock, which means another process has taken a write
|
||||
@@ -2035,8 +2164,6 @@ def install(self) -> None:
|
||||
# wrapper -- silence mypy
|
||||
OverwriteInstall(self, spack.store.STORE.db, task, install_status).install() # type: ignore[arg-type] # noqa: E501
|
||||
|
||||
self._update_installed(task)
|
||||
|
||||
# If we installed then we should keep the prefix
|
||||
stop_before_phase = getattr(pkg, "stop_before_phase", None)
|
||||
last_phase = getattr(pkg, "last_phase", None)
|
||||
@@ -2080,13 +2207,15 @@ def install(self) -> None:
|
||||
)
|
||||
# Terminate if requested to do so on the first failure.
|
||||
if self.fail_fast:
|
||||
raise spack.error.InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
|
||||
raise spack.error.InstallError(
|
||||
f"{fail_fast_err}: {str(exc)}", pkg=pkg
|
||||
) from exc
|
||||
|
||||
# Terminate when a single build request has failed, or summarize errors later.
|
||||
if task.is_build_request:
|
||||
if single_requested_spec:
|
||||
raise
|
||||
failed_build_requests.append((pkg, pkg_id, str(exc)))
|
||||
failed_build_requests.append((pkg, pkg_id, exc))
|
||||
|
||||
finally:
|
||||
# Remove the install prefix if anything went wrong during
|
||||
@@ -2096,7 +2225,8 @@ def install(self) -> None:
|
||||
|
||||
# Perform basic task cleanup for the installed spec to
|
||||
# include downgrading the write to a read lock
|
||||
self._cleanup_task(pkg)
|
||||
if pkg.spec.installed:
|
||||
self._cleanup_task(pkg)
|
||||
|
||||
# Cleanup, which includes releasing all of the read locks
|
||||
self._cleanup_all_tasks()
|
||||
@@ -2112,6 +2242,9 @@ def install(self) -> None:
|
||||
if failed_build_requests or missing:
|
||||
for _, pkg_id, err in failed_build_requests:
|
||||
tty.error(f"{pkg_id}: {err}")
|
||||
if spack.error.SHOW_BACKTRACE:
|
||||
# note: in python 3.10+ this can just be print_exception(err)
|
||||
traceback.print_exception(type(err), err, err.__traceback__)
|
||||
|
||||
for _, pkg_id in missing:
|
||||
tty.error(f"{pkg_id}: Package was not installed")
|
||||
@@ -2365,6 +2498,15 @@ def build_process(pkg: "spack.package_base.PackageBase", install_args: dict) ->
|
||||
|
||||
def deprecate(spec: "spack.spec.Spec", deprecator: "spack.spec.Spec", link_fn) -> None:
|
||||
"""Deprecate this package in favor of deprecator spec"""
|
||||
# Here we assume we don't deprecate across different stores, and that same hash
|
||||
# means same binary artifacts
|
||||
if spec.dag_hash() == deprecator.dag_hash():
|
||||
return
|
||||
|
||||
# We can't really have control over external specs, and cannot link anything in their place
|
||||
if spec.external:
|
||||
return
|
||||
|
||||
# Install deprecator if it isn't installed already
|
||||
if not spack.store.STORE.db.query(deprecator):
|
||||
PackageInstaller([deprecator.package], explicit=True).install()
|
||||
@@ -2395,7 +2537,7 @@ def __init__(
|
||||
self,
|
||||
installer: PackageInstaller,
|
||||
database: spack.database.Database,
|
||||
task: BuildTask,
|
||||
task: Task,
|
||||
install_status: InstallStatus,
|
||||
):
|
||||
self.installer = installer
|
||||
|
@@ -102,9 +102,6 @@
|
||||
|
||||
spack_ld_library_path = os.environ.get("LD_LIBRARY_PATH", "")
|
||||
|
||||
#: Whether to print backtraces on error
|
||||
SHOW_BACKTRACE = False
|
||||
|
||||
|
||||
def add_all_commands(parser):
|
||||
"""Add all spack subcommands to the parser."""
|
||||
@@ -527,8 +524,7 @@ def setup_main_options(args):
|
||||
|
||||
if args.debug or args.backtrace:
|
||||
spack.error.debug = True
|
||||
global SHOW_BACKTRACE
|
||||
SHOW_BACKTRACE = True
|
||||
spack.error.SHOW_BACKTRACE = True
|
||||
|
||||
if args.debug:
|
||||
spack.util.debug.register_interrupt_handler()
|
||||
@@ -1021,19 +1017,19 @@ def main(argv=None):
|
||||
e.die() # gracefully die on any SpackErrors
|
||||
|
||||
except KeyboardInterrupt:
|
||||
if spack.config.get("config:debug") or SHOW_BACKTRACE:
|
||||
if spack.config.get("config:debug") or spack.error.SHOW_BACKTRACE:
|
||||
raise
|
||||
sys.stderr.write("\n")
|
||||
tty.error("Keyboard interrupt.")
|
||||
return signal.SIGINT.value
|
||||
|
||||
except SystemExit as e:
|
||||
if spack.config.get("config:debug") or SHOW_BACKTRACE:
|
||||
if spack.config.get("config:debug") or spack.error.SHOW_BACKTRACE:
|
||||
traceback.print_exc()
|
||||
return e.code
|
||||
|
||||
except Exception as e:
|
||||
if spack.config.get("config:debug") or SHOW_BACKTRACE:
|
||||
if spack.config.get("config:debug") or spack.error.SHOW_BACKTRACE:
|
||||
raise
|
||||
tty.error(e)
|
||||
return 3
|
||||
|
@@ -89,9 +89,8 @@ def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
'"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
url, ", ".join(supported_url_schemes)
)
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."
)
return Mirror(url)

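The rewritten error message above sits on a simple scheme check via urllib.parse. A standalone version of the same validation; the supported_url_schemes tuple below is illustrative only, the authoritative list lives in Spack's mirror module:

    import urllib.parse

    supported_url_schemes = ("file", "http", "https", "ftp", "s3", "gs", "oci")  # illustrative

    def validate_mirror_url(url: str) -> str:
        if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
            raise ValueError(
                f'"{url}" is not a valid mirror URL. '
                f"Scheme must be one of {supported_url_schemes}."
            )
        return url

    validate_mirror_url("https://mirror.example.org")        # ok
    # validate_mirror_url("mailto:someone@example.com")      # raises ValueError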
@@ -759,7 +758,7 @@ def require_mirror_name(mirror_name):
|
||||
"""Find a mirror by name and raise if it does not exist"""
|
||||
mirror = spack.mirror.MirrorCollection().get(mirror_name)
|
||||
if not mirror:
|
||||
raise ValueError('no mirror named "{0}"'.format(mirror_name))
|
||||
raise ValueError(f'no mirror named "{mirror_name}"')
|
||||
return mirror
|
||||
|
||||
|
||||
|
@@ -527,7 +527,8 @@ def use_name(self):
|
||||
parts = name.split("/")
|
||||
name = os.path.join(*parts)
|
||||
# Add optional suffixes based on constraints
|
||||
path_elements = [name] + self.conf.suffixes
|
||||
path_elements = [name]
|
||||
path_elements.extend(map(self.spec.format, self.conf.suffixes))
|
||||
return "-".join(path_elements)
|
||||
|
||||
@property
|
||||
|
@@ -55,17 +55,9 @@
|
||||
import spack.util.web
|
||||
from spack.error import InstallError, NoURLError, PackageError
|
||||
from spack.filesystem_view import YamlFilesystemView
|
||||
from spack.install_test import (
|
||||
PackageTest,
|
||||
TestFailure,
|
||||
TestStatus,
|
||||
TestSuite,
|
||||
cache_extra_test_sources,
|
||||
install_test_root,
|
||||
)
|
||||
from spack.install_test import PackageTest, TestSuite
|
||||
from spack.solver.version_order import concretization_version_order
|
||||
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.package_hash import package_hash
|
||||
from spack.version import GitVersion, StandardVersion
|
||||
|
||||
@@ -1355,18 +1347,6 @@ def install_configure_args_path(self):
|
||||
"""Return the configure args file path on successful installation."""
|
||||
return os.path.join(self.metadata_dir, _spack_configure_argsfile)
|
||||
|
||||
# TODO (post-34236): Update tests and all packages that use this as a
|
||||
# TODO (post-34236): package method to the function already available
|
||||
# TODO (post-34236): to packages. Once done, remove this property.
|
||||
@property
|
||||
def install_test_root(self):
|
||||
"""Return the install test root directory."""
|
||||
tty.warn(
|
||||
"The 'pkg.install_test_root' property is deprecated with removal "
|
||||
"expected v0.23. Use 'install_test_root(pkg)' instead."
|
||||
)
|
||||
return install_test_root(self)
|
||||
|
||||
def archive_install_test_log(self):
|
||||
"""Archive the install-phase test log, if present."""
|
||||
if getattr(self, "tester", None):
|
||||
@@ -1875,13 +1855,22 @@ def _has_make_target(self, target):
|
||||
#
|
||||
# BSD Make:
|
||||
# make: don't know how to make test. Stop
|
||||
#
|
||||
# Note: "Stop." is not printed when running a Make jobserver (spack env depfile) that runs
|
||||
# with `make -k/--keep-going`
|
||||
missing_target_msgs = [
|
||||
"No rule to make target `{0}'. Stop.",
|
||||
"No rule to make target '{0}'. Stop.",
|
||||
"don't know how to make {0}. Stop",
|
||||
"No rule to make target `{0}'.",
|
||||
"No rule to make target '{0}'.",
|
||||
"don't know how to make {0}.",
|
||||
]
|
||||
|
||||
kwargs = {"fail_on_error": False, "output": os.devnull, "error": str}
|
||||
kwargs = {
|
||||
"fail_on_error": False,
|
||||
"output": os.devnull,
|
||||
"error": str,
|
||||
# Remove MAKEFLAGS to avoid inherited flags from Make jobserver (spack env depfile)
|
||||
"extra_env": {"MAKEFLAGS": ""},
|
||||
}
|
||||
|
||||
stderr = make("-n", target, **kwargs)
|
||||
|
||||
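The _has_make_target change above broadens the set of "missing target" messages and clears MAKEFLAGS so a parent jobserver (e.g. spack env depfile running make -k) does not alter GNU/BSD make's error output. A rough standalone equivalent using subprocess; this is a sketch under assumptions, since Spack actually calls make through its own Executable wrapper and matches the exact message list shown above:

    import os
    import subprocess

    MISSING_TARGET_MSGS = [
        "No rule to make target `{0}'.",
        "No rule to make target '{0}'.",
        "don't know how to make {0}.",
    ]

    def has_make_target(target: str, cwd: str = ".") -> bool:
        """Return True if `make -n <target>` does not complain about a missing target."""
        env = dict(os.environ)
        env["MAKEFLAGS"] = ""  # avoid jobserver flags inherited from a parent make
        proc = subprocess.run(
            ["make", "-n", target],
            cwd=cwd,
            env=env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
            text=True,
        )
        stderr = proc.stderr or ""
        return not any(msg.format(target) in stderr for msg in MISSING_TARGET_MSGS)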
@@ -1959,31 +1948,6 @@ def _resource_stage(self, resource):
|
||||
resource_stage_folder = "-".join(pieces)
|
||||
return resource_stage_folder
|
||||
|
||||
# TODO (post-34236): Update tests and all packages that use this as a
|
||||
# TODO (post-34236): package method to the routine made available to
|
||||
# TODO (post-34236): packages. Once done, remove this method.
|
||||
def cache_extra_test_sources(self, srcs):
|
||||
"""Copy relative source paths to the corresponding install test subdir
|
||||
|
||||
This method is intended as an optional install test setup helper for
|
||||
grabbing source files/directories during the installation process and
|
||||
copying them to the installation test subdirectory for subsequent use
|
||||
during install testing.
|
||||
|
||||
Args:
|
||||
srcs (str or list): relative path for files and or
|
||||
subdirectories located in the staged source path that are to
|
||||
be copied to the corresponding location(s) under the install
|
||||
testing directory.
|
||||
"""
|
||||
msg = (
|
||||
"'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
|
||||
"expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
|
||||
"instead."
|
||||
)
|
||||
warnings.warn(msg)
|
||||
cache_extra_test_sources(self, srcs)
|
||||
|
||||
def do_test(self, dirty=False, externals=False):
|
||||
if self.test_requires_compiler:
|
||||
compilers = spack.compilers.compilers_for_spec(
|
||||
@@ -2007,178 +1971,6 @@ def do_test(self, dirty=False, externals=False):
|
||||
|
||||
self.tester.stand_alone_tests(kwargs)
|
||||
|
||||
# TODO (post-34236): Remove this deprecated method when eliminate test,
|
||||
# TODO (post-34236): run_test, etc.
|
||||
@property
|
||||
def _test_deprecated_warning(self):
|
||||
alt = f"Use any name starting with 'test_' instead in {self.spec.name}."
|
||||
return f"The 'test' method is deprecated. {alt}"
|
||||
|
||||
# TODO (post-34236): Remove this deprecated method when eliminate test,
|
||||
# TODO (post-34236): run_test, etc.
|
||||
def test(self):
|
||||
# Defer tests to virtual and concrete packages
|
||||
warnings.warn(self._test_deprecated_warning)
|
||||
|
||||
# TODO (post-34236): Remove this deprecated method when eliminate test,
|
||||
# TODO (post-34236): run_test, etc.
|
||||
def run_test(
|
||||
self,
|
||||
exe,
|
||||
options=[],
|
||||
expected=[],
|
||||
status=0,
|
||||
installed=False,
|
||||
purpose=None,
|
||||
skip_missing=False,
|
||||
work_dir=None,
|
||||
):
|
||||
"""Run the test and confirm the expected results are obtained
|
||||
|
||||
Log any failures and continue, they will be re-raised later
|
||||
|
||||
Args:
|
||||
exe (str): the name of the executable
|
||||
options (str or list): list of options to pass to the runner
|
||||
expected (str or list): list of expected output strings.
|
||||
Each string is a regex expected to match part of the output.
|
||||
status (int or list): possible passing status values
|
||||
with 0 meaning the test is expected to succeed
|
||||
installed (bool): if ``True``, the executable must be in the
|
||||
install prefix
|
||||
purpose (str): message to display before running test
|
||||
skip_missing (bool): skip the test if the executable is not
|
||||
in the install prefix bin directory or the provided work_dir
|
||||
work_dir (str or None): path to the smoke test directory
|
||||
"""
|
||||
|
||||
def test_title(purpose, test_name):
|
||||
if not purpose:
|
||||
return f"test: {test_name}: execute {test_name}"
|
||||
|
||||
match = re.search(r"test: ([^:]*): (.*)", purpose)
|
||||
if match:
|
||||
# The test title has all the expected parts
|
||||
return purpose
|
||||
|
||||
match = re.search(r"test: (.*)", purpose)
|
||||
if match:
|
||||
reason = match.group(1)
|
||||
return f"test: {test_name}: {reason}"
|
||||
|
||||
return f"test: {test_name}: {purpose}"
|
||||
|
||||
base_exe = os.path.basename(exe)
|
||||
alternate = f"Use 'test_part' instead for {self.spec.name} to process {base_exe}."
|
||||
warnings.warn(f"The 'run_test' method is deprecated. {alternate}")
|
||||
|
||||
extra = re.compile(r"[\s,\- ]")
|
||||
details = (
|
||||
[extra.sub("", options)]
|
||||
if isinstance(options, str)
|
||||
else [extra.sub("", os.path.basename(opt)) for opt in options]
|
||||
)
|
||||
details = "_".join([""] + details) if details else ""
|
||||
test_name = f"test_{base_exe}{details}"
|
||||
tty.info(test_title(purpose, test_name), format="g")
|
||||
|
||||
wdir = "." if work_dir is None else work_dir
|
||||
with fsys.working_dir(wdir, create=True):
|
||||
try:
|
||||
runner = which(exe)
|
||||
if runner is None and skip_missing:
|
||||
self.tester.status(test_name, TestStatus.SKIPPED, f"{exe} is missing")
|
||||
return
|
||||
assert runner is not None, f"Failed to find executable '{exe}'"
|
||||
|
||||
self._run_test_helper(runner, options, expected, status, installed, purpose)
|
||||
self.tester.status(test_name, TestStatus.PASSED, None)
|
||||
return True
|
||||
except (AssertionError, BaseException) as e:
|
||||
# print a summary of the error to the log file
|
||||
# so that cdash and junit reporters know about it
|
||||
exc_type, _, tb = sys.exc_info()
|
||||
|
||||
self.tester.status(test_name, TestStatus.FAILED, str(e))
|
||||
|
||||
import traceback
|
||||
|
||||
# remove the current call frame to exclude the extract_stack
|
||||
# call from the error
|
||||
stack = traceback.extract_stack()[:-1]
|
||||
|
||||
# Package files have a line added at import time, so we re-read
|
||||
# the file to make line numbers match. We have to subtract two
|
||||
# from the line number because the original line number is
|
||||
# inflated once by the import statement and the lines are
|
||||
# displaced one by the import statement.
|
||||
for i, entry in enumerate(stack):
|
||||
filename, lineno, function, text = entry
|
||||
if spack.repo.is_package_file(filename):
|
||||
with open(filename, "r") as f:
|
||||
lines = f.readlines()
|
||||
new_lineno = lineno - 2
|
||||
text = lines[new_lineno]
|
||||
stack[i] = (filename, new_lineno, function, text)
|
||||
|
||||
# Format the stack to print and print it
|
||||
out = traceback.format_list(stack)
|
||||
for line in out:
|
||||
print(line.rstrip("\n"))
|
||||
|
||||
if exc_type is spack.util.executable.ProcessError:
|
||||
out = io.StringIO()
|
||||
spack.build_environment.write_log_summary(
|
||||
out, "test", self.tester.test_log_file, last=1
|
||||
)
|
||||
m = out.getvalue()
|
||||
else:
|
||||
# We're below the package context, so get context from
|
||||
# stack instead of from traceback.
|
||||
# The traceback is truncated here, so we can't use it to
|
||||
# traverse the stack.
|
||||
context = spack.build_environment.get_package_context(tb)
|
||||
m = "\n".join(context) if context else ""
|
||||
|
||||
exc = e # e is deleted after this block
|
||||
|
||||
# If we fail fast, raise another error
|
||||
if spack.config.get("config:fail_fast", False):
|
||||
raise TestFailure([(exc, m)])
|
||||
else:
|
||||
self.tester.add_failure(exc, m)
|
||||
return False
|
||||
|
||||
# TODO (post-34236): Remove this deprecated method when eliminate test,
|
||||
# TODO (post-34236): run_test, etc.
|
||||
def _run_test_helper(self, runner, options, expected, status, installed, purpose):
|
||||
status = [status] if isinstance(status, int) else status
|
||||
expected = [expected] if isinstance(expected, str) else expected
|
||||
options = [options] if isinstance(options, str) else options
|
||||
|
||||
if installed:
|
||||
msg = f"Executable '{runner.name}' expected in prefix, "
|
||||
msg += f"found in {runner.path} instead"
|
||||
assert runner.path.startswith(self.spec.prefix), msg
|
||||
|
||||
tty.msg(f"Expecting return code in {status}")
|
||||
|
||||
try:
|
||||
output = runner(*options, output=str.split, error=str.split)
|
||||
|
||||
assert 0 in status, f"Expected {runner.name} execution to fail"
|
||||
except ProcessError as err:
|
||||
output = str(err)
|
||||
match = re.search(r"exited with status ([0-9]+)", output)
|
||||
if not (match and int(match.group(1)) in status):
|
||||
raise
|
||||
|
||||
for check in expected:
|
||||
cmd = " ".join([runner.name] + options)
|
||||
msg = f"Expected '{check}' to match output of `{cmd}`"
|
||||
msg += f"\n\nOutput: {output}"
|
||||
assert re.search(check, output), msg
|
||||
|
||||
def unit_test_check(self):
|
||||
"""Hook for unit tests to assert things about package internals.
|
||||
|
||||
|
@@ -205,23 +205,33 @@ def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefi
paths_to_paths dictionary which maps all of the old paths to new paths
"""
paths_to_paths = dict()
# Sort from longest path to shortest, to ensure we try /foo/bar/baz before /foo/bar
prefix_iteration_order = sorted(prefix_to_prefix, key=len, reverse=True)
for orig_rpath in orig_rpaths:
if orig_rpath.startswith(old_layout_root):
for old_prefix, new_prefix in prefix_to_prefix.items():
for old_prefix in prefix_iteration_order:
new_prefix = prefix_to_prefix[old_prefix]
if orig_rpath.startswith(old_prefix):
new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
paths_to_paths[orig_rpath] = new_rpath
break
else:
paths_to_paths[orig_rpath] = orig_rpath

if idpath:
for old_prefix, new_prefix in prefix_to_prefix.items():
for old_prefix in prefix_iteration_order:
new_prefix = prefix_to_prefix[old_prefix]
if idpath.startswith(old_prefix):
paths_to_paths[idpath] = re.sub(re.escape(old_prefix), new_prefix, idpath)
break

for dep in deps:
for old_prefix, new_prefix in prefix_to_prefix.items():
for old_prefix in prefix_iteration_order:
new_prefix = prefix_to_prefix[old_prefix]
if dep.startswith(old_prefix):
paths_to_paths[dep] = re.sub(re.escape(old_prefix), new_prefix, dep)
break

if dep.startswith("@"):
paths_to_paths[dep] = dep

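The key change in macho_find_paths is iterating prefixes from longest to shortest, so a nested prefix is tried before its parent. A tiny illustration of why the ordering matters; the paths and mapping below are hypothetical, not taken from the diff:

    import re

    prefix_to_prefix = {
        "/old/foo": "/new/foo",
        "/old/foo/bar": "/elsewhere/bar",   # a nested prefix relocating somewhere else
    }

    def remap(path, mapping):
        # Longest prefix first, so the most specific mapping wins
        for old in sorted(mapping, key=len, reverse=True):
            if path.startswith(old):
                return re.sub(re.escape(old), mapping[old], path)
        return path

    print(remap("/old/foo/bar/libx.dylib", prefix_to_prefix))   # /elsewhere/bar/libx.dylib
    # Without the sort, plain dict order could match "/old/foo" first and produce
    # /new/foo/bar/libx.dylib, silently pointing at the wrong install prefix.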
@@ -270,40 +280,14 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
|
||||
install_name_tool = executable.Executable("install_name_tool")
|
||||
install_name_tool(*args)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def modify_object_macholib(cur_path, paths_to_paths):
|
||||
"""
|
||||
This function is used when install machO buildcaches on linux by
|
||||
rewriting mach-o loader commands for dependency library paths of
|
||||
mach-o binaries and the id path for mach-o libraries.
|
||||
Rewritting of rpaths is handled by replace_prefix_bin.
|
||||
Inputs
|
||||
mach-o binary to be modified
|
||||
dictionary mapping paths in old install layout to new install layout
|
||||
"""
|
||||
|
||||
dll = macholib.MachO.MachO(cur_path)
|
||||
dll.rewriteLoadCommands(paths_to_paths.get)
|
||||
|
||||
try:
|
||||
f = open(dll.filename, "rb+")
|
||||
for header in dll.headers:
|
||||
f.seek(0)
|
||||
dll.write(f)
|
||||
f.seek(0, 2)
|
||||
f.flush()
|
||||
f.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return
|
||||
|
||||
|
||||
def macholib_get_paths(cur_path):
|
||||
"""Get rpaths, dependent libraries, and library id of mach-o objects."""
|
||||
headers = macholib.MachO.MachO(cur_path).headers
|
||||
headers = []
|
||||
try:
|
||||
headers = macholib.MachO.MachO(cur_path).headers
|
||||
except ValueError:
|
||||
pass
|
||||
if not headers:
|
||||
tty.warn("Failed to read Mach-O headers: {0}".format(cur_path))
|
||||
commands = []
|
||||
@@ -415,10 +399,7 @@ def relocate_macho_binaries(
|
||||
# normalized paths
|
||||
rel_to_orig = macho_make_paths_normal(orig_path_name, rpaths, deps, idpath)
|
||||
# replace the relativized paths with normalized paths
|
||||
if sys.platform == "darwin":
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
|
||||
else:
|
||||
modify_object_macholib(path_name, rel_to_orig)
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
|
||||
# get the normalized paths in the mach-o binary
|
||||
rpaths, deps, idpath = macholib_get_paths(path_name)
|
||||
# get the mapping of paths in old prefix to path in new prefix
|
||||
@@ -426,10 +407,7 @@ def relocate_macho_binaries(
|
||||
rpaths, deps, idpath, old_layout_root, prefix_to_prefix
|
||||
)
|
||||
# replace the old paths with new paths
|
||||
if sys.platform == "darwin":
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
|
||||
else:
|
||||
modify_object_macholib(path_name, paths_to_paths)
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
|
||||
# get the new normalized path in the mach-o binary
|
||||
rpaths, deps, idpath = macholib_get_paths(path_name)
|
||||
# get the mapping of paths to relative paths in the new prefix
|
||||
@@ -437,10 +415,7 @@ def relocate_macho_binaries(
|
||||
path_name, new_layout_root, rpaths, deps, idpath
|
||||
)
|
||||
# replace the new paths with relativized paths in the new prefix
|
||||
if sys.platform == "darwin":
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
|
||||
else:
|
||||
modify_object_macholib(path_name, paths_to_paths)
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
|
||||
else:
|
||||
# get the paths in the old prefix
|
||||
rpaths, deps, idpath = macholib_get_paths(path_name)
|
||||
@@ -449,10 +424,7 @@ def relocate_macho_binaries(
|
||||
rpaths, deps, idpath, old_layout_root, prefix_to_prefix
|
||||
)
|
||||
# replace the old paths with new paths
|
||||
if sys.platform == "darwin":
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
|
||||
else:
|
||||
modify_object_macholib(path_name, paths_to_paths)
|
||||
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
|
||||
|
||||
|
||||
def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import re
|
||||
import xml.sax.saxutils
|
||||
from datetime import datetime
|
||||
@@ -42,17 +41,6 @@ def elapsed(current, previous):
|
||||
return diff.total_seconds()
|
||||
|
||||
|
||||
# TODO (post-34236): Should remove with deprecated test methods since don't
|
||||
# TODO (post-34236): have an XFAIL mechanism with the new test_part() approach.
|
||||
def expected_failure(line):
|
||||
if not line:
|
||||
return False
|
||||
|
||||
match = returns_regexp.search(line)
|
||||
xfail = "0" not in match.group(1) if match else False
|
||||
return xfail
|
||||
|
||||
|
||||
def new_part():
|
||||
return {
|
||||
"command": None,
|
||||
@@ -66,14 +54,6 @@ def new_part():
|
||||
}
|
||||
|
||||
|
||||
# TODO (post-34236): Remove this when remove deprecated methods
|
||||
def part_name(source):
|
||||
elements = []
|
||||
for e in source.replace("'", "").split(" "):
|
||||
elements.append(os.path.basename(e) if os.sep in e else e)
|
||||
return "_".join(elements)
|
||||
|
||||
|
||||
def process_part_end(part, curr_time, last_time):
|
||||
if part:
|
||||
if not part["elapsed"]:
|
||||
@@ -81,11 +61,7 @@ def process_part_end(part, curr_time, last_time):
|
||||
|
||||
stat = part["status"]
|
||||
if stat in completed:
|
||||
# TODO (post-34236): remove the expected failure mapping when
|
||||
# TODO (post-34236): remove deprecated test methods.
|
||||
if stat == "passed" and expected_failure(part["desc"]):
|
||||
part["completed"] = "Expected to fail"
|
||||
elif part["completed"] == "Unknown":
|
||||
if part["completed"] == "Unknown":
|
||||
part["completed"] = completed[stat]
|
||||
elif stat is None or stat == "unknown":
|
||||
part["status"] = "passed"
|
||||
@@ -153,14 +129,6 @@ def extract_test_parts(default_name, outputs):
|
||||
if msg.startswith("Installing"):
|
||||
continue
|
||||
|
||||
# TODO (post-34236): Remove this check when remove run_test(),
|
||||
# TODO (post-34236): etc. since no longer supporting expected
|
||||
# TODO (post-34236): failures.
|
||||
if msg.startswith("Expecting return code"):
|
||||
if part:
|
||||
part["desc"] += f"; {msg}"
|
||||
continue
|
||||
|
||||
# Terminate without further parsing if no more test messages
|
||||
if "Completed testing" in msg:
|
||||
# Process last lingering part IF it didn't generate status
|
||||
|
@@ -12,6 +12,7 @@
from llnl.util.symlink import readlink, symlink

import spack.binary_distribution as bindist
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.platforms
@@ -52,6 +53,7 @@ def rewire_node(spec, explicit):
its subgraph. Binaries, text, and links are all changed in accordance with
the splice. The resulting package is then 'installed.'"""
tempdir = tempfile.mkdtemp()

# copy anything installed to a temporary directory
shutil.copytree(spec.build_spec.prefix, os.path.join(tempdir, spec.dag_hash()))

@@ -59,8 +61,21 @@ def rewire_node(spec, explicit):
# compute prefix-to-prefix for every node from the build spec to the spliced
# spec
prefix_to_prefix = OrderedDict({spec.build_spec.prefix: spec.prefix})
for build_dep in spec.build_spec.traverse(root=False):
prefix_to_prefix[build_dep.prefix] = spec[build_dep.name].prefix
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
for s in bindist.deps_to_relocate(spec):
analog = s
if id(s) not in build_spec_ids:
analogs = [
d
for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
]
if analogs:
# Prefer same-name analogs and prefer higher versions
# This matches the preferences in Spec.splice, so we will find same node
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))

prefix_to_prefix[analog.prefix] = s.prefix

manifest = bindist.get_buildfile_manifest(spec.build_spec)
platform = spack.platforms.by_name(spec.platform)

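The analog selection above uses a tuple key with max() so that a same-named node beats a virtual provider, and among those the highest version wins. A small self-contained illustration of that idiom; plain named tuples and string versions stand in for real Spec objects here (Spack compares real Version objects, which order semantically):

    from collections import namedtuple

    Node = namedtuple("Node", ["name", "version"])

    candidates = [Node("openblas", "0.3.25"), Node("zlib", "1.3.1"), Node("zlib", "1.2.13")]
    wanted_name = "zlib"

    # (matches_name, version): False < True, so name matches dominate; version breaks ties
    best = max(candidates, key=lambda n: (n.name == wanted_name, n.version))
    print(best)   # Node(name='zlib', version='1.3.1')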
@@ -47,7 +47,7 @@
|
||||
"tags": {"type": "array", "items": {"type": "string"}},
|
||||
"variables": {
|
||||
"type": "object",
|
||||
"patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}},
|
||||
"patternProperties": {r"[\w\d\-_\.]+": {"type": ["string", "number"]}},
|
||||
},
|
||||
"before_script": script_schema,
|
||||
"script": script_schema,
|
||||
@@ -77,58 +77,54 @@
|
||||
},
|
||||
}
|
||||
|
||||
named_attributes_schema = {
|
||||
"oneOf": [
|
||||
{
|
||||
dynamic_mapping_schema = {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"required": ["dynamic-mapping"],
|
||||
"properties": {
|
||||
"dynamic-mapping": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {"noop-job": attributes_schema, "noop-job-remove": attributes_schema},
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {"build-job": attributes_schema, "build-job-remove": attributes_schema},
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {"copy-job": attributes_schema, "copy-job-remove": attributes_schema},
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"required": ["endpoint"],
|
||||
"properties": {
|
||||
"reindex-job": attributes_schema,
|
||||
"reindex-job-remove": attributes_schema,
|
||||
"name": {"type": "string"},
|
||||
# "endpoint" cannot have http patternProperties constaint as it is a required field
|
||||
# Constrain is applied in code
|
||||
"endpoint": {"type": "string"},
|
||||
"timeout": {"type": "integer", "minimum": 0},
|
||||
"verify_ssl": {"type": "boolean", "default": False},
|
||||
"header": {"type": "object", "additionalProperties": False},
|
||||
"allow": {"type": "array", "items": {"type": "string"}},
|
||||
"require": {"type": "array", "items": {"type": "string"}},
|
||||
"ignore": {"type": "array", "items": {"type": "string"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"signing-job": attributes_schema,
|
||||
"signing-job-remove": attributes_schema,
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"cleanup-job": attributes_schema,
|
||||
"cleanup-job-remove": attributes_schema,
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {"any-job": attributes_schema, "any-job-remove": attributes_schema},
|
||||
},
|
||||
]
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def job_schema(name: str):
|
||||
return {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {f"{name}-job": attributes_schema, f"{name}-job-remove": attributes_schema},
|
||||
}
|
||||
|
||||
|
||||
pipeline_gen_schema = {
|
||||
"type": "array",
|
||||
"items": {"oneOf": [submapping_schema, named_attributes_schema]},
|
||||
"items": {
|
||||
"oneOf": [
|
||||
submapping_schema,
|
||||
dynamic_mapping_schema,
|
||||
job_schema("any"),
|
||||
job_schema("build"),
|
||||
job_schema("cleanup"),
|
||||
job_schema("copy"),
|
||||
job_schema("noop"),
|
||||
job_schema("reindex"),
|
||||
job_schema("signing"),
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
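The refactor above replaces one large oneOf of hand-written per-job schemas with a small job_schema(name) factory plus an explicit dynamic-mapping entry. A hedged sketch of how such a factory expands and how an entry could be checked with the jsonschema library; attributes_schema is stubbed out here and the entry contents are hypothetical, since the real validation is wired through Spack's config machinery:

    import jsonschema

    attributes_schema = {"type": "object"}  # stand-in for the real attributes schema

    def job_schema(name: str):
        return {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                f"{name}-job": attributes_schema,
                f"{name}-job-remove": attributes_schema,
            },
        }

    entry = {"build-job": {"tags": ["large"], "variables": {"SPACK_VERBOSE": "1"}}}
    jsonschema.validate(entry, job_schema("build"))            # passes
    # jsonschema.validate({"oops-job": {}}, job_schema("build"))  # would raise ValidationError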
core_shared_properties = union_dicts(
|
||||
|
@@ -55,6 +55,26 @@
|
||||
"unify": {
|
||||
"oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
|
||||
},
|
||||
"splice": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"explicit": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["target", "replacement"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"target": {"type": "string"},
|
||||
"replacement": {"type": "string"},
|
||||
"transitive": {"type": "boolean", "default": False},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
"duplicates": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
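The new concretizer splice schema above accepts a list of target/replacement pairs, each optionally transitive. A hypothetical configuration instance matching that schema, shown as the Python data structure the YAML would load into; the package name and hash are placeholders, and the solver code below additionally requires the replacement to carry an abstract hash:

    concretizer_splice = {
        "splice": {
            "explicit": [
                {
                    # splice this replacement in wherever the target matches
                    "target": "mpich",
                    "replacement": "mpich/abcdef12",
                    "transitive": False,
                }
            ]
        }
    }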
@@ -523,7 +523,12 @@ def _compute_specs_from_answer_set(self):
|
||||
node = SpecBuilder.make_node(pkg=providers[0])
|
||||
candidate = answer.get(node)
|
||||
|
||||
if candidate and candidate.satisfies(input_spec):
|
||||
if candidate and candidate.build_spec.satisfies(input_spec):
|
||||
if not candidate.satisfies(input_spec):
|
||||
tty.warn(
|
||||
"explicit splice configuration has caused the concretized spec"
|
||||
f" {candidate} not to satisfy the input spec {input_spec}"
|
||||
)
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
else:
|
||||
@@ -3814,7 +3819,33 @@ def build_specs(self, function_tuples):
|
||||
spack.version.git_ref_lookup.GitRefLookup(spec.fullname)
|
||||
)
|
||||
|
||||
return self._specs
|
||||
specs = self.execute_explicit_splices()
|
||||
|
||||
return specs
|
||||
|
||||
def execute_explicit_splices(self):
|
||||
splice_config = spack.config.CONFIG.get("concretizer:splice:explicit", [])
|
||||
splice_triples = []
|
||||
for splice_set in splice_config:
|
||||
target = splice_set["target"]
|
||||
replacement = spack.spec.Spec(splice_set["replacement"])
|
||||
assert replacement.abstract_hash
|
||||
replacement.replace_hash()
|
||||
transitive = splice_set.get("transitive", False)
|
||||
splice_triples.append((target, replacement, transitive))
|
||||
|
||||
specs = {}
|
||||
for key, spec in self._specs.items():
|
||||
current_spec = spec
|
||||
for target, replacement, transitive in splice_triples:
|
||||
if target in current_spec:
|
||||
# matches root or non-root
|
||||
# e.g. mvapich2%gcc
|
||||
current_spec = current_spec.splice(replacement, transitive)
|
||||
new_key = NodeArgument(id=key.id, pkg=current_spec.name)
|
||||
specs[new_key] = current_spec
|
||||
|
||||
return specs
|
||||
|
||||
|
||||
def _develop_specs_from_env(spec, env):
|
||||
|
@@ -4183,7 +4183,7 @@ def _virtuals_provided(self, root):
|
||||
"""Return set of virtuals provided by self in the context of root"""
|
||||
if root is self:
|
||||
# Could be using any virtual the package can provide
|
||||
return set(self.package.virtuals_provided)
|
||||
return set(v.name for v in self.package.virtuals_provided)
|
||||
|
||||
hashes = [s.dag_hash() for s in root.traverse()]
|
||||
in_edges = set(
|
||||
@@ -4206,7 +4206,7 @@ def _splice_match(self, other, self_root, other_root):
|
||||
return True
|
||||
|
||||
return bool(
|
||||
self._virtuals_provided(self_root)
|
||||
bool(self._virtuals_provided(self_root))
|
||||
and self._virtuals_provided(self_root) <= other._virtuals_provided(other_root)
|
||||
)
|
||||
|
||||
@@ -4226,29 +4226,24 @@ def _splice_detach_and_add_dependents(self, replacement, context):
|
||||
# Only set it if it hasn't been spliced before
|
||||
ancestor._build_spec = ancestor._build_spec or ancestor.copy()
|
||||
ancestor.clear_cached_hashes(ignore=(ht.package_hash.attr,))
|
||||
for edge in ancestor.edges_to_dependencies(depflag=dt.BUILD):
|
||||
if edge.depflag & ~dt.BUILD:
|
||||
edge.depflag &= ~dt.BUILD
|
||||
else:
|
||||
ancestor._dependencies[edge.spec.name].remove(edge)
|
||||
edge.spec._dependents[ancestor.name].remove(edge)
|
||||
|
||||
# For each direct dependent in the link/run graph, replace the dependency on
|
||||
# node with one on replacement
|
||||
# For each build dependent, restrict the edge to build-only
|
||||
for edge in self.edges_from_dependents():
|
||||
if edge.parent not in ancestors_in_context:
|
||||
continue
|
||||
build_dep = edge.depflag & dt.BUILD
|
||||
other_dep = edge.depflag & ~dt.BUILD
|
||||
if build_dep:
|
||||
parent_edge = [e for e in edge.parent._dependencies[self.name] if e.spec is self]
|
||||
assert len(parent_edge) == 1
|
||||
|
||||
edge.depflag = dt.BUILD
|
||||
parent_edge[0].depflag = dt.BUILD
|
||||
else:
|
||||
edge.parent._dependencies.edges[self.name].remove(edge)
|
||||
self._dependents.edges[edge.parent.name].remove(edge)
|
||||
edge.parent._dependencies.edges[self.name].remove(edge)
|
||||
self._dependents.edges[edge.parent.name].remove(edge)
|
||||
edge.parent._add_dependency(replacement, depflag=edge.depflag, virtuals=edge.virtuals)
|
||||
|
||||
if other_dep:
|
||||
edge.parent._add_dependency(replacement, depflag=other_dep, virtuals=edge.virtuals)
|
||||
|
||||
def _splice_helper(self, replacement, self_root, other_root):
|
||||
def _splice_helper(self, replacement):
|
||||
"""Main loop of a transitive splice.
|
||||
|
||||
The while loop around a traversal of self ensures that changes to self from previous
|
||||
@@ -4276,8 +4271,7 @@ def _splice_helper(self, replacement, self_root, other_root):
|
||||
replacements_by_name[node.name].append(node)
|
||||
virtuals = node._virtuals_provided(root=replacement)
|
||||
for virtual in virtuals:
|
||||
# Virtual may be spec or str, get name or return str
|
||||
replacements_by_name[getattr(virtual, "name", virtual)].append(node)
|
||||
replacements_by_name[virtual].append(node)
|
||||
|
||||
changed = True
|
||||
while changed:
|
||||
@@ -4298,8 +4292,8 @@ def _splice_helper(self, replacement, self_root, other_root):
|
||||
for virtual in node._virtuals_provided(root=self):
|
||||
analogs += [
|
||||
r
|
||||
for r in replacements_by_name[getattr(virtual, "name", virtual)]
|
||||
if r._splice_match(node, self_root=self_root, other_root=other_root)
|
||||
for r in replacements_by_name[virtual]
|
||||
if node._splice_match(r, self_root=self, other_root=replacement)
|
||||
]
|
||||
|
||||
# No match, keep iterating over self
|
||||
@@ -4313,34 +4307,56 @@ def _splice_helper(self, replacement, self_root, other_root):
|
||||
# No splice needed here, keep checking
|
||||
if analog == node:
|
||||
continue
|
||||
|
||||
node._splice_detach_and_add_dependents(analog, context=self)
|
||||
changed = True
|
||||
break
|
||||
|
||||
def splice(self, other, transitive):
|
||||
"""Splices dependency "other" into this ("target") Spec, and return the
|
||||
result as a concrete Spec.
|
||||
If transitive, then other and its dependencies will be extrapolated to
|
||||
a list of Specs and spliced in accordingly.
|
||||
For example, let there exist a dependency graph as follows:
|
||||
T
|
||||
| \
|
||||
Z<-H
|
||||
In this example, Spec T depends on H and Z, and H also depends on Z.
|
||||
Suppose, however, that we wish to use a different H, known as H'. This
|
||||
function will splice in the new H' in one of two ways:
|
||||
1. transitively, where H' depends on the Z' it was built with, and the
|
||||
new T* also directly depends on this new Z', or
|
||||
2. intransitively, where the new T* and H' both depend on the original
|
||||
Z.
|
||||
Since the Spec returned by this splicing function is no longer deployed
|
||||
the same way it was built, any such changes are tracked by setting the
|
||||
build_spec to point to the corresponding dependency from the original
|
||||
Spec.
|
||||
"""
|
||||
def splice(self, other: "Spec", transitive: bool = True) -> "Spec":
|
||||
"""Returns a new, spliced concrete Spec with the "other" dependency and,
|
||||
optionally, its dependencies.
|
||||
|
||||
Args:
|
||||
other: alternate dependency
|
||||
transitive: include other's dependencies
|
||||
|
||||
Returns: a concrete, spliced version of the current Spec
|
||||
|
||||
When transitive is "True", use the dependencies from "other" to reconcile
|
||||
conflicting dependencies. When transitive is "False", use dependencies from self.
|
||||
|
||||
For example, suppose we have the following dependency graph:
|
||||
|
||||
T
|
||||
| \
|
||||
Z<-H
|
||||
|
||||
Spec T depends on H and Z, and H also depends on Z. Now we want to use
|
||||
a different H, called H'. This function can be used to splice in H' to
|
||||
create a new spec, called T*. If H' was built with Z', then transitive
|
||||
"True" will ensure H' and T* both depend on Z':
|
||||
|
||||
T*
|
||||
| \
|
||||
Z'<-H'
|
||||
|
||||
If transitive is "False", then H' and T* will both depend on
|
||||
the original Z, resulting in a new H'*
|
||||
|
||||
T*
|
||||
| \
|
||||
Z<-H'*
|
||||
|
||||
Provenance of the build is tracked through the "build_spec" property
|
||||
of the spliced spec and any correspondingly modified dependency specs.
|
||||
The build specs are set to that of the original spec, so the original
|
||||
spec's provenance is preserved unchanged."""
|
||||
assert self.concrete
|
||||
assert other.concrete
|
||||
|
||||
if self._splice_match(other, self_root=self, other_root=other):
|
||||
return other.copy()
|
||||
|
||||
if not any(
|
||||
node._splice_match(other, self_root=self, other_root=other)
|
||||
for node in self.traverse(root=False, deptype=dt.LINK | dt.RUN)
|
||||
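The reworked docstring above describes transitive versus intransitive splicing of a dependency H' into T. A brief usage sketch of the public API as documented there; the specs are hypothetical and must already be concrete (e.g. obtained from concretization or the database), so this is an illustration rather than a runnable script:

    # t and h_prime are assumed to be concrete Specs for the T and H' of the example above
    t_star = t.splice(h_prime, transitive=True)    # T* and H' both depend on the Z' that H' was built with
    t_star_2 = t.splice(h_prime, transitive=False)  # T* and H'* keep the original Z instead
    # t_star.build_spec records the pre-splice spec, preserving the original build provenance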
@@ -4379,12 +4395,12 @@ def mask_build_deps(in_spec):
|
||||
|
||||
# Transitively splice any relevant nodes from new into base
|
||||
# This handles all shared dependencies between self and other
|
||||
spec._splice_helper(replacement, self_root=self, other_root=other)
|
||||
spec._splice_helper(replacement)
|
||||
else:
|
||||
# Do the same thing as the transitive splice, but reversed
|
||||
node_pairs = make_node_pairs(other, replacement)
|
||||
mask_build_deps(replacement)
|
||||
replacement._splice_helper(spec, self_root=other, other_root=self)
|
||||
replacement._splice_helper(spec)
|
||||
|
||||
# Intransitively splice replacement into spec
|
||||
# This is very simple now that all shared dependencies have been handled
|
||||
@@ -4392,13 +4408,14 @@ def mask_build_deps(in_spec):
|
||||
if node._splice_match(other, self_root=spec, other_root=other):
|
||||
node._splice_detach_and_add_dependents(replacement, context=spec)
|
||||
|
||||
# Set up build dependencies for modified nodes
|
||||
# Also modify build_spec because the existing ones had build deps removed
|
||||
# For nodes that were spliced, modify the build spec to ensure build deps are preserved
|
||||
# For nodes that were not spliced, replace the build deps on the spec itself
|
||||
for orig, copy in node_pairs:
|
||||
for edge in orig.edges_to_dependencies(depflag=dt.BUILD):
|
||||
copy._add_dependency(edge.spec, depflag=dt.BUILD, virtuals=edge.virtuals)
|
||||
if copy._build_spec:
|
||||
copy._build_spec = orig.build_spec.copy()
|
||||
else:
|
||||
for edge in orig.edges_to_dependencies(depflag=dt.BUILD):
|
||||
copy._add_dependency(edge.spec, depflag=dt.BUILD, virtuals=edge.virtuals)
|
||||
|
||||
return spec
|
||||
|
||||
@@ -4797,7 +4814,7 @@ def _load(cls, data):
|
||||
virtuals=virtuals,
|
||||
)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
_, bhash, _ = cls.extract_build_spec_info_from_node_dict(node, hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]["node_spec"]
|
||||
|
||||
return hash_dict[root_spec_hash]["node_spec"]
|
||||
@@ -4925,7 +4942,7 @@ def extract_info_from_dep(cls, elt, hash):
|
||||
return dep_hash, deptypes, hash_type, virtuals
|
||||
|
||||
@classmethod
|
||||
def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
|
||||
def extract_build_spec_info_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
|
||||
build_spec_dict = node["build_spec"]
|
||||
return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
|
||||
|
||||
|
@@ -27,8 +27,15 @@
|
||||
(["invalid-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has invalid GitLab patch URLs
|
||||
(["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has a stand-alone 'test*' method in build-time callbacks
|
||||
(["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has a stand-alone test method in build-time callbacks
|
||||
(["fail-test-audit"], ["PKG-PROPERTIES"]),
|
||||
# This package implements and uses several deprecated stand-alone
|
||||
# test methods
|
||||
(["fail-test-audit-deprecated"], ["PKG-DEPRECATED-ATTRIBUTES"]),
|
||||
# This package has stand-alone test methods without non-trivial docstrings
|
||||
(["fail-test-audit-docstring"], ["PKG-PROPERTIES"]),
|
||||
# This package has a stand-alone test method without an implementation
|
||||
(["fail-test-audit-impl"], ["PKG-PROPERTIES"]),
|
||||
# This package has no issues
|
||||
(["mpileaks"], None),
|
||||
# This package has a conflict with a trigger which cannot constrain the constraint
|
||||
@@ -41,7 +48,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
|
||||
|
||||
# Check that errors were reported only for the expected failure
|
||||
actual_errors = [check for check, errors in reports if errors]
|
||||
msg = [str(e) for _, errors in reports for e in errors]
|
||||
msg = "\n".join([str(e) for _, errors in reports for e in errors])
|
||||
if expected_errors:
|
||||
assert expected_errors == actual_errors, msg
|
||||
else:
|
||||
|
@@ -68,22 +68,6 @@ def cache_directory(tmpdir):
|
||||
spack.config.caches = old_cache_path
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def mirror_dir(tmpdir_factory):
|
||||
dir = tmpdir_factory.mktemp("mirror")
|
||||
dir.ensure("build_cache", dir=True)
|
||||
yield str(dir)
|
||||
dir.join("build_cache").remove()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def test_mirror(mirror_dir):
|
||||
mirror_url = url_util.path_to_file_url(mirror_dir)
|
||||
mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
|
||||
yield mirror_dir
|
||||
mirror_cmd("rm", "--scope=site", "test-mirror-func")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def config_directory(tmp_path_factory):
|
||||
# Copy defaults to a temporary "site" scope
|
||||
@@ -222,9 +206,9 @@ def dummy_prefix(tmpdir):
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with default rpaths
|
||||
into the default directory layout scheme.
|
||||
@@ -237,13 +221,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
install_cmd("--no-cache", sy_spec.name)
|
||||
|
||||
# Create a buildcache
|
||||
buildcache_cmd("push", "-u", mirror_dir, cspec.name, sy_spec.name)
|
||||
|
||||
buildcache_cmd("push", "-u", temporary_mirror_dir, cspec.name, sy_spec.name)
|
||||
# Test force overwrite create buildcache (-f option)
|
||||
buildcache_cmd("push", "-uf", mirror_dir, cspec.name)
|
||||
buildcache_cmd("push", "-uf", temporary_mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", mirror_dir)
|
||||
buildcache_cmd("update-index", temporary_mirror_dir)
|
||||
|
||||
# List the buildcaches in the mirror
|
||||
buildcache_cmd("list", "-alv")
|
||||
@@ -271,9 +254,9 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_default_rpaths_install_nondefault_layout(mirror_dir):
|
||||
def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with default rpaths
|
||||
into the non-default directory layout scheme.
|
||||
@@ -294,9 +277,9 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_relative_rpaths_install_default_layout(mirror_dir):
|
||||
def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with relative
|
||||
rpaths into the default directory layout scheme.
|
||||
@@ -323,9 +306,9 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_relative_rpaths_install_nondefault(mirror_dir):
|
||||
def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
|
||||
"""
|
||||
Test the installation of buildcaches with relativized rpaths
|
||||
into the non-default directory layout scheme.
|
||||
@@ -374,9 +357,9 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures(
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
|
||||
"default_config", "cache_directory", "install_dir_non_default_layout", "temporary_mirror"
|
||||
)
|
||||
def test_built_spec_cache(mirror_dir):
|
||||
def test_built_spec_cache(temporary_mirror_dir):
|
||||
"""Because the buildcache list command fetches the buildcache index
|
||||
and uses it to populate the binary_distribution built spec cache, when
|
||||
this test calls get_mirrors_for_spec, it is testing the population of
|
||||
@@ -397,7 +380,7 @@ def fake_dag_hash(spec, length=None):
|
||||
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"[:length]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "test_mirror")
|
||||
@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "temporary_mirror")
|
||||
def test_spec_needs_rebuild(monkeypatch, tmpdir):
|
||||
"""Make sure needs_rebuild properly compares remote hash
|
||||
against locally computed one, avoiding unnecessary rebuilds"""
|
||||
@@ -518,7 +501,7 @@ def mock_list_url(url, recursive=False):
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_fetch", "install_mockery")
|
||||
def test_update_sbang(tmpdir, test_mirror):
|
||||
def test_update_sbang(tmpdir, temporary_mirror):
|
||||
"""Test the creation and installation of buildcaches with default rpaths
|
||||
into the non-default directory layout scheme, triggering an update of the
|
||||
sbang.
|
||||
@@ -529,7 +512,7 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
old_spec_hash_str = "/{0}".format(old_spec.dag_hash())
|
||||
|
||||
# Need a fake mirror with *function* scope.
|
||||
mirror_dir = test_mirror
|
||||
mirror_dir = temporary_mirror
|
||||
|
||||
# Assume all commands will concretize old_spec the same way.
|
||||
install_cmd("--no-cache", old_spec.name)
|
||||
|
@@ -516,6 +516,30 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
|
||||
assert dtags_to_add.value == expected_flag
|
||||
|
||||
|
||||
def test_module_globals_available_at_setup_dependent_time(
|
||||
monkeypatch, mutable_config, mock_packages, working_env
|
||||
):
|
||||
"""Spack built package externaltest depends on an external package
|
||||
externaltool. Externaltool's setup_dependent_package needs to be able to
|
||||
access globals on the dependent"""
|
||||
|
||||
def setup_dependent_package(module, dependent_spec):
|
||||
# Make sure set_package_py_globals was already called on
|
||||
# dependents
|
||||
# ninja is always set by the setup context and is not None
|
||||
dependent_module = dependent_spec.package.module
|
||||
assert hasattr(dependent_module, "ninja")
|
||||
assert dependent_module.ninja is not None
|
||||
dependent_spec.package.test_attr = True
|
||||
|
||||
externaltool = spack.spec.Spec("externaltest").concretized()
|
||||
monkeypatch.setattr(
|
||||
externaltool["externaltool"].package, "setup_dependent_package", setup_dependent_package
|
||||
)
|
||||
spack.build_environment.setup_package(externaltool.package, False)
|
||||
assert externaltool.package.test_attr
|
||||
|
||||
|
||||
def test_build_jobs_sequential_is_sequential():
|
||||
assert (
|
||||
spack.config.determine_number_of_jobs(
|
||||
|
@@ -12,22 +12,39 @@
|
||||
|
||||
|
||||
def test_build_task_errors(install_mockery):
|
||||
with pytest.raises(ValueError, match="must be a package"):
|
||||
inst.BuildTask("abc", None, False, 0, 0, 0, set())
|
||||
|
||||
"""Check expected errors when instantiating a BuildTask."""
|
||||
spec = spack.spec.Spec("trivial-install-test-package")
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
with pytest.raises(ValueError, match="must have a concrete spec"):
|
||||
inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, set())
|
||||
|
||||
# The value of the request argument is expected to not be checked.
|
||||
for pkg in [None, "abc"]:
|
||||
with pytest.raises(TypeError, match="must be a package"):
|
||||
inst.BuildTask(pkg, None)
|
||||
|
||||
with pytest.raises(ValueError, match="must have a concrete spec"):
|
||||
inst.BuildTask(pkg_cls(spec), None)
|
||||
|
||||
# Using a concretized package now means the request argument is checked.
|
||||
spec.concretize()
|
||||
assert spec.concrete
|
||||
with pytest.raises(ValueError, match="must have a build request"):
|
||||
inst.BuildTask(spec.package, None, False, 0, 0, 0, set())
|
||||
|
||||
with pytest.raises(TypeError, match="is not a valid build request"):
|
||||
inst.BuildTask(spec.package, None)
|
||||
|
||||
# Using a valid package and spec, the next check is the status argument.
|
||||
request = inst.BuildRequest(spec.package, {})
|
||||
with pytest.raises(spack.error.InstallError, match="Cannot create a build task"):
|
||||
inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, set())
|
||||
|
||||
with pytest.raises(TypeError, match="is not a valid build status"):
|
||||
inst.BuildTask(spec.package, request, status="queued")
|
||||
|
||||
# Now we can check that build tasks cannot be created when the status
|
||||
# indicates the task is/should've been removed.
|
||||
with pytest.raises(spack.error.InstallError, match="Cannot create a task"):
|
||||
inst.BuildTask(spec.package, request, status=inst.BuildStatus.REMOVED)
|
||||
|
||||
# Also make sure to not accept an incompatible installed argument value.
|
||||
with pytest.raises(TypeError, match="'installed' be a 'set', not 'str'"):
|
||||
inst.BuildTask(spec.package, request, installed="mpileaks")
|
||||
|
||||
|
||||
def test_build_task_basics(install_mockery):
|
||||
@@ -37,7 +54,7 @@ def test_build_task_basics(install_mockery):
|
||||
|
||||
# Ensure key properties match expectations
|
||||
request = inst.BuildRequest(spec.package, {})
|
||||
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())
|
||||
task = inst.BuildTask(spec.package, request=request, status=inst.BuildStatus.QUEUED)
|
||||
assert not task.explicit
|
||||
assert task.priority == len(task.uninstalled_deps)
|
||||
assert task.key == (task.priority, task.sequence)
|
||||
@@ -59,16 +76,16 @@ def test_build_task_strings(install_mockery):
|
||||
|
||||
# Ensure key properties match expectations
|
||||
request = inst.BuildRequest(spec.package, {})
|
||||
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())
|
||||
task = inst.BuildTask(spec.package, request=request, status=inst.BuildStatus.QUEUED)
|
||||
|
||||
# Cover __repr__
|
||||
irep = task.__repr__()
|
||||
assert irep.startswith(task.__class__.__name__)
|
||||
assert "status='queued'" in irep # == STATUS_ADDED
|
||||
assert "BuildStatus.QUEUED" in irep
|
||||
assert "sequence=" in irep
|
||||
|
||||
# Cover __str__
|
||||
istr = str(task)
|
||||
assert "status=queued" in istr # == STATUS_ADDED
|
||||
assert "status=queued" in istr # == BuildStatus.QUEUED
|
||||
assert "#dependencies=1" in istr
|
||||
assert "priority=" in istr
|
||||
|
@@ -199,7 +199,7 @@ def check_args(cc, args, expected):
|
||||
"""
|
||||
with set_env(SPACK_TEST_COMMAND="dump-args"):
|
||||
cc_modified_args = cc(*args, output=str).strip().split("\n")
|
||||
assert cc_modified_args == expected
|
||||
assert expected == cc_modified_args
|
||||
|
||||
|
||||
def check_args_contents(cc, args, must_contain, must_not_contain):
|
||||
@@ -354,39 +354,6 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
|
||||
)
|
||||
|
||||
|
||||
def test_ld_flags_with_redundant_rpaths(wrapper_environment, wrapper_flags):
|
||||
check_args(
|
||||
ld,
|
||||
test_args + test_rpaths, # ensure these are made unique
|
||||
["ld"]
|
||||
+ test_include_paths
|
||||
+ test_library_paths
|
||||
+ ["--disable-new-dtags"]
|
||||
+ test_rpaths
|
||||
+ test_args_without_paths
|
||||
+ spack_ldlibs,
|
||||
)
|
||||
|
||||
|
||||
def test_cc_flags_with_redundant_rpaths(wrapper_environment, wrapper_flags):
|
||||
check_args(
|
||||
cc,
|
||||
test_args + test_wl_rpaths + test_wl_rpaths, # ensure these are made unique
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ test_include_paths
|
||||
+ ["-Lfoo"]
|
||||
+ test_library_paths
|
||||
+ ["-Wl,--disable-new-dtags"]
|
||||
+ test_wl_rpaths
|
||||
+ test_args_without_paths
|
||||
+ spack_cppflags
|
||||
+ spack_cflags
|
||||
+ ["-Wl,--gc-sections"]
|
||||
+ spack_ldlibs,
|
||||
)
|
||||
|
||||
|
||||
def test_always_cflags(wrapper_environment, wrapper_flags):
|
||||
with set_env(SPACK_ALWAYS_CFLAGS="-always1 -always2"):
|
||||
check_args(
|
||||
|
@@ -7,6 +7,7 @@
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
from io import BytesIO
|
||||
from typing import NamedTuple
|
||||
|
||||
import jsonschema
|
||||
@@ -1846,3 +1847,91 @@ def test_ci_generate_mirror_config(
|
||||
pipeline_doc = syaml.load(f)
|
||||
assert fst not in pipeline_doc["rebuild-index"]["script"][0]
|
||||
assert snd in pipeline_doc["rebuild-index"]["script"][0]
|
||||
|
||||
|
||||
def dynamic_mapping_setup(tmpdir):
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
specs:
|
||||
- pkg-a
|
||||
mirrors:
|
||||
some-mirror: https://my.fake.mirror
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- dynamic-mapping:
|
||||
endpoint: https://fake.spack.io/mapper
|
||||
require: ["variables"]
|
||||
ignore: ["ignored_field"]
|
||||
allow: ["variables", "retry"]
|
||||
"""
|
||||
)
|
||||
|
||||
spec_a = Spec("pkg-a")
|
||||
spec_a.concretize()
|
||||
|
||||
return ci.get_job_name(spec_a)
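The require/ignore/allow lists in the spack.yaml written above filter whatever JSON the dynamic-mapping endpoint returns. A hedged, illustrative payload using only field names the two tests below exercise; the real endpoint contract is whatever _dyn_mapping_urlopener receives in production.

# Illustrative response only; values are placeholders.
response = {
    "variables": {"MY_VAR": "hello"},  # in `require` and `allow`: kept on the job
    "retry": 2,                        # in `allow`: kept if present
    "ignored_field": 0,                # in `ignore`: dropped
    "unallowed_field": 0,              # not in `allow`: dropped
}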
|
||||
|
||||
|
||||
def test_ci_dynamic_mapping_empty(
|
||||
tmpdir,
|
||||
working_env,
|
||||
mutable_mock_env_path,
|
||||
install_mockery,
|
||||
mock_packages,
|
||||
monkeypatch,
|
||||
ci_base_environment,
|
||||
):
|
||||
# The fake endpoint will always return an empty dictionary
|
||||
def fake_dyn_mapping_urlopener(*args, **kwargs):
|
||||
return BytesIO("{}".encode())
|
||||
|
||||
monkeypatch.setattr(ci, "_dyn_mapping_urlopener", fake_dyn_mapping_urlopener)
|
||||
|
||||
_ = dynamic_mapping_setup(tmpdir)
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd("create", "test", "./spack.yaml")
|
||||
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
|
||||
|
||||
with ev.read("test"):
|
||||
output = ci_cmd("generate", "--output-file", outputfile)
|
||||
assert "Response missing required keys: ['variables']" in output
|
||||
|
||||
|
||||
def test_ci_dynamic_mapping_full(
|
||||
tmpdir,
|
||||
working_env,
|
||||
mutable_mock_env_path,
|
||||
install_mockery,
|
||||
mock_packages,
|
||||
monkeypatch,
|
||||
ci_base_environment,
|
||||
):
|
||||
# The fake endpoint returns a mapping with allowed, ignored, and disallowed fields
|
||||
def fake_dyn_mapping_urlopener(*args, **kwargs):
|
||||
return BytesIO(
|
||||
json.dumps(
|
||||
{"variables": {"MY_VAR": "hello"}, "ignored_field": 0, "unallowed_field": 0}
|
||||
).encode()
|
||||
)
|
||||
|
||||
monkeypatch.setattr(ci, "_dyn_mapping_urlopener", fake_dyn_mapping_urlopener)
|
||||
|
||||
label = dynamic_mapping_setup(tmpdir)
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd("create", "test", "./spack.yaml")
|
||||
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
|
||||
|
||||
with ev.read("test"):
|
||||
ci_cmd("generate", "--output-file", outputfile)
|
||||
|
||||
with open(outputfile) as of:
|
||||
pipeline_doc = syaml.load(of.read())
|
||||
assert label in pipeline_doc
|
||||
job = pipeline_doc[label]
|
||||
|
||||
assert job.get("variables", {}).get("MY_VAR") == "hello"
|
||||
assert "ignored_field" not in job
|
||||
assert "unallowed_field" not in job
|
||||
|
@@ -164,3 +164,30 @@ def test_concretize_deprecated(mock_packages, mock_archive, mock_fetch, install_
|
||||
spec = spack.spec.Spec("libelf@0.8.10")
|
||||
with pytest.raises(spack.spec.SpecDeprecatedError):
|
||||
spec.concretize()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_packages", "mock_archive", "mock_fetch", "install_mockery")
|
||||
@pytest.mark.regression("46915")
|
||||
def test_deprecate_spec_with_external_dependency(mutable_config, temporary_store, tmp_path):
|
||||
"""Tests that we can deprecate a spec that has an external dependency"""
|
||||
packages_yaml = {
|
||||
"libelf": {
|
||||
"buildable": False,
|
||||
"externals": [{"spec": "libelf@0.8.13", "prefix": str(tmp_path / "libelf")}],
|
||||
}
|
||||
}
|
||||
mutable_config.set("packages", packages_yaml)
|
||||
|
||||
install("--fake", "dyninst ^libdwarf@=20111030")
|
||||
install("--fake", "libdwarf@=20130729")
|
||||
|
||||
# Ensure we are using the external libelf
|
||||
db = temporary_store.db
|
||||
libelf = db.query_one("libelf")
|
||||
assert libelf.external
|
||||
|
||||
deprecated_spec = db.query_one("libdwarf@=20111030")
|
||||
new_libdwarf = db.query_one("libdwarf@=20130729")
|
||||
deprecate("-y", "libdwarf@=20111030", "libdwarf@=20130729")
|
||||
|
||||
assert db.deprecator(deprecated_spec) == new_libdwarf
|
||||
|
@@ -65,6 +65,12 @@ def test_develop_no_clone(self, tmpdir):
|
||||
develop("--no-clone", "-p", str(tmpdir), "mpich@1.0")
|
||||
self.check_develop(e, spack.spec.Spec("mpich@=1.0"), str(tmpdir))
|
||||
|
||||
def test_develop_no_version(self, tmpdir):
|
||||
env("create", "test")
|
||||
with ev.read("test") as e:
|
||||
develop("--no-clone", "-p", str(tmpdir), "mpich")
|
||||
self.check_develop(e, spack.spec.Spec("mpich@=main"), str(tmpdir))
|
||||
|
||||
def test_develop(self):
|
||||
env("create", "test")
|
||||
with ev.read("test") as e:
|
||||
|
@@ -38,6 +38,7 @@
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml
|
||||
from spack.cmd.env import _env_create
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
from spack.spec import Spec
|
||||
from spack.stage import stage_prefix
|
||||
@@ -278,7 +279,7 @@ def test_env_rename_managed(capfd):
|
||||
assert "baz" in out
|
||||
|
||||
|
||||
def test_env_rename_anonymous(capfd, tmpdir):
|
||||
def test_env_rename_independent(capfd, tmpdir):
|
||||
# Need real environment
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("rename", "-d", "./non-existing", "./also-non-existing")
|
||||
@@ -574,42 +575,76 @@ def test_remove_command():
|
||||
|
||||
with ev.read("test"):
|
||||
add("mpileaks")
|
||||
|
||||
with ev.read("test"):
|
||||
assert "mpileaks" in find()
|
||||
assert "mpileaks@" not in find()
|
||||
assert "mpileaks@" not in find("--show-concretized")
|
||||
|
||||
with ev.read("test"):
|
||||
remove("mpileaks")
|
||||
|
||||
with ev.read("test"):
|
||||
assert "mpileaks" not in find()
|
||||
assert "mpileaks@" not in find()
|
||||
assert "mpileaks@" not in find("--show-concretized")
|
||||
|
||||
with ev.read("test"):
|
||||
add("mpileaks")
|
||||
|
||||
with ev.read("test"):
|
||||
assert "mpileaks" in find()
|
||||
assert "mpileaks@" not in find()
|
||||
assert "mpileaks@" not in find("--show-concretized")
|
||||
|
||||
with ev.read("test"):
|
||||
concretize()
|
||||
|
||||
with ev.read("test"):
|
||||
assert "mpileaks" in find()
|
||||
assert "mpileaks@" not in find()
|
||||
assert "mpileaks@" in find("--show-concretized")
|
||||
|
||||
with ev.read("test"):
|
||||
remove("mpileaks")
|
||||
|
||||
with ev.read("test"):
|
||||
assert "mpileaks" not in find()
|
||||
# removed but still in last concretized specs
|
||||
assert "mpileaks@" in find("--show-concretized")
|
||||
|
||||
with ev.read("test"):
|
||||
concretize()
|
||||
|
||||
with ev.read("test"):
|
||||
assert "mpileaks" not in find()
|
||||
assert "mpileaks@" not in find()
|
||||
# now the lockfile is regenerated and it's gone.
|
||||
assert "mpileaks@" not in find("--show-concretized")
|
||||
|
||||
|
||||
def test_remove_command_all():
|
||||
# Need separate ev.read calls for each command to ensure we test round-trip to disk
|
||||
env("create", "test")
|
||||
test_pkgs = ("mpileaks", "zlib")
|
||||
|
||||
with ev.read("test"):
|
||||
for name in test_pkgs:
|
||||
add(name)
|
||||
|
||||
with ev.read("test"):
|
||||
for name in test_pkgs:
|
||||
assert name in find()
|
||||
assert f"{name}@" not in find()
|
||||
|
||||
with ev.read("test"):
|
||||
remove("-a")
|
||||
|
||||
with ev.read("test"):
|
||||
for name in test_pkgs:
|
||||
assert name not in find()
|
||||
|
||||
|
||||
def test_bad_remove_included_env():
|
||||
env("create", "test")
|
||||
test = ev.read("test")
|
||||
@@ -769,6 +804,39 @@ def test_user_removed_spec(environment_from_manifest):
|
||||
assert not any(x.name == "hypre" for x in env_specs)
|
||||
|
||||
|
||||
def test_lockfile_spliced_specs(environment_from_manifest, install_mockery):
|
||||
"""Test that an environment can round-trip a spliced spec."""
|
||||
# Create a local install for zmpi to splice in
|
||||
# Default concretization is not using zmpi
|
||||
zmpi = spack.spec.Spec("zmpi").concretized()
|
||||
PackageInstaller([zmpi.package], fake=True).install()
|
||||
|
||||
e1 = environment_from_manifest(
|
||||
f"""
|
||||
spack:
|
||||
specs:
|
||||
- mpileaks
|
||||
concretizer:
|
||||
splice:
|
||||
explicit:
|
||||
- target: mpi
|
||||
replacement: zmpi/{zmpi.dag_hash()}
|
||||
"""
|
||||
)
|
||||
with e1:
|
||||
e1.concretize()
|
||||
e1.write()
|
||||
|
||||
# By reading into a second environment, we force a round trip to json
|
||||
e2 = _env_create("test2", init_file=e1.lock_path)
|
||||
|
||||
# The only spec is mpileaks
|
||||
for _, spec in e2.concretized_specs():
|
||||
assert spec.spliced
|
||||
assert spec["mpi"].satisfies(f"zmpi@{zmpi.version}")
|
||||
assert spec["mpi"].build_spec.satisfies(zmpi)
|
||||
|
||||
|
||||
def test_init_from_lockfile(environment_from_manifest):
|
||||
"""Test that an environment can be instantiated from a lockfile."""
|
||||
e1 = environment_from_manifest(
|
||||
@@ -3547,7 +3615,7 @@ def test_create_and_activate_managed(tmp_path):
|
||||
env("deactivate")
|
||||
|
||||
|
||||
def test_create_and_activate_anonymous(tmp_path):
|
||||
def test_create_and_activate_independent(tmp_path):
|
||||
with fs.working_dir(str(tmp_path)):
|
||||
env_dir = os.path.join(str(tmp_path), "foo")
|
||||
shell = env("activate", "--without-view", "--create", "--sh", env_dir)
|
||||
@@ -3885,7 +3953,7 @@ def test_environment_depfile_makefile(depfile_flags, expected_installs, tmpdir,
|
||||
)
|
||||
|
||||
# Do make dry run.
|
||||
out = make("-n", "-f", makefile, output=str)
|
||||
out = make("-n", "-f", makefile, "SPACK=spack", output=str)
|
||||
|
||||
specs_that_make_would_install = _parse_dry_run_package_installs(out)
|
||||
|
||||
@@ -3923,7 +3991,7 @@ def test_depfile_works_with_gitversions(tmpdir, mock_packages, monkeypatch):
|
||||
env("depfile", "-o", makefile, "--make-disable-jobserver", "--make-prefix=prefix")
|
||||
|
||||
# Do a dry run on the generated depfile
|
||||
out = make("-n", "-f", makefile, output=str)
|
||||
out = make("-n", "-f", makefile, "SPACK=spack", output=str)
|
||||
|
||||
# Check that all specs are there (without duplicates)
|
||||
specs_that_make_would_install = _parse_dry_run_package_installs(out)
|
||||
@@ -3985,7 +4053,12 @@ def test_depfile_phony_convenience_targets(
|
||||
|
||||
# Phony install/* target should install picked package and all its deps
|
||||
specs_that_make_would_install = _parse_dry_run_package_installs(
|
||||
make("-n", picked_spec.format("install/{name}-{version}-{hash}"), output=str)
|
||||
make(
|
||||
"-n",
|
||||
picked_spec.format("install/{name}-{version}-{hash}"),
|
||||
"SPACK=spack",
|
||||
output=str,
|
||||
)
|
||||
)
|
||||
|
||||
assert set(specs_that_make_would_install) == set(expected_installs)
|
||||
@@ -3993,7 +4066,12 @@ def test_depfile_phony_convenience_targets(
|
||||
|
||||
# Phony install-deps/* target shouldn't install picked package
|
||||
specs_that_make_would_install = _parse_dry_run_package_installs(
|
||||
make("-n", picked_spec.format("install-deps/{name}-{version}-{hash}"), output=str)
|
||||
make(
|
||||
"-n",
|
||||
picked_spec.format("install-deps/{name}-{version}-{hash}"),
|
||||
"SPACK=spack",
|
||||
output=str,
|
||||
)
|
||||
)
|
||||
|
||||
assert set(specs_that_make_would_install) == set(expected_installs) - {picked_package}
|
||||
@@ -4053,7 +4131,7 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
|
||||
make = Executable("make")
|
||||
|
||||
# Do dry run.
|
||||
out = make("-n", "-C", str(tmpdir), output=str)
|
||||
out = make("-n", "-C", str(tmpdir), "SPACK=spack", output=str)
|
||||
|
||||
# post-install: <hash> should've been executed
|
||||
with ev.read("test") as test:
|
||||
|
@@ -70,10 +70,10 @@ def test_query_arguments():
|
||||
|
||||
q_args = query_arguments(args)
|
||||
assert "installed" in q_args
|
||||
assert "known" in q_args
|
||||
assert "predicate_fn" in q_args
|
||||
assert "explicit" in q_args
|
||||
assert q_args["installed"] == ["installed"]
|
||||
assert q_args["known"] is any
|
||||
assert q_args["predicate_fn"] is None
|
||||
assert q_args["explicit"] is any
|
||||
assert "start_date" in q_args
|
||||
assert "end_date" not in q_args
|
||||
|
@@ -35,6 +35,22 @@ def test_gc_with_build_dependency(mutable_database):
|
||||
assert "There are no unused specs." in gc("-y")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
def test_gc_with_constraints(mutable_database):
|
||||
s_cmake1 = spack.spec.Spec("simple-inheritance ^cmake@3.4.3").concretized()
|
||||
s_cmake2 = spack.spec.Spec("simple-inheritance ^cmake@3.23.1").concretized()
|
||||
PackageInstaller([s_cmake1.package], explicit=True, fake=True).install()
|
||||
PackageInstaller([s_cmake2.package], explicit=True, fake=True).install()
|
||||
|
||||
assert "There are no unused specs." in gc("python")
|
||||
|
||||
assert "Successfully uninstalled cmake@3.4.3" in gc("-y", "cmake@3.4.3")
|
||||
assert "There are no unused specs." in gc("-y", "cmake@3.4.3")
|
||||
|
||||
assert "Successfully uninstalled cmake" in gc("-y", "cmake@3.23.1")
|
||||
assert "There are no unused specs." in gc("-y", "cmake")
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
def test_gc_with_environment(mutable_database, mutable_mock_env_path):
|
||||
s = spack.spec.Spec("simple-inheritance")
|
||||
|
@@ -194,6 +194,9 @@ def test_test_list_all(mock_packages):
|
||||
assert set(pkgs) == set(
|
||||
[
|
||||
"fail-test-audit",
|
||||
"fail-test-audit-deprecated",
|
||||
"fail-test-audit-docstring",
|
||||
"fail-test-audit-impl",
|
||||
"mpich",
|
||||
"perl-extension",
|
||||
"printing-package",
|
||||
|
@@ -4,9 +4,12 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.symlink import _windows_can_symlink
|
||||
|
||||
import spack.util.spack_yaml as s_yaml
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.main import SpackCommand
|
||||
@@ -16,7 +19,16 @@
|
||||
install = SpackCommand("install")
|
||||
view = SpackCommand("view")
|
||||
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
if sys.platform == "win32":
|
||||
if not _windows_can_symlink():
|
||||
pytest.skip(
|
||||
"Windows must be able to create symlinks to run tests.", allow_module_level=True
|
||||
)
|
||||
# TODO: Skipping hardlink command testing on windows until robust checks can be added.
|
||||
# See https://github.com/spack/spack/pull/46335#discussion_r1757411915
|
||||
commands = ["symlink", "add", "copy", "relocate"]
|
||||
else:
|
||||
commands = ["hardlink", "symlink", "hard", "add", "copy", "relocate"]
|
||||
|
||||
|
||||
def create_projection_file(tmpdir, projection):
|
||||
@@ -28,7 +40,7 @@ def create_projection_file(tmpdir, projection):
|
||||
return projection_file
|
||||
|
||||
|
||||
@pytest.mark.parametrize("cmd", ["hardlink", "symlink", "hard", "add", "copy", "relocate"])
|
||||
@pytest.mark.parametrize("cmd", commands)
|
||||
def test_view_link_type(tmpdir, mock_packages, mock_archive, mock_fetch, install_mockery, cmd):
|
||||
install("libdwarf")
|
||||
viewpath = str(tmpdir.mkdir("view_{0}".format(cmd)))
|
||||
@@ -41,7 +53,7 @@ def test_view_link_type(tmpdir, mock_packages, mock_archive, mock_fetch, install
|
||||
assert os.path.islink(package_prefix) == is_link_cmd
|
||||
|
||||
|
||||
@pytest.mark.parametrize("add_cmd", ["hardlink", "symlink", "hard", "add", "copy", "relocate"])
|
||||
@pytest.mark.parametrize("add_cmd", commands)
|
||||
def test_view_link_type_remove(
|
||||
tmpdir, mock_packages, mock_archive, mock_fetch, install_mockery, add_cmd
|
||||
):
|
||||
@@ -55,7 +67,7 @@ def test_view_link_type_remove(
|
||||
assert not os.path.exists(bindir)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("cmd", ["hardlink", "symlink", "hard", "add", "copy", "relocate"])
|
||||
@pytest.mark.parametrize("cmd", commands)
|
||||
def test_view_projections(tmpdir, mock_packages, mock_archive, mock_fetch, install_mockery, cmd):
|
||||
install("libdwarf@20130207")
|
||||
|
||||
|
@@ -461,9 +461,13 @@ def test_intel_flags():
|
||||
unsupported_flag_test("cxx14_flag", "intel@=14.0")
|
||||
supported_flag_test("cxx14_flag", "-std=c++1y", "intel@=15.0")
|
||||
supported_flag_test("cxx14_flag", "-std=c++14", "intel@=15.0.2")
|
||||
unsupported_flag_test("cxx17_flag", "intel@=18")
|
||||
supported_flag_test("cxx17_flag", "-std=c++17", "intel@=19.0")
|
||||
unsupported_flag_test("c99_flag", "intel@=11.0")
|
||||
supported_flag_test("c99_flag", "-std=c99", "intel@=12.0")
|
||||
unsupported_flag_test("c11_flag", "intel@=15.0")
|
||||
supported_flag_test("c18_flag", "-std=c18", "intel@=21.5.0")
|
||||
unsupported_flag_test("c18_flag", "intel@=21.4.0")
|
||||
supported_flag_test("c11_flag", "-std=c1x", "intel@=16.0")
|
||||
supported_flag_test("cc_pic_flag", "-fPIC", "intel@=1.0")
|
||||
supported_flag_test("cxx_pic_flag", "-fPIC", "intel@=1.0")
|
||||
|
@@ -2281,6 +2281,31 @@ def test_virtuals_are_annotated_on_edges(self, spec_str):
|
||||
edges = spec.edges_to_dependencies(name="callpath")
|
||||
assert len(edges) == 1 and edges[0].virtuals == ()
|
||||
|
||||
@pytest.mark.parametrize("transitive", [True, False])
|
||||
def test_explicit_splices(
|
||||
self, mutable_config, database_mutable_config, mock_packages, transitive, capfd
|
||||
):
|
||||
mpich_spec = database_mutable_config.query("mpich")[0]
|
||||
splice_info = {
|
||||
"target": "mpi",
|
||||
"replacement": f"/{mpich_spec.dag_hash()}",
|
||||
"transitive": transitive,
|
||||
}
|
||||
spack.config.CONFIG.set("concretizer", {"splice": {"explicit": [splice_info]}})
|
||||
|
||||
spec = spack.spec.Spec("hdf5 ^zmpi").concretized()
|
||||
|
||||
assert spec.satisfies(f"^mpich@{mpich_spec.version}")
|
||||
assert spec.build_spec.dependencies(name="zmpi", deptype="link")
|
||||
assert spec["mpi"].build_spec.satisfies(mpich_spec)
|
||||
assert not spec.build_spec.satisfies(f"^mpich/{mpich_spec.dag_hash()}")
|
||||
assert not spec.dependencies(name="zmpi", deptype="link")
|
||||
|
||||
captured = capfd.readouterr()
|
||||
assert "Warning: explicit splice configuration has caused" in captured.err
|
||||
assert "hdf5 ^zmpi" in captured.err
|
||||
assert str(spec) in captured.err
|
||||
|
||||
@pytest.mark.db
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str,mpi_name",
|
||||
|
@@ -62,8 +62,11 @@
|
||||
import spack.version
|
||||
from spack.fetch_strategy import URLFetchStrategy
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.main import SpackCommand
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
mirror_cmd = SpackCommand("mirror")
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def check_config_fixture(request):
|
||||
@@ -989,6 +992,38 @@ def install_mockery(temporary_store: spack.store.Store, mutable_config, mock_pac
|
||||
temporary_store.failure_tracker.clear_all()
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def temporary_mirror_dir(tmpdir_factory):
|
||||
dir = tmpdir_factory.mktemp("mirror")
|
||||
dir.ensure("build_cache", dir=True)
|
||||
yield str(dir)
|
||||
dir.join("build_cache").remove()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def temporary_mirror(temporary_mirror_dir):
|
||||
mirror_url = url_util.path_to_file_url(temporary_mirror_dir)
|
||||
mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
|
||||
yield temporary_mirror_dir
|
||||
mirror_cmd("rm", "--scope=site", "test-mirror-func")
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mutable_temporary_mirror_dir(tmpdir_factory):
|
||||
dir = tmpdir_factory.mktemp("mirror")
|
||||
dir.ensure("build_cache", dir=True)
|
||||
yield str(dir)
|
||||
dir.join("build_cache").remove()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mutable_temporary_mirror(mutable_temporary_mirror_dir):
|
||||
mirror_url = url_util.path_to_file_url(mutable_temporary_mirror_dir)
|
||||
mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
|
||||
yield mutable_temporary_mirror_dir
|
||||
mirror_cmd("rm", "--scope=site", "test-mirror-func")
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def temporary_store(tmpdir, request):
|
||||
"""Hooks a temporary empty store for the test function."""
|
||||
@@ -1980,6 +2015,11 @@ def pytest_runtest_setup(item):
|
||||
if not_on_windows_marker and sys.platform == "win32":
|
||||
pytest.skip(*not_on_windows_marker.args)
|
||||
|
||||
# Skip items marked "only windows" if they're run anywhere but Windows
|
||||
only_windows_marker = item.get_closest_marker(name="only_windows")
|
||||
if only_windows_marker and sys.platform != "win32":
|
||||
pytest.skip(*only_windows_marker.args)
|
||||
|
||||
|
||||
def _sequential_executor(*args, **kwargs):
|
||||
return spack.util.parallel.SequentialExecutor()
|
||||
|
Some files were not shown because too many files have changed in this diff.