Compare commits: `v1.0.0-alp...hs/feature` (524 commits)

*(Commit table omitted: only bare SHA1 hashes were present; the author, date, and message columns were empty.)*
**.github/workflows/audit.yaml** (1 change; vendored)
```diff
@@ -59,7 +59,6 @@ jobs:
       - name: Package audits (without coverage)
         if: ${{ runner.os == 'Windows' }}
         run: |
           . share/spack/setup-env.sh
           spack -d audit packages
           ./share/spack/qa/validate_last_exit.ps1
           spack -d audit configs
```
**.github/workflows/ci.yaml** (17 changes; vendored)
```diff
@@ -9,6 +9,7 @@ on:
     branches:
       - develop
       - releases/**
+  merge_group:

 concurrency:
   group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -25,13 +26,17 @@ jobs:
       packages: ${{ steps.filter.outputs.packages }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        if: ${{ github.event_name == 'push' }}
+        if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
         with:
           fetch-depth: 0
       # For pull requests it's not necessary to checkout the code
       - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
         id: filter
         with:
+          # For merge group events, compare against the target branch (main)
+          base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
+          # For merge group events, use the merge group head ref
+          ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
           # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
           # Don't run if we only modified packages in the
           # built-in repository or documentation
@@ -76,10 +81,11 @@ jobs:

   prechecks:
     needs: [ changes ]
-    uses: ./.github/workflows/valid-style.yml
+    uses: ./.github/workflows/prechecks.yml
     secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
+      with_packages: ${{ needs.changes.outputs.packages }}

   import-check:
     needs: [ changes ]
@@ -93,7 +99,7 @@ jobs:
       - name: Success
         run: |
           if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
-            echo "Unit tests failed."
+            echo "Unit tests failed."
             exit 1
           else
             exit 0
@@ -101,6 +107,7 @@ jobs:

   coverage:
     needs: [ unit-tests, prechecks ]
+    if: ${{ needs.changes.outputs.core }}
     uses: ./.github/workflows/coverage.yml
     secrets: inherit

@@ -113,10 +120,10 @@ jobs:
       - name: Status summary
        run: |
          if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
-           echo "Unit tests failed."
+           echo "Unit tests failed."
            exit 1
          elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
-           echo "Bootstrap tests failed."
+           echo "Bootstrap tests failed."
            exit 1
          else
            exit 0
```
```diff
@@ -1,4 +1,4 @@
-name: style
+name: prechecks

 on:
   workflow_call:
@@ -6,6 +6,9 @@ on:
     with_coverage:
       required: true
       type: string
+    with_packages:
+      required: true
+      type: string

 concurrency:
   group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -22,43 +25,56 @@ jobs:
       with:
         python-version: '3.13'
         cache: 'pip'
         cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
     - name: Install Python Packages
       run: |
         pip install --upgrade pip setuptools
         pip install -r .github/workflows/requirements/style/requirements.txt
     - name: vermin (Spack's Core)
-      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+      run: |
+        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
-      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
+      run: |
+        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos

   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
-        fetch-depth: 0
+        fetch-depth: 2
     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: '3.13'
         cache: 'pip'
         cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools
         pip install -r .github/workflows/requirements/style/requirements.txt
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
         . .github/workflows/bin/setup_git.sh
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
+        bin/spack style --base HEAD^1
+        bin/spack license verify
+        pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

   audit:
     uses: ./.github/workflows/audit.yaml
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.13'

+  verify-checksums:
+    if: ${{ inputs.with_packages == 'true' }}
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+      with:
+        fetch-depth: 2
+    - name: Verify Added Checksums
+      run: |
+        bin/spack ci verify-versions HEAD^1 HEAD

   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
@@ -86,21 +102,3 @@ jobs:
         spack -d bootstrap now --dev
         spack -d style -t black
         spack unit-test -V
-
-  # Further style checks from pylint
-  pylint:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: '3.13'
-        cache: 'pip'
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools pylint
-    - name: Pylint (Spack Core)
-      run: |
-        pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
```
```diff
@@ -1,7 +1,8 @@
 black==25.1.0
-clingo==5.7.1
-flake8==7.1.2
-isort==6.0.0
+clingo==5.8.0
+flake8==7.2.0
+isort==6.0.1
 mypy==1.15.0
-types-six==1.17.0.20241205
+types-six==1.17.0.20250403
 vermin==1.6.0
+pylint==3.3.6
```
**.github/workflows/unit_tests.yaml** (3 changes; vendored)
```diff
@@ -19,9 +19,6 @@ jobs:
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
-        - python-version: '3.6'
-          os: ubuntu-20.04
-          on_develop: ${{ github.ref == 'refs/heads/develop' }}
        - python-version: '3.7'
          os: ubuntu-22.04
          on_develop: ${{ github.ref == 'refs/heads/develop' }}
```
**.gitignore** (1 change; vendored)
```diff
@@ -201,7 +201,6 @@ tramp

 # Org-mode
 .org-id-locations
-*_archive

 # flymake-mode
 *_flymake.*
```
```diff
@@ -1,2 +0,0 @@
-concretizer:
-  static_analysis: true
```
```diff
@@ -50,8 +50,11 @@ packages:
     - spec: apple-libuuid@1353.100.2
       prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
   c:
-    require: apple-clang
+    prefer:
+    - apple-clang
   cxx:
-    require: apple-clang
+    prefer:
+    - apple-clang
   fortran:
-    require: gcc
+    prefer:
+    - gcc
```
```diff
@@ -19,20 +19,21 @@ packages:
     awk: [gawk]
     armci: [armcimpi]
     blas: [openblas, amdblis]
-    c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
-    cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
+    c: [gcc, llvm, intel-oneapi-compilers]
+    cxx: [gcc, llvm, intel-oneapi-compilers]
     D: [ldc]
     daal: [intel-oneapi-daal]
     elf: [elfutils]
     fftw-api: [fftw, amdfftw]
     flame: [libflame, amdlibflame]
-    fortran: [gcc, llvm]
+    fortran: [gcc, llvm, intel-oneapi-compilers]
     fortran-rt: [gcc-runtime, intel-oneapi-runtime]
     fuse: [libfuse]
     gl: [glx, osmesa]
     glu: [mesa-glu, openglu]
     golang: [go, gcc]
     go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     hip-lang: [llvm-amdgpu]
     iconv: [libiconv]
     ipp: [intel-oneapi-ipp]
     java: [openjdk, jdk]
```
```diff
@@ -20,3 +20,6 @@ packages:
     cxx: [msvc]
     mpi: [msmpi]
     gl: [wgl]
+  mpi:
+    require:
+    - one_of: [msmpi]
```
```diff
@@ -1291,55 +1291,61 @@ based on site policies.
 Variants
 ^^^^^^^^

-Variants are named options associated with a particular package. They are
-optional, as each package must provide default values for each variant it
-makes available. Variants can be specified using
-a flexible parameter syntax ``name=<value>``. For example,
-``spack install mercury debug=True`` will install mercury built with debug
-flags. The names of particular variants available for a package depend on
+Variants are named options associated with a particular package and are
+typically used to enable or disable certain features at build time. They
+are optional, as each package must provide default values for each variant
+it makes available.
+
+The names of variants available for a particular package depend on
 what was provided by the package author. ``spack info <package>`` will
 provide information on what build variants are available.

-For compatibility with earlier versions, variants which happen to be
-boolean in nature can be specified by a syntax that represents turning
-options on and off. For example, in the previous spec we could have
-supplied ``mercury +debug`` with the same effect of enabling the debug
-compile time option for the libelf package.
+There are different types of variants:

-Depending on the package a variant may have any default value. For
-``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
-with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
-you can turn it off by either adding ``-name`` or ``~name`` to the spec.
+1. Boolean variants. Typically used to enable or disable a feature at
+   compile time. For example, a package might have a ``debug`` variant that
+   can be explicitly enabled with ``+debug`` and disabled with ``~debug``.
+2. Single-valued variants. Often used to set defaults. For example, a package
+   might have a ``compression`` variant that determines the default
+   compression algorithm, which users could set to ``compression=gzip`` or
+   ``compression=zstd``.
+3. Multi-valued variants. A package might have a ``fabrics`` variant that
+   determines which network fabrics to support. Users could set this to
+   ``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
+   interfaces. The values are separated by commas.

-There are two syntaxes here because, depending on context, ``~`` and
-``-`` may mean different things. In most shells, the following will
-result in the shell performing home directory substitution:
+The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified
+fabrics, but other fabrics may be enabled as well. If the intent is to
+enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi``
+syntax should be used with the ``:=`` operator.

-.. code-block:: sh
+.. note::

-   mpileaks ~debug # shell may try to substitute this!
-   mpileaks~debug  # use this instead
+   In certain shells, the ``~`` character is expanded to the home
+   directory. To avoid these issues, avoid whitespace between the package
+   name and the variant:

-If there is a user called ``debug``, the ``~`` will be incorrectly
-expanded. In this situation, you would want to write ``libelf
--debug``. However, ``-`` can be ambiguous when included after a
-package name without spaces:
+   .. code-block:: sh

-.. code-block:: sh
+      mpileaks ~debug # shell may try to substitute this!
+      mpileaks~debug  # use this instead

-   mpileaks-debug # wrong!
-   mpileaks -debug # right
+   Alternatively, you can use the ``-`` character to disable a variant,
+   but be aware that this requires a space between the package name and
+   the variant:

-Spack allows the ``-`` character to be part of package names, so the
-above will be interpreted as a request for the ``mpileaks-debug``
-package, not a request for ``mpileaks`` built without ``debug``
-options. In this scenario, you should write ``mpileaks~debug`` to
-avoid ambiguity.
+   .. code-block:: sh

-When spack normalizes specs, it prints them out with no spaces boolean
-variants using the backwards compatibility syntax and uses only ``~``
-for disabled boolean variants. The ``-`` and spaces on the command
-line are provided for convenience and legibility.
+      mpileaks-debug  # wrong: refers to a package named "mpileaks-debug"
+      mpileaks -debug # right: refers to a package named mpileaks with debug disabled
+
+   As a last resort, ``debug=False`` can also be used to disable a boolean variant.


 """""""""""""""""""""""""""""""""""
 Variant propagation to dependencies
 """""""""""""""""""""""""""""""""""

 Spack allows variants to propagate their value to the package's
 dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
```
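Taken together, the variant syntaxes in this hunk can be sketched with a few command-line specs. This is an illustrative sketch only: the package and variant names (`mercury`, `hdf5`, `openmpi`, `mpileaks`) are placeholders, and whether a given package actually exposes these variants is an assumption.

```sh
spack install mercury+debug              # enable a boolean variant
spack install mercury~debug              # disable it (no space before '~')
spack install hdf5 compression=zstd      # single-valued variant
spack install openmpi fabrics=verbs,ofi  # multi-valued: at least these values
spack install openmpi fabrics:=verbs,ofi # multi-valued: exactly these values
spack install mpileaks++debug            # propagate +debug to dependencies
```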
```diff
@@ -1409,27 +1415,29 @@ that executables will run without the need to set ``LD_LIBRARY_PATH``.

 .. code-block:: yaml

-   compilers:
-     - compiler:
-         spec: gcc@4.9.3
-         paths:
-           cc: /opt/gcc/bin/gcc
-           c++: /opt/gcc/bin/g++
-           f77: /opt/gcc/bin/gfortran
-           fc: /opt/gcc/bin/gfortran
-         environment:
-           unset:
-             - BAD_VARIABLE
-           set:
-             GOOD_VARIABLE_NUM: 1
-             GOOD_VARIABLE_STR: good
-           prepend_path:
-             PATH: /path/to/binutils
-           append_path:
-             LD_LIBRARY_PATH: /opt/gcc/lib
-         extra_rpaths:
-           - /path/to/some/compiler/runtime/directory
-           - /path/to/some/other/compiler/runtime/directory
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@4.9.3
+         prefix: /opt/gcc
+         extra_attributes:
+           compilers:
+             c: /opt/gcc/bin/gcc
+             cxx: /opt/gcc/bin/g++
+             fortran: /opt/gcc/bin/gfortran
+           environment:
+             unset:
+               - BAD_VARIABLE
+             set:
+               GOOD_VARIABLE_NUM: 1
+               GOOD_VARIABLE_STR: good
+             prepend_path:
+               PATH: /path/to/binutils
+             append_path:
+               LD_LIBRARY_PATH: /opt/gcc/lib
+           extra_rpaths:
+             - /path/to/some/compiler/runtime/directory
+             - /path/to/some/other/compiler/runtime/directory


 ^^^^^^^^^^^^^^^^^^^^^^^
```
```diff
@@ -63,7 +63,6 @@ on these ideas for each distinct build system that Spack supports:
    build_systems/cudapackage
    build_systems/custompackage
    build_systems/inteloneapipackage
-   build_systems/intelpackage
    build_systems/rocmpackage
    build_systems/sourceforgepackage
```
```diff
@@ -33,9 +33,6 @@ For more information on a specific package, do::

    spack info --all <package-name>

-Intel no longer releases new versions of Parallel Studio, which can be
-used in Spack via the :ref:`intelpackage`. All of its components can
-now be found in oneAPI.

 Examples
 ========
@@ -50,34 +47,8 @@ Install the oneAPI compilers::

    spack install intel-oneapi-compilers

-Add the compilers to your ``compilers.yaml`` so spack can use them::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
-
-Verify that the compilers are available::
-
-   spack compiler list
-
-Note that 2024 and later releases do not include ``icc``. Before 2024,
-the package layout was different::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
-
-The ``intel-oneapi-compilers`` package includes 2 families of
-compilers:
-
-* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
-  compilers. 2024 and later releases contain ``ifort``, but not
-  ``icc`` and ``icpc``.
-* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
-  compilers based on LLVM.
-
-To build the ``patchelf`` Spack package with ``icc``, do::
-
-   spack install patchelf%intel
-
-To build with with ``icx``, do ::
+To build the ``patchelf`` Spack package with ``icx``, do::

    spack install patchelf%oneapi

@@ -92,15 +63,6 @@ Install the oneAPI compilers::

    spack install intel-oneapi-compilers

-Add the compilers to your ``compilers.yaml`` so Spack can use them::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
-
-Verify that the compilers are available::
-
-   spack compiler list
-
 Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::

    git clone https://github.com/spack/spack-configs
@@ -149,7 +111,7 @@ Compilers
 ---------

 To use the compilers, add some information about the installation to
-``compilers.yaml``. For most users, it is sufficient to do::
+``packages.yaml``. For most users, it is sufficient to do::

    spack compiler add /opt/intel/oneapi/compiler/latest/bin

@@ -157,7 +119,7 @@ Adapt the paths above if you did not install the tools in the default
 location. After adding the compilers, using them is the same
 as if you had installed the ``intel-oneapi-compilers`` package.
 Another option is to manually add the configuration to
-``compilers.yaml`` as described in :ref:`Compiler configuration
+``packages.yaml`` as described in :ref:`Compiler configuration
 <compiler-config>`.

 Before 2024, the directory structure was different::
@@ -200,15 +162,5 @@ You can also use Spack-installed libraries. For example::
 Will update your environment CPATH, LIBRARY_PATH, and other
 environment variables for building an application with oneMKL.

-More information
-================
-
-This section describes basic use of oneAPI, especially if it has
-changed compared to Parallel Studio. See :ref:`intelpackage` for more
-information on :ref:`intel-virtual-packages`,
-:ref:`intel-unrelated-packages`,
-:ref:`intel-integrating-external-libraries`, and
-:ref:`using-mkl-tips`.
-
-
 .. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html
```
*(File diff suppressed because it is too large.)*
```diff
@@ -12,8 +12,7 @@ The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPacka
 it provides standard variants, dependencies, and conflicts to facilitate building
 packages using GPUs though for AMD in this case.

-You can find the source for this package (and suggestions for setting up your
-``compilers.yaml`` and ``packages.yaml`` files) at
+You can find the source for this package (and suggestions for setting up your ``packages.yaml`` file) at
 `<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/rocm.py>`__.

 ^^^^^^^^
```
```diff
@@ -148,15 +148,16 @@ this can expose you to attacks. Use at your own risk.
 ``ssl_certs``
 --------------------

 Path to custom certificates for SSL verification. The value can be a
 filesystem path, or an environment variable that expands to an absolute file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.
 When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
 a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
-in the subprocess calling ``curl``.
-If ``url_fetch_method:urllib`` then files and directories are supported i.e.
+in the subprocess calling ``curl``. If additional ``curl`` arguments are required,
+they can be set in the config, e.g. ``url_fetch_method:'curl -k -q'``.
+If ``url_fetch_method:urllib`` then files and directories are supported i.e.
 ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
 will work.
 In all cases the expanded path must be absolute for Spack to use the certificates.
```
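As a minimal sketch of the setting described in this hunk (assuming the `curl` fetch method; the value shown is illustrative):

```yaml
config:
  url_fetch_method: curl
  # With curl, ssl_certs must expand to a single absolute file path;
  # Spack passes it to the curl subprocess as CURL_CA_BUNDLE.
  ssl_certs: $SSL_CERT_FILE
```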
```diff
@@ -11,9 +11,10 @@ Configuration Files
 Spack has many configuration files. Here is a quick list of them, in
 case you want to skip directly to specific docs:

-* :ref:`compilers.yaml <compiler-config>`
+* :ref:`packages.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
+* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`
@@ -45,6 +46,12 @@ Each Spack configuration file is nested under a top-level section
 corresponding to its name. So, ``config.yaml`` starts with ``config:``,
 ``mirrors.yaml`` starts with ``mirrors:``, etc.

+.. tip::
+
+   Validation and autocompletion of Spack config files can be enabled in
+   your editor with the YAML language server. See `spack/schemas
+   <https://github.com/spack/schemas>`_ for more information.
+
 .. _configuration-scopes:

 --------------------
@@ -94,7 +101,7 @@ are six configuration scopes. From lowest to highest:
    precedence over all other scopes.

 Each configuration directory may contain several configuration files,
-such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
+such as ``config.yaml``, ``packages.yaml``, or ``mirrors.yaml``. When
 configurations conflict, settings from higher-precedence scopes override
 lower-precedence settings.
```
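For example, the nesting rule means every file repeats its own name as the top-level key. A minimal sketch of a `config.yaml` (the `install_tree` option is just an illustrative setting, not part of this diff):

```yaml
# config.yaml: contents nest under the file's own top-level section
config:
  install_tree:
    root: $spack/opt/spack
```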
|
@@ -457,6 +457,13 @@ developed package in the environment are concretized to match the
|
||||
version (and other constraints) passed as the spec argument to the
|
||||
``spack develop`` command.
|
||||
|
||||
When working deep in the graph it is often desirable to have multiple specs marked
|
||||
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
|
||||
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
|
||||
to ensure that all the dependents of the initial spec you provide are also marked
|
||||
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
|
||||
so the graph can be traversed from the supplied spec all the way to the root specs.
|
||||
|
||||
For packages with ``git`` attributes, git branches, tags, and commits can
|
||||
also be used as valid concrete versions (see :ref:`version-specifier`).
|
||||
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
||||
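A hypothetical session using the new `--recursive` flag might look as follows; the package name and version are illustrative, and the environment is concretized first because the flag requires it:

```console
$ spack env activate .
$ spack concretize
$ spack develop --recursive foo@1.2
```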
```diff
@@ -660,34 +667,56 @@ a ``packages.yaml`` file) could contain:

    # ...
    packages:
      all:
-       compiler: [intel]
+       providers:
+         mpi: [openmpi]
    # ...

-This configuration sets the default compiler for all packages to
-``intel``.
+This configuration sets the default mpi provider to be openmpi.

 ^^^^^^^^^^^^^^^^^^^^^^^
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^

-Spack environments allow an ``include`` heading in their yaml
-schema. This heading pulls in external configuration files and applies
-them to the environment.
+Spack environments allow an ``include`` heading in their yaml schema.
+This heading pulls in external configuration files and applies them to
+the environment.

 .. code-block:: yaml

    spack:
      include:
-     - relative/path/to/config.yaml
-     - https://github.com/path/to/raw/config/compilers.yaml
-     - /absolute/path/to/packages.yaml
+     - environment/relative/path/to/config.yaml
+     - path: https://github.com/path/to/raw/config/compilers.yaml
+       sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
+     - path: /path/to/$os/$target/environment
+       optional: true
+     - path: /path/to/os-specific/config-dir
+       when: os == "ventura"

+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. (See
+:ref:`include-yaml` for more information on optional and conditional entries.)
+
+Files are listed using paths to individual files or directories containing them.
+Path entries may be absolute or relative to the environment or specified as
+URLs. URLs to individual files must link to the **raw** form of the file's
+contents (e.g., `GitHub
+<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
+or `GitLab
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_) **and** include a valid sha256 for the file.
+Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
+supported. Spack-specific, environment and user path variables can be used.
+(See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.
+
-Environments can include files or URLs. File paths can be relative or
-absolute. URLs include the path to the text for individual files or
-can be the path to a directory containing configuration files.
-Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
-schemes). Spack-specific, environment and user path variables may be
-used in these paths. See :ref:`config-file-variables` for more information.

 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence
```
```diff
@@ -1,161 +0,0 @@
-spack:
-  definitions:
-    - compiler-pkgs:
-        - 'llvm+clang@6.0.1 os=centos7'
-        - 'gcc@6.5.0 os=centos7'
-        - 'llvm+clang@6.0.1 os=ubuntu18.04'
-        - 'gcc@6.5.0 os=ubuntu18.04'
-    - pkgs:
-        - readline@7.0
-        # - xsdk@0.4.0
-    - compilers:
-        - '%gcc@5.5.0'
-        - '%gcc@6.5.0'
-        - '%gcc@7.3.0'
-        - '%clang@6.0.0'
-        - '%clang@6.0.1'
-    - oses:
-        - os=ubuntu18.04
-        - os=centos7
-
-  specs:
-    - matrix:
-        - [$pkgs]
-        - [$compilers]
-        - [$oses]
-      exclude:
-        - '%gcc@7.3.0 os=centos7'
-        - '%gcc@5.5.0 os=ubuntu18.04'
-
-  mirrors:
-    cloud_gitlab: https://mirror.spack.io
-
-  compilers:
-    # The .gitlab-ci.yml for this project picks a Docker container which does
-    # not have any compilers pre-built and ready to use, so we need to fake the
-    # existence of those here.
-    - compiler:
-        operating_system: centos7
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: gcc@5.5.0
-        target: x86_64
-    - compiler:
-        operating_system: centos7
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: gcc@6.5.0
-        target: x86_64
-    - compiler:
-        operating_system: centos7
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: clang@6.0.0
-        target: x86_64
-    - compiler:
-        operating_system: centos7
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: clang@6.0.1
-        target: x86_64
-
-    - compiler:
-        operating_system: ubuntu18.04
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: clang@6.0.0
-        target: x86_64
-    - compiler:
-        operating_system: ubuntu18.04
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: clang@6.0.1
-        target: x86_64
-    - compiler:
-        operating_system: ubuntu18.04
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: gcc@6.5.0
-        target: x86_64
-    - compiler:
-        operating_system: ubuntu18.04
-        modules: []
-        paths:
-          cc: /not/used
-          cxx: /not/used
-          f77: /not/used
-          fc: /not/used
-        spec: gcc@7.3.0
-        target: x86_64
-
-  gitlab-ci:
-    bootstrap:
-      - name: compiler-pkgs
-        compiler-agnostic: true
-    mappings:
-      - # spack-cloud-ubuntu
-        match:
-          # these are specs, if *any* match the spec under consideration, this
-          # 'mapping' will be used to generate the CI job
-          - os=ubuntu18.04
-        runner-attributes:
-          # 'tags' and 'image' go directly onto the job, 'variables' will
-          # be added to what we already necessarily create for the job as
-          # a part of the CI workflow
-          tags:
-            - spack-k8s
-          image:
-            name: scottwittenburg/spack_builder_ubuntu_18.04
-            entrypoint: [""]
-      - # spack-cloud-centos
-        match:
-          # these are specs, if *any* match the spec under consideration, this
-          # 'mapping' will be used to generate the CI job
-          - 'os=centos7'
-        runner-attributes:
-          tags:
-            - spack-k8s
-          image:
-            name: scottwittenburg/spack_builder_centos_7
-            entrypoint: [""]
-
-  cdash:
-    build-group: Release Testing
-    url: http://cdash
-    project: Spack Testing
-    site: Spack Docker-Compose Workflow
-
-  repos: []
-  upstreams: {}
-  modules:
-    enable: []
-  packages: {}
-  config: {}
```
```diff
@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.

 .. note::

-   As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
+   As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.

 The following set of criteria (from lowest to highest precedence) explain
 common cases where concretization output may seem surprising at first.
@@ -56,7 +56,19 @@ common cases where concretization output may seem surprising at first.
       concretizer:
         reuse: dependencies  # other options are 'true' and 'false'

-3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
+3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
+   are higher priority than reuse, and can be used to strongly prefer a specific version
+   or variant, without erroring out if it's not possible. Strong preferences are specified
+   as follows:
+
+   .. code-block:: yaml
+
+      packages:
+        foo:
+          prefer:
+          - "@1.1: ~mpi"
+
+4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
    and constraints from the command line as well as ``package.py`` files override all
    of the above. Requirements are specified as follows:
@@ -66,6 +78,8 @@ common cases where concretization output may seem surprising at first.
       foo:
         require:
         - "@1.2: +mpi"
+        conflicts:
+        - "@1.4"

 Requirements and constraints restrict the set of possible solutions, while reuse
 behavior and preferences influence what an optimal solution looks like.
```
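Combining the two snippets from this hunk into one sketch makes the contrast explicit (the version numbers are illustrative):

```yaml
packages:
  foo:
    require:
    - "@1.2: +mpi"   # hard constraint: no solution may violate it
    prefer:
    - "@1.4:"        # soft preference: honored when possible, dropped otherwise
```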
```diff
@@ -254,12 +254,11 @@ directory.
 Compiler configuration
 ----------------------

-Spack has the ability to build packages with multiple compilers and
-compiler versions. Compilers can be made available to Spack by
-specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
-or automatically by running ``spack compiler find``, but for
-convenience Spack will automatically detect compilers the first time
-it needs them.
+Spack has the ability to build packages with multiple compilers and compiler versions.
+Compilers can be made available to Spack by specifying them manually in ``packages.yaml``,
+or automatically by running ``spack compiler find``.
+For convenience, Spack will automatically detect compilers the first time it needs them,
+if none is available.

 .. _cmd-spack-compilers:
```
```diff
@@ -274,16 +273,11 @@ compilers`` or ``spack compiler list``:

    $ spack compilers
    ==> Available compilers
-   -- gcc ---------------------------------------------------------
-   gcc@4.9.0  gcc@4.8.0  gcc@4.7.0  gcc@4.6.2  gcc@4.4.7
-   gcc@4.8.2  gcc@4.7.1  gcc@4.6.3  gcc@4.6.1  gcc@4.1.2
-   -- intel -------------------------------------------------------
-   intel@15.0.0  intel@14.0.0  intel@13.0.0  intel@12.1.0  intel@10.0
-   intel@14.0.3  intel@13.1.1  intel@12.1.5  intel@12.0.4  intel@9.1
-   intel@14.0.2  intel@13.1.0  intel@12.1.3  intel@11.1
-   intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
-   -- clang -------------------------------------------------------
-   clang@3.4  clang@3.3  clang@3.2  clang@3.1
+   -- gcc ubuntu20.04-x86_64 ---------------------------------------
+   gcc@9.4.0  gcc@8.4.0  gcc@10.5.0
+
+   -- llvm ubuntu20.04-x86_64 --------------------------------------
+   llvm@12.0.0  llvm@11.0.0  llvm@10.0.0

 Any of these compilers can be used to build Spack packages. More on
 how this is done is in :ref:`sec-specs`.
```
```diff
@@ -302,16 +296,22 @@ An alias for ``spack compiler find``.
 ``spack compiler find``
 ^^^^^^^^^^^^^^^^^^^^^^^

-Lists the compilers currently available to Spack. If you do not see
-a compiler in this list, but you want to use it with Spack, you can
-simply run ``spack compiler find`` with the path to where the
-compiler is installed. For example:
+If you do not see a compiler in the list shown by:

 .. code-block:: console

-   $ spack compiler find /usr/local/tools/ic-13.0.079
-   ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
-       intel@13.0.079
+   $ spack compiler list
+
+but you want to use it with Spack, you can simply run ``spack compiler find`` with the
+path to where the compiler is installed. For example:
+
+.. code-block:: console
+
+   $ spack compiler find /opt/intel/oneapi/compiler/2025.1/bin/
+   ==> Added 1 new compiler to /home/user/.spack/packages.yaml
+       intel-oneapi-compilers@2025.1.0
+   ==> Compilers are defined in the following files:
+       /home/user/.spack/packages.yaml

 Or you can run ``spack compiler find`` with no arguments to force
 auto-detection. This is useful if you do not know where compilers are
```
```diff
@@ -322,7 +322,7 @@ installed, but you know that new compilers have been added to your

    $ module load gcc/4.9.0
    $ spack compiler find
-   ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
+   ==> Added 1 new compiler to /home/user/.spack/packages.yaml
       gcc@4.9.0

 This loads the environment module for gcc-4.9.0 to add it to
```
```diff
@@ -331,7 +331,7 @@ This loads the environment module for gcc-4.9.0 to add it to
 .. note::

    By default, spack does not fill in the ``modules:`` field in the
-   ``compilers.yaml`` file. If you are using a compiler from a
+   ``packages.yaml`` file. If you are using a compiler from a
    module, then you should add this field manually.
    See the section on :ref:`compilers-requiring-modules`.
```
```diff
@@ -341,91 +341,82 @@ This loads the environment module for gcc-4.9.0 to add it to
 ``spack compiler info``
 ^^^^^^^^^^^^^^^^^^^^^^^

-If you want to see specifics on a particular compiler, you can run
-``spack compiler info`` on it:
+If you want to see additional information on some specific compilers, you can run ``spack compiler info`` on it:

 .. code-block:: console

-   $ spack compiler info intel@15
-   intel@15.0.0:
-     paths:
-       cc = /usr/local/bin/icc-15.0.090
-       cxx = /usr/local/bin/icpc-15.0.090
-       f77 = /usr/local/bin/ifort-15.0.090
-       fc = /usr/local/bin/ifort-15.0.090
-     modules = []
-     operating_system = centos6
-     ...
+   $ spack compiler info gcc
+   gcc@=8.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
+     prefix: /usr
+     compilers:
+       c: /usr/bin/gcc-8
+       cxx: /usr/bin/g++-8
+       fortran: /usr/bin/gfortran-8

-This shows which C, C++, and Fortran compilers were detected by Spack.
-Notice also that we didn't have to be too specific about the
-version. We just said ``intel@15``, and information about the only
-matching Intel compiler was displayed.
+   gcc@=9.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
+     prefix: /usr
+     compilers:
+       c: /usr/bin/gcc
+       cxx: /usr/bin/g++
+       fortran: /usr/bin/gfortran
+
+   gcc@=10.5.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
+     prefix: /usr
+     compilers:
+       c: /usr/bin/gcc-10
+       cxx: /usr/bin/g++-10
+       fortran: /usr/bin/gfortran-10
+
+This shows the details of the compilers that were detected by Spack.
+Notice also that we didn't have to be too specific about the version. We just said ``gcc``, and we got information
+about all the matching compilers.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Manual compiler configuration
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-If auto-detection fails, you can manually configure a compiler by
-editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
-``spack config edit compilers``, which will open the file in
+If auto-detection fails, you can manually configure a compiler by editing your ``~/.spack/packages.yaml`` file.
+You can do this by running ``spack config edit packages``, which will open the file in
 :ref:`your favorite editor <controlling-the-editor>`.

-Each compiler configuration in the file looks like this:
+Each compiler has an "external" entry in the file with some ``extra_attributes``:

 .. code-block:: yaml

-   compilers:
-     - compiler:
-         modules: []
-         operating_system: centos6
-         paths:
-           cc: /usr/local/bin/icc-15.0.024-beta
-           cxx: /usr/local/bin/icpc-15.0.024-beta
-           f77: /usr/local/bin/ifort-15.0.024-beta
-           fc: /usr/local/bin/ifort-15.0.024-beta
-         spec: intel@15.0.0
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@10.5.0 languages='c,c++,fortran'
+         prefix: /usr
+         extra_attributes:
+           compilers:
+             c: /usr/bin/gcc-10
+             cxx: /usr/bin/g++-10
+             fortran: /usr/bin/gfortran-10

-For compilers that do not support Fortran (like ``clang``), put
-``None`` for ``f77`` and ``fc``:
+The compiler executables are listed under ``extra_attributes:compilers``, and are keyed by language.
+Once you save the file, the configured compilers will show up in the list displayed by ``spack compilers``.

-.. code-block:: yaml
-
-   compilers:
-     - compiler:
-         modules: []
-         operating_system: centos6
-         paths:
-           cc: /usr/bin/clang
-           cxx: /usr/bin/clang++
-           f77: None
-           fc: None
-         spec: clang@3.3svn
-
-Once you save the file, the configured compilers will show up in the
-list displayed by ``spack compilers``.
-
-You can also add compiler flags to manually configured compilers. These
-flags should be specified in the ``flags`` section of the compiler
-specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
+You can also add compiler flags to manually configured compilers. These flags should be specified in the
+``flags`` section of the compiler specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
 ``cppflags``, ``ldflags``, and ``ldlibs``. For example:

 .. code-block:: yaml

-   compilers:
-     - compiler:
-         modules: []
-         operating_system: centos6
-         paths:
-           cc: /usr/bin/gcc
-           cxx: /usr/bin/g++
-           f77: /usr/bin/gfortran
-           fc: /usr/bin/gfortran
-         flags:
-           cflags: -O3 -fPIC
-           cxxflags: -O3 -fPIC
-           cppflags: -O3 -fPIC
-         spec: gcc@4.7.2
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@10.5.0 languages='c,c++,fortran'
+         prefix: /usr
+         extra_attributes:
+           compilers:
+             c: /usr/bin/gcc-10
+             cxx: /usr/bin/g++-10
+             fortran: /usr/bin/gfortran-10
+           flags:
+             cflags: -O3 -fPIC
+             cxxflags: -O3 -fPIC
+             cppflags: -O3 -fPIC

 These flags will be treated by spack as if they were entered from
 the command line each time this compiler is used. The compiler wrappers
```
@@ -440,95 +431,44 @@ These variables should be specified in the ``environment`` section of the compil
|
||||
specification. The operations available to modify the environment are ``set``, ``unset``,
|
||||
``prepend_path``, ``append_path``, and ``remove_path``. For example:

.. code-block:: yaml

-   compilers:
-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
-         cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
-         f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
-         fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
-       spec: oneapi@latest
-       environment:
-         set:
-           MKL_ROOT: "/path/to/mkl/root"
-         unset: # A list of environment variables to unset
-         - CC
-         prepend_path: # Similar for append|remove_path
-           LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
-
-.. note::
-
-   Spack is in the process of moving compilers from a separate
-   attribute to be handled like all other packages. As part of this
-   process, the ``compilers.yaml`` section will eventually be replaced
-   by configuration in the ``packages.yaml`` section. This new
-   configuration is now available, although it is not yet the default
-   behavior.
-
-Compilers can also be configured as external packages in the
-``packages.yaml`` config file. Any external package for a compiler
-(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
-assuming the paths to the compiler executables are determinable from
-the prefix.
-
-If the paths to the compiler executable are not determinable from the
-prefix, you can add them to the ``extra_attributes`` field. Similarly,
-all other fields from the compilers config can be added to the
-``extra_attributes`` field for an external representing a compiler.
-
-Note that the format for the ``paths`` field in the
-``extra_attributes`` section is different than in the ``compilers``
-config. For compilers configured as external packages, the section is
-named ``compilers`` and the dictionary maps language names (``c``,
-``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
-``fc``, and ``f77``.
-
-.. code-block:: yaml
-
-   packages:
-     gcc:
-       external:
-       - spec: gcc@12.2.0 arch=linux-rhel8-skylake
-         prefix: /usr
-         extra_attributes:
-           environment:
-             set:
-               GCC_ROOT: /usr
-     llvm:
-       external:
-       - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
-         prefix: /usr
-         extra_attributes:
-           compilers:
-             c: /usr/bin/clang-with-suffix
-             cxx: /usr/bin/clang++-with-extra-info
-             fortran: /usr/bin/gfortran
-           extra_rpaths:
-           - /usr/lib/llvm/
+   packages:
+     intel-oneapi-compilers:
+       externals:
+       - spec: intel-oneapi-compilers@2025.1.0
+         prefix: /opt/intel/oneapi
+         extra_attributes:
+           compilers:
+             c: /opt/intel/oneapi/compiler/2025.1/bin/icx
+             cxx: /opt/intel/oneapi/compiler/2025.1/bin/icpx
+             fortran: /opt/intel/oneapi/compiler/2025.1/bin/ifx
+           environment:
+             set:
+               MKL_ROOT: "/path/to/mkl/root"
+             unset: # A list of environment variables to unset
+             - CC
+             prepend_path: # Similar for append|remove_path
+               LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh

^^^^^^^^^^^^^^^^^^^^^^^
Build Your Own Compiler
^^^^^^^^^^^^^^^^^^^^^^^

-If you are particular about which compiler/version you use, you might
-wish to have Spack build it for you. For example:
+If you are particular about which compiler/version you use, you might wish to have Spack build it for you.
+For example:

.. code-block:: console

-   $ spack install gcc@4.9.3
+   $ spack install gcc@14+binutils

-Once that has finished, you will need to add it to your
-``compilers.yaml`` file. You can then set Spack to use it by default
-by adding the following to your ``packages.yaml`` file:
+Once the compiler is installed, you can start using it without additional configuration:

-.. code-block:: yaml
+.. code-block:: console

-   packages:
-     all:
-       compiler: [gcc@4.9.3]
+   $ spack install hdf5~mpi %gcc@14

+The same holds true for compilers that are made available from buildcaches, when reusing them is allowed.
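If you want the concretizer to prefer the newly built compiler for every package, one option is a preference in ``packages.yaml``. The snippet below is a sketch assuming the ``require`` syntax; adapt it to your Spack version:

.. code-block:: yaml

   packages:
     all:
       require:
       - "%gcc@14"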

.. _compilers-requiring-modules:

@@ -536,30 +476,26 @@ by adding the following to your ``packages.yaml`` file:

^^^^^^^^^^^^^^^^^^^^^^^^^^^
Compilers Requiring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Many installed compilers will work regardless of the environment they
-are called with. However, some installed compilers require
-``$LD_LIBRARY_PATH`` or other environment variables to be set in order
-to run; this is typical for Intel and other proprietary compilers.
+Many installed compilers will work regardless of the environment they are called with.
+However, some installed compilers require environment variables to be set in order to run;
+this is typical for Intel and other proprietary compilers.

-In such a case, you should tell Spack which module(s) to load in order
-to run the chosen compiler (If the compiler does not come with a
-module file, you might consider making one by hand). Spack will load
-this module into the environment ONLY when the compiler is run, and
-NOT in general for a package's ``install()`` method. See, for
-example, this ``compilers.yaml`` file:
+On typical HPC clusters, these environment modifications are usually delegated to some "module" system.
+In such a case, you should tell Spack which module(s) to load in order to run the chosen compiler:

.. code-block:: yaml

-   compilers:
-   - compiler:
-       modules: [other/comp/gcc-5.3-sp3]
-       operating_system: SuSE11
-       paths:
-         cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
-         cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
-         f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
-         fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
-       spec: gcc@5.3.0
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@10.5.0 languages='c,c++,fortran'
+         prefix: /opt/compilers
+         extra_attributes:
+           compilers:
+             c: /opt/compilers/bin/gcc-10
+             cxx: /opt/compilers/bin/g++-10
+             fortran: /opt/compilers/bin/gfortran-10
+         modules: [gcc/10.5.0]
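Module names are site-specific; a quick way to find the right name for the ``modules:`` entry, and to smoke-test the result, is sketched below (``gcc/10.5.0`` is a placeholder):

.. code-block:: console

   $ module avail gcc
   $ spack install zlib-ng %gcc@10.5.0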

Some compilers require special environment settings to be loaded not just
to run, but also to execute the code they build, breaking packages that

@@ -580,7 +516,7 @@ Licensed Compilers
^^^^^^^^^^^^^^^^^^

Some proprietary compilers require licensing to use. If you need to
-use a licensed compiler (eg, PGI), the process is similar to a mix of
+use a licensed compiler, the process is similar to a mix of
build your own, plus modules:

#. Create a Spack package (if it doesn't exist already) to install

@@ -590,24 +526,21 @@ build your own, plus modules:
   using Spack to load the module it just created, and running simple
   builds (eg: ``cc helloWorld.c && ./a.out``)

-#. Add the newly-installed compiler to ``compilers.yaml`` as shown
-   above.
+#. Add the newly-installed compiler to ``packages.yaml`` as shown above.

.. _mixed-toolchains:

-^^^^^^^^^^^^^^^^
-Mixed Toolchains
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Fortran compilers on macOS
+^^^^^^^^^^^^^^^^^^^^^^^^^^

Modern compilers typically come with related compilers for C, C++ and
Fortran bundled together. When possible, results are best if the same
compiler is used for all languages.

-In some cases, this is not possible. For example, starting with macOS El
-Capitan (10.11), many packages no longer build with GCC, but XCode
-provides no Fortran compilers. The user is therefore forced to use a
-mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
-Fortran.
+In some cases, this is not possible. For example, Xcode on macOS provides no Fortran compilers.
+The user is therefore forced to use a mixed toolchain: Xcode-provided Clang for C/C++ and e.g.
+GNU ``gfortran`` for Fortran.

#. You need to make sure that Xcode is installed. Run the following command:

@@ -660,45 +593,25 @@ Fortran.

   Note: the flag is ``-license``, not ``--license``.

-#. Run ``spack compiler find`` to locate Clang.
-
#. There are different ways to get ``gfortran`` on macOS. For example, you can
   install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
   gcc``), or from a `DMG installer
   <https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.

-#. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
-   the path to ``gfortran``:
+#. Run ``spack compiler find`` to locate both Apple-Clang and GCC.

-   .. code-block:: yaml
-
-      compilers:
-      - compiler:
-          # ...
-          paths:
-            cc: /usr/bin/clang
-            cxx: /usr/bin/clang++
-            f77: /path/to/bin/gfortran
-            fc: /path/to/bin/gfortran
-          spec: apple-clang@11.0.0
-
-   If you used Spack to install GCC, you can get the installation prefix by
-   ``spack location -i gcc`` (this will only work if you have a single version
-   of GCC installed). Whereas for Homebrew, GCC is installed in
-   ``/usr/local/Cellar/gcc/x.y.z``. With the DMG installer, the correct path
-   will be ``/usr/local/gfortran``.
+   Since languages in Spack are modeled as virtual packages, ``apple-clang`` will be used to provide
+   C and C++, while GCC will be used for Fortran.
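You can confirm the mixed toolchain the concretizer picked by inspecting a spec before installing; the package name below is just an example:

.. code-block:: console

   $ spack spec hdf5 +fortran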

^^^^^^^^^^^^^^^^^^^^^
Compiler Verification
^^^^^^^^^^^^^^^^^^^^^

-You can verify that your compilers are configured properly by installing a
-simple package. For example:
+You can verify that your compilers are configured properly by installing a simple package. For example:

.. code-block:: console

-   $ spack install zlib%gcc@5.3.0
+   $ spack install zlib-ng%gcc@5.3.0
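Once the install finishes, ``spack find`` groups installed specs by compiler, which makes it easy to confirm the right one was used (output illustrative):

.. code-block:: console

   $ spack find zlib-ng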

.. _vendor-specific-compiler-configuration:

@@ -707,9 +620,7 @@ simple package. For example:

Vendor-Specific Compiler Configuration
--------------------------------------

-With Spack, things usually "just work" with GCC. Not so for other
-compilers. This section provides details on how to get specific
-compilers working.
+This section provides details on how to get vendor-specific compilers working.

^^^^^^^^^^^^^^^
Intel Compilers
^^^^^^^^^^^^^^^

@@ -731,8 +642,8 @@ compilers:
   you have installed from the ``PATH`` environment variable.

   If you want use a version of ``gcc`` or ``g++`` other than the default
-  version on your system, you need to use either the ``-gcc-name``
-  or ``-gxx-name`` compiler option to specify the path to the version of
+  version on your system, you need to use either the ``--gcc-install-dir``
+  or ``--gcc-toolchain`` compiler option to specify the path to the version of
   ``gcc`` or ``g++`` that you want to use."

   -- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_

@@ -740,76 +651,12 @@ compilers:
Intel compilers may therefore be configured in one of two ways with
Spack: using modules, or using compiler flags.

-""""""""""""""""""""""""""
-Configuration with Modules
-""""""""""""""""""""""""""
-
-One can control which GCC is seen by the Intel compiler with modules.
-A module must be loaded both for the Intel Compiler (so it will run)
-and GCC (so the compiler can find the intended GCC). The following
-configuration in ``compilers.yaml`` illustrates this technique:
-
-.. code-block:: yaml
-
-   compilers:
-   - compiler:
-       modules: [gcc-4.9.3, intel-15.0.24]
-       operating_system: centos7
-       paths:
-         cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
-         cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
-         f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-         fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-       spec: intel@15.0.24.4.9.3
-
-.. note::
-
-   The version number on the Intel compiler is a combination of
-   the "native" Intel version number and the GNU compiler it is
-   targeting.
-
-""""""""""""""""""""""""""
-Command Line Configuration
-""""""""""""""""""""""""""
-
-One can also control which GCC is seen by the Intel compiler by adding
-flags to the ``icc`` command:
-
-#. Identify the location of the compiler you just installed:
-
-   .. code-block:: console
-
-      $ spack location --install-dir gcc
-      ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
-
-#. Set up ``compilers.yaml``, for example:
-
-   .. code-block:: yaml
-
-      compilers:
-      - compiler:
-          modules: [intel-15.0.24]
-          operating_system: centos7
-          paths:
-            cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
-            cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
-            f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-            fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-          flags:
-            cflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
-            cxxflags: -gxx-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
-            fflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
-          spec: intel@15.0.24.4.9.3

^^^
NAG
^^^

-The Numerical Algorithms Group provides a licensed Fortran compiler. Like Clang,
-this requires you to set up a :ref:`mixed-toolchains`. It is recommended to use
-GCC for your C/C++ compilers.
+The Numerical Algorithms Group provides a licensed Fortran compiler.
+It is recommended to use GCC for your C/C++ compilers.

The NAG Fortran compilers are a bit more strict than other compilers, and many
packages will fail to install with error messages like:

@@ -826,44 +673,40 @@ the command line:

   $ spack install openmpi fflags="-mismatch"

-Or it can be set permanently in your ``compilers.yaml``:
+Or it can be set permanently in your ``packages.yaml``:

.. code-block:: yaml

-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/gcc
-         cxx: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/g++
-         f77: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
-         fc: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
-       flags:
-         fflags: -mismatch
-       spec: nag@6.1
+   packages:
+     nag:
+       externals:
+       - spec: nag@6.1
+         prefix: /opt/nag/bin
+         extra_attributes:
+           compilers:
+             fortran: /opt/nag/bin/nagfor
+           flags:
+             fflags: -mismatch

---------------
System Packages
---------------

-Once compilers are configured, one needs to determine which
-pre-installed system packages, if any, to use in builds. This is
-configured in the file ``~/.spack/packages.yaml``. For example, to use
-an OpenMPI installed in /opt/local, one would use:
+Once compilers are configured, one needs to determine which pre-installed system packages,
+if any, to use in builds. These are also configured in the ``~/.spack/packages.yaml`` file.
+For example, to use an OpenMPI installed in /opt/local, one would use:

.. code-block:: yaml

-   packages:
-     openmpi:
-       externals:
-       - spec: openmpi@1.10.1
-         prefix: /opt/local
-       buildable: False
+   packages:
+     openmpi:
+       buildable: False
+       externals:
+       - spec: openmpi@1.10.1
+         prefix: /opt/local
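Because ``buildable: False`` forbids building OpenMPI from source, any spec that needs ``mpi`` must reuse the external. You can verify that the external is selected before installing (package name illustrative):

.. code-block:: console

   $ spack spec mpileaks ^openmpi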

-In general, Spack is easier to use and more reliable if it builds all of
-its own dependencies. However, there are several packages for which one
-commonly needs to use system versions:
+In general, *Spack is easier to use and more reliable if it builds all of its own dependencies*.
+However, there are several packages for which one commonly needs to use system versions:

^^^
MPI
^^^

@@ -876,8 +719,7 @@ you are unlikely to get a working MPI from Spack. Instead, use an
appropriate pre-installed MPI.

If you choose a pre-installed MPI, you should consider using the
-pre-installed compiler used to build that MPI; see above on
-``compilers.yaml``.
+pre-installed compiler used to build that MPI.

^^^^^^^
OpenSSL
^^^^^^^

@@ -1441,9 +1283,9 @@ To configure Spack, first run the following command inside the Spack console:

   spack compiler find

This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
-containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
+containing a ``packages.yaml`` file. On a fresh Windows install with the above packages
installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
-compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
+compiler will be integrated within the first version of MSVC present in the ``packages.yaml``
output.

Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.

@@ -23,7 +23,6 @@ components for use by dependent packages:

   packages:
     all:
-      compiler: [rocmcc@=5.3.0]
      variants: amdgpu_target=gfx90a
    hip:
      buildable: false

@@ -70,16 +69,15 @@ This is in combination with the following compiler definition:

.. code-block:: yaml

-   compilers:
-   - compiler:
-       spec: rocmcc@=5.3.0
-       paths:
-         cc: /opt/rocm-5.3.0/bin/amdclang
-         cxx: /opt/rocm-5.3.0/bin/amdclang++
-         f77: null
-         fc: /opt/rocm-5.3.0/bin/amdflang
-       operating_system: rhel8
-       target: x86_64
+   packages:
+     llvm-amdgpu:
+       externals:
+       - spec: llvm-amdgpu@=5.3.0
+         prefix: /opt/rocm-5.3.0
+         compilers:
+           c: /opt/rocm-5.3.0/bin/amdclang
+           cxx: /opt/rocm-5.3.0/bin/amdclang++
+           fortran: null
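With the external toolchain registered, a ROCm-enabled build can be requested directly; the package and GPU target below are placeholders:

.. code-block:: console

   $ spack install raja +rocm amdgpu_target=gfx90a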

This includes the following considerations:

lib/spack/docs/include_yaml.rst (new file, 65 lines)
@@ -0,0 +1,65 @@

.. Copyright Spack Project Developers. See COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _include-yaml:

===============================
Include Settings (include.yaml)
===============================

Spack allows you to include configuration files through ``include.yaml``.
Using the ``include:`` heading results in pulling in external configuration
information to be used by any Spack command.

Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional ones with the ``when`` clause. For
example,

.. code-block:: yaml

   include:
   - /path/to/a/required/config.yaml
   - path: /path/to/$os/$target/config
     optional: true
   - path: /path/to/os-specific/config-dir
     when: os == "ventura"

shows all three. The first entry, ``/path/to/a/required/config.yaml``,
indicates that the included ``config.yaml`` file is required (so must exist).
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
the path is only included if it exists. The condition ``os == "ventura"``
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
path is only included when the operating system (``os``) is ``ventura``.

The same conditions and variables in `Spec List References
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
can be used for conditional activation in the ``when`` clauses.

Included files can be specified by path or by their parent directory.
Paths may be absolute, relative (to the configuration file including the path),
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
schemes) are supported. Spack-specific, environment and user path variables
can be used. (See :ref:`config-file-variables` for more information.)
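For instance, variables let a single ``include.yaml`` serve several machines; both paths below are hypothetical:

.. code-block:: yaml

   include:
   - $spack/../shared/config.yaml
   - path: ~/.spack/$os/extra-config
     optional: true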

A ``sha256`` is required for remote file URLs and must be specified as follows:

.. code-block:: yaml

   include:
   - path: https://github.com/path/to/raw/config/compilers.yaml
     sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
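One way to compute the required checksum, assuming the standard ``curl`` and ``sha256sum`` tools are available:

.. code-block:: console

   $ curl -sL https://github.com/path/to/raw/config/compilers.yaml | sha256sum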

Additionally, remote file URLs must link to the **raw** form of the file's
contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).

.. warning::

   Recursive includes are not currently processed in a breadth-first manner
   so the value of a configuration option that is altered by multiple included
   files may not be what you expect. This will be addressed in a future
   update.

@@ -71,6 +71,7 @@ or refer to the full manual below.

    configuration
    config_yaml
+   include_yaml
    packages_yaml
    build_settings
    environments
@@ -486,6 +486,8 @@ present. For instance with a configuration like:

 you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

+.. _package-strong-preferences:
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Conflicts and strong preferences
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -555,14 +557,13 @@ preferences.

 FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`

-Most package preferences (``compilers``, ``target`` and ``providers``)
+The ``target`` and ``providers`` preferences
 can only be set globally under the ``all`` section of ``packages.yaml``:

 .. code-block:: yaml

    packages:
      all:
-       compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
        target: [x86_64_v3]
        providers:
          mpi: [mvapich2, mpich, openmpi]
@@ -1,13 +1,13 @@
-sphinx==8.2.1
+sphinx==8.2.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.26.1
+python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.4
-isort==6.0.0
+pytest==8.3.5
+isort==6.0.1
 black==25.1.0
-flake8==7.1.2
+flake8==7.2.0
 mypy==1.11.1
@@ -11,6 +11,7 @@
 import re
 import sys
 import traceback
+import types
 import typing
 import warnings
 from datetime import datetime, timedelta

@@ -707,14 +708,24 @@ def __init__(self, wrapped_object):


 class Singleton:
-    """Simple wrapper for lazily initialized singleton objects."""
+    """Wrapper for lazily initialized singleton objects."""

-    def __init__(self, factory):
+    def __init__(self, factory: Callable[[], object]):
         """Create a new singleton to be inited with the factory function.

+        Most factories will simply create the object to be initialized and
+        return it.
+
+        In some cases, e.g. when bootstrapping some global state, the singleton
+        may need to be initialized incrementally. If the factory returns a generator
+        instead of a regular object, the singleton will assign each result yielded by
+        the generator to the singleton instance. This allows methods called by
+        the factory in later stages to refer back to the singleton.
+
         Args:
-            factory (function): function taking no arguments that
-                creates the singleton instance.
+            factory (function): function taking no arguments that creates the
+                singleton instance.
         """
         self.factory = factory
         self._instance = None

@@ -722,7 +733,16 @@ def __init__(self, factory):
     @property
     def instance(self):
         if self._instance is None:
-            self._instance = self.factory()
+            instance = self.factory()
+
+            if isinstance(instance, types.GeneratorType):
+                # if it's a generator, assign every value
+                for value in instance:
+                    self._instance = value
+            else:
+                # if not, just assign the result like a normal singleton
+                self._instance = instance

         return self._instance
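# Sketch of what the generator support enables (names hypothetical): each
# `yield` publishes the partially built object, so helpers that run between
# yields can already read Singleton.instance without re-entering the factory.
#
#     def _config_factory():
#         cfg = {}
#         yield cfg                  # publish early
#         cfg["initialized"] = True  # later bootstrap stage
#         yield cfg
#
#     CONFIG = Singleton(_config_factory)
#     assert CONFIG.instance["initialized"]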

     def __getattr__(self, name):
@@ -10,9 +10,21 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "1.0.0-alpha.4"
+__version__ = "1.0.0.dev0"
 spack_version = __version__

+#: The current Package API version implemented by this version of Spack. The Package API defines
+#: the Python interface for packages as well as the layout of package repositories. The minor
+#: version is incremented when the package API is extended in a backwards-compatible way. The major
+#: version is incremented upon breaking changes. This version is changed independently from the
+#: Spack version.
+package_api_version = (1, 0)
+
+#: The minimum Package API version that this version of Spack is compatible with. This should
+#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
+#: compatibility with vX.0.
+min_package_api_version = (1, 0)
+

 def __try_int(v):
     try:

@@ -79,4 +91,6 @@ def get_short_version() -> str:
     "get_version",
     "get_spack_commit",
     "get_short_version",
+    "package_api_version",
+    "min_package_api_version",
 ]
lib/spack/spack/aliases.py (new file, 20 lines)
@@ -0,0 +1,20 @@

# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Alias names to convert legacy compilers to builtin packages and vice-versa"""

BUILTIN_TO_LEGACY_COMPILER = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}

LEGACY_COMPILER_TO_BUILTIN = {
    "clang": "llvm",
    "oneapi": "intel-oneapi-compilers",
    "rocmcc": "llvm-amdgpu",
    "intel": "intel-oneapi-compilers-classic",
    "arm": "acfl",
}
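# Illustrative helper (hypothetical, not part of this module): normalize a
# user-supplied compiler name to its builtin package name, falling back to
# the input when no legacy alias exists.
#
#     from spack.aliases import LEGACY_COMPILER_TO_BUILTIN
#
#     def to_builtin_package(name: str) -> str:
#         return LEGACY_COMPILER_TO_BUILTIN.get(name, name)
#
#     to_builtin_package("clang")  # -> "llvm"
#     to_builtin_package("gcc")    # -> "gcc" (no alias needed)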

@@ -636,14 +636,7 @@ def tarball_directory_name(spec):
     Return name of the tarball directory according to the convention
     <os>-<architecture>/<compiler>/<package>-<version>/
     """
-    if spec.original_spec_format() < 5:
-        compiler = spec.annotations.compiler_node_attribute
-        assert compiler is not None, "a compiler spec is expected"
-        return spec.format_path(
-            f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
-        )
-
-    return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")
+    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


 def tarball_name(spec, ext):

@@ -651,17 +644,9 @@ def tarball_name(spec, ext):
     Return the name of the tarfile according to the convention
     <os>-<architecture>-<package>-<dag_hash><ext>
     """
-    if spec.original_spec_format() < 5:
-        compiler = spec.annotations.compiler_node_attribute
-        assert compiler is not None, "a compiler spec is expected"
-        spec_formatted = (
-            f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
-            f"-{spec.version}-{spec.dag_hash()}"
-        )
-    else:
-        spec_formatted = (
-            f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
-        )
+    spec_formatted = spec.format_path(
+        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+    )
     return f"{spec_formatted}{ext}"
@@ -234,10 +234,6 @@ def _root_spec(spec_str: str) -> str:
     # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

-    if platform == "windows":
-        spec_str += " %msvc"
-    elif platform == "freebsd":
-        spec_str += " %clang"
     spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"

@@ -133,7 +133,7 @@ def mypy_root_spec() -> str:

 def black_root_spec() -> str:
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black@:24.1.0")
+    return _root_spec("py-black@:25.1.0")


 def flake8_root_spec() -> str:
@@ -36,9 +36,11 @@
 import multiprocessing
 import os
 import re
+import signal
 import sys
 import traceback
+import types
 import warnings
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain

@@ -113,7 +115,7 @@
 # set_wrapper_variables and used to pass parameters to
 # Spack's compiler wrappers.
 #
-SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_COMPILER_WRAPPER_PATH = "SPACK_COMPILER_WRAPPER_PATH"
 SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"

@@ -715,21 +717,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


-def load_external_modules(pkg):
-    """Traverse a package's spec DAG and load any external modules.
-
-    Traverse a package's dependencies and load any external modules
-    associated with them.
-
-    Args:
-        pkg (spack.package_base.PackageBase): package to load deps for
-    """
-    for dep in list(pkg.spec.traverse()):
-        external_modules = dep.external_modules or []
-        for external_module in external_modules:
-            load_module(external_module)
-
-
 def setup_package(pkg, dirty, context: Context = Context.BUILD):
     """Execute all environment setup routines."""
     if context not in (Context.BUILD, Context.TEST):

@@ -763,8 +750,10 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

     tty.debug("setup_package: adding compiler wrappers paths")
     env_by_name = env_mods.group_by_name()
-    for x in env_by_name["SPACK_ENV_PATH"]:
-        assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
+    for x in env_by_name["SPACK_COMPILER_WRAPPER_PATH"]:
+        assert isinstance(
+            x, PrependPath
+        ), "unexpected setting used for SPACK_COMPILER_WRAPPER_PATH"
         env_mods.prepend_path("PATH", x.value)

     # Check whether we want to force RPATH or RUNPATH

@@ -792,7 +781,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

     # Load modules on an already clean environment, just before applying Spack's
     # own environment modifications. This ensures Spack controls CC/CXX/... variables.
-    load_external_modules(pkg)
+    load_external_modules(setup_context)

     # Make sure nothing's strange about the Spack environment.
     validate(env_mods, tty.warn)

@@ -1089,6 +1078,21 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
         env.prepend_path("PATH", bin_dir)


+def load_external_modules(context: SetupContext) -> None:
+    """Traverse a package's spec DAG and load any external modules.
+
+    Traverse a package's dependencies and load any external modules
+    associated with them.
+
+    Args:
+        context: A populated SetupContext object
+    """
+    for spec, _ in context.external:
+        external_modules = spec.external_modules or []
+        for external_module in external_modules:
+            load_module(external_module)
+
+
 def _setup_pkg_and_run(
     serialized_pkg: "spack.subprocess_context.PackageInstallContext",
     function: Callable,

@@ -1187,11 +1191,9 @@ def _setup_pkg_and_run(
         if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
-                "The '{}' package cannot find an attribute while trying to {}. "
-                "This might be due to a change in Spack's package format "
-                "to support multiple build-systems for a single package. You can fix this "
-                "by updating the {} recipe, and you can also report the issue as a bug. "
-                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
+                "The '{}' package cannot find an attribute while trying to {}. You can fix this "
+                "by updating the {} recipe, and you can also report the issue as a build-error or "
+                "a bug at https://github.com/spack/spack/issues"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
             error_msg = "{}\n\n{}".format(str(e), error_msg)

@@ -1216,15 +1218,45 @@ def _setup_pkg_and_run(
             input_pipe.close()


-def start_build_process(pkg, function, kwargs):
+class BuildProcess:
+    def __init__(self, *, target, args) -> None:
+        self.p = multiprocessing.Process(target=target, args=args)
+
+    def start(self) -> None:
+        self.p.start()
+
+    def is_alive(self) -> bool:
+        return self.p.is_alive()
+
+    def join(self, *, timeout: Optional[int] = None):
+        self.p.join(timeout=timeout)
+
+    def terminate(self):
+        # Opportunity for graceful termination
+        self.p.terminate()
+        self.p.join(timeout=1)
+
+        # If the process didn't gracefully terminate, forcefully kill
+        if self.p.is_alive():
+            # TODO (python 3.6 removal): use self.p.kill() instead, consider removing this class
+            assert isinstance(self.p.pid, int), f"unexpected value for PID: {self.p.pid}"
+            os.kill(self.p.pid, signal.SIGKILL)
+        self.p.join()
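# Sketch of the intended call pattern (see start_build_process below): the
# parent joins with a timeout and escalates only if the child is still alive.
#
#     p = BuildProcess(target=child_fn, args=())
#     p.start()
#     p.join(timeout=timeout)
#     if p.is_alive():   # child exceeded its budget
#         p.terminate()  # SIGTERM first, SIGKILL after a 1s grace period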

+    @property
+    def exitcode(self):
+        return self.p.exitcode


+def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
     """Create a child process to do part of a spack build.

     Args:

         pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
-        function (typing.Callable): argless function to run in the child
-            process.
+        function (typing.Callable): argless function to run in the child process.
+        timeout: maximum time allowed to finish the execution of function

     Usage::
@@ -1252,14 +1284,14 @@ def child_fun():
     # Forward sys.stdin when appropriate, to allow toggling verbosity
     if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
         input_fd = Connection(os.dup(sys.stdin.fileno()))
-    mflags = os.environ.get("MAKEFLAGS", False)
-    if mflags:
+    mflags = os.environ.get("MAKEFLAGS")
+    if mflags is not None:
         m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
         if m:
             jobserver_fd1 = Connection(int(m.group(1)))
             jobserver_fd2 = Connection(int(m.group(2)))

-    p = multiprocessing.Process(
+    p = BuildProcess(
         target=_setup_pkg_and_run,
         args=(
             serialized_pkg,

@@ -1293,14 +1325,17 @@ def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
         return f"{typ} {abs(p.exitcode)}"

+    p.join(timeout=timeout)
+    if p.is_alive():
+        warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
+        p.terminate()
+        p.join()
+
     try:
         child_result = read_pipe.recv()
     except EOFError:
         p.join()
         raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")

     p.join()

     # If returns a StopPhase, raise it
     if isinstance(child_result, spack.error.StopPhase):
         # do not print
@@ -2,9 +2,10 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
+import enum
 import os
 import re
-from typing import Tuple
+from typing import Optional, Tuple

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

@@ -13,6 +14,7 @@
 import spack.spec
 import spack.util.prefix
 from spack.directives import depends_on
+from spack.util.executable import which_string

 from .cmake import CMakeBuilder, CMakePackage

@@ -178,6 +180,64 @@ def initconfig_compiler_entries(self):

         return entries

+    class Scheduler(enum.Enum):
+        LSF = enum.auto()
+        SLURM = enum.auto()
+        FLUX = enum.auto()
+
+    def get_scheduler(self) -> Optional[Scheduler]:
+        spec = self.pkg.spec
+
+        # Check for Spectrum-mpi, which always uses LSF, or an LSF MPI variant
+        if spec.satisfies("^spectrum-mpi") or spec["mpi"].satisfies("schedulers=lsf"):
+            return self.Scheduler.LSF
+
+        # Check for Slurm MPI variants
+        slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
+        if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
+            return self.Scheduler.SLURM
+
+        # TODO improve this when MPI implementations support flux
+        # Do this check last to avoid using a flux wrapper present next to Slurm/LSF schedulers
+        if which_string("flux") is not None:
+            return self.Scheduler.FLUX
+
+        return None
+
+    def get_mpi_exec(self) -> Optional[str]:
+        spec = self.pkg.spec
+        scheduler = self.get_scheduler()
+
+        if scheduler == self.Scheduler.LSF:
+            return which_string("lrun")
+
+        elif scheduler == self.Scheduler.SLURM:
+            if spec["mpi"].external:
+                return which_string("srun")
+            else:
+                return os.path.join(spec["slurm"].prefix.bin, "srun")
+
+        elif scheduler == self.Scheduler.FLUX:
+            flux = which_string("flux")
+            return f"{flux};run" if flux else None
+
+        elif hasattr(spec["mpi"].package, "mpiexec"):
+            return spec["mpi"].package.mpiexec
+
+        else:
+            mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
+            if not os.path.exists(mpiexec):
+                mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
+            return mpiexec
+
+    def get_mpi_exec_num_proc(self) -> str:
+        scheduler = self.get_scheduler()
+
+        if scheduler in [self.Scheduler.FLUX, self.Scheduler.LSF, self.Scheduler.SLURM]:
+            return "-n"
+        else:
+            return "-np"

     def initconfig_mpi_entries(self):
         spec = self.pkg.spec

@@ -197,27 +257,10 @@ def initconfig_mpi_entries(self):
         if hasattr(spec["mpi"], "mpifc"):
             entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

-        # Check for slurm
-        using_slurm = False
-        slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
-        if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
-            using_slurm = True
-
         # Determine MPIEXEC
-        if using_slurm:
-            if spec["mpi"].external:
-                # Heuristic until we have dependents on externals
-                mpiexec = "/usr/bin/srun"
-            else:
-                mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
-        elif hasattr(spec["mpi"].package, "mpiexec"):
-            mpiexec = spec["mpi"].package.mpiexec
-        else:
-            mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
-            if not os.path.exists(mpiexec):
-                mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
+        mpiexec = self.get_mpi_exec()

-        if not os.path.exists(mpiexec):
+        if mpiexec is None or not os.path.exists(mpiexec.split(";")[0]):
             msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
             entries.append("# {0}\n".format(msg))
             tty.warn(msg)

@@ -230,10 +273,7 @@ def initconfig_mpi_entries(self):
         entries.append(cmake_cache_path("MPIEXEC", mpiexec))

         # Determine MPIEXEC_NUMPROC_FLAG
-        if using_slurm:
-            entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-n"))
-        else:
-            entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-np"))
+        entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", self.get_mpi_exec_num_proc()))

         return entries
@@ -276,23 +316,18 @@ def initconfig_hardware_entries(self):
             entries.append("# ROCm")
             entries.append("#------------------{0}\n".format("-" * 30))

-            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
-            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            llvm_bin = spec["llvm-amdgpu"].prefix.bin
-            llvm_prefix = spec["llvm-amdgpu"].prefix
-            # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
-            # others point to /<path>/rocm-<ver>/llvm
-            if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
-                llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
-            entries.append(
-                cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
-            )
+            rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
+            entries.append(cmake_cache_path("ROCM_PATH", rocm_root))

             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
                 entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
-                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

+            llvm_bin = spec["llvm-amdgpu"].prefix.bin
+            entries.append(
+                cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++"))
+            )

         if spec.satisfies("%gcc"):
             entries.append(

@@ -301,6 +336,15 @@ def initconfig_hardware_entries(self):
                 )
             )

+        # Extra definitions that might be required in other cases
+        if not spec.satisfies("^blt"):
+            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
+
+            if archs[0] != "none":
+                arch_str = ";".join(archs)
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
+
         return entries

     def std_initconfig_entries(self):
@@ -45,7 +45,7 @@ class CompilerPackage(spack.package_base.PackageBase):
     compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

     #: Relative path to compiler wrappers
-    link_paths: Dict[str, str] = {}
+    compiler_wrapper_link_paths: Dict[str, str] = {}

     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)

@@ -159,7 +159,7 @@ def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
     #: Flag to activate OpenMP support
     openmp_flag: str = "-fopenmp"

-    required_libs: List[str] = []
+    implicit_rpath_libs: List[str] = []

     def standard_flag(self, *, language: str, standard: str) -> str:
         """Returns the flag used to enforce a given standard for a language"""

@@ -199,10 +199,21 @@ def cxx(self) -> Optional[str]:
             return self.spec.extra_attributes["compilers"].get("cxx", None)
         return self._cxx_path()

+    @property
+    def hip(self) -> Optional[str]:
+        assert self.spec.concrete, "cannot retrieve HIP compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("hip", None)
+        return self._hip_path()
+
     def _cxx_path(self) -> Optional[str]:
         """Returns the path to the C++ compiler, if the package was installed by Spack"""
         return None

+    def _hip_path(self) -> Optional[str]:
+        """Returns the path to the HIP compiler, if the package was installed by Spack"""
+        return self._cxx_path()
+
     @property
     def fortran(self):
         assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
@@ -311,4 +311,4 @@ def ld_flags(self):


 #: Tuple of Intel math libraries, exported to packages
-INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio")
+INTEL_MATH_LIBRARIES = ("intel-oneapi-mkl",)
@@ -77,7 +77,7 @@
 import os

 import spack.variant
-from spack.directives import conflicts, depends_on, variant
+from spack.directives import conflicts, depends_on, requires, variant
 from spack.package_base import PackageBase
 from spack.util.environment import EnvironmentModifications

@@ -140,9 +140,7 @@ class ROCmPackage(PackageBase):
         when="+rocm",
     )

-    depends_on("llvm-amdgpu", type="build", when="+rocm")
-    depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
+    depends_on("hip-lang", type="build", when="+rocm")

     # need amd gpu type for rocm builds
     conflicts("amdgpu_target=none", when="+rocm")

@@ -183,14 +181,14 @@ def asan_on(self, env: EnvironmentModifications):

     # Add compiler minimum versions based on the first release where the
     # processor is included in llvm/lib/Support/TargetParser.cpp
-    depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
-    depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
-    depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
-    depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
-    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
-    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")
-    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1102")
-    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1103")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1102")
+    requires("%[virtuals=hip-lang] llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1103")

     # Compiler conflicts
@@ -6,6 +6,7 @@
 import codecs
 import json
 import os
+import pathlib
 import re
 import shutil
 import stat

@@ -13,7 +14,7 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set
+from typing import Callable, Dict, List, Optional, Set, Union
 from urllib.request import Request

 import llnl.path

@@ -23,7 +24,6 @@

 import spack
 import spack.binary_distribution as bindist
-import spack.builder
 import spack.config as cfg
 import spack.environment as ev
 import spack.error

@@ -32,6 +32,7 @@
 import spack.paths
 import spack.repo
 import spack.spec
+import spack.store
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml

@@ -40,6 +41,7 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
+from spack.version import GitVersion, StandardVersion

 from .common import (
     IS_WINDOWS,

@@ -78,6 +80,45 @@ def get_change_revisions():
     return None, None

+def get_added_versions(
+    checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
+    path: str,
+    from_ref: str = "HEAD~1",
+    to_ref: str = "HEAD",
+) -> List[Union[StandardVersion, GitVersion]]:
+    """Get a list of the versions added between `from_ref` and `to_ref`.
+
+    Args:
+        checksums_version_dict (Dict): all package versions keyed by known checksums.
+        path (str): path to the package.py
+        from_ref (str): oldest git ref, defaults to `HEAD~1`
+        to_ref (str): newer git ref, defaults to `HEAD`
+
+    Returns: list of versions added between refs
+    """
+    git_exe = spack.util.git.git(required=True)
+
+    # Gather git diff
+    diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
+
+    # Store added and removed versions
+    # Removed versions are tracked here to determine when versions are moved in a file
+    # and show up as both added and removed in a git diff.
+    added_checksums = set()
+    removed_checksums = set()
+
+    # Scrape diff for modified versions and prune added versions if they show up
+    # as also removed (which means they've actually just moved in the file and
+    # we shouldn't need to rechecksum them)
+    for checksum in checksums_version_dict.keys():
+        for line in diff_lines:
+            if checksum in line:
+                if line.startswith("+"):
+                    added_checksums.add(checksum)
+                if line.startswith("-"):
+                    removed_checksums.add(checksum)
+
+    return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
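# Hypothetical usage (path and checksum keys illustrative): which versions
# were added to a package.py by the last commit?
#
#     checksums = {
#         "<sha256-a>": StandardVersion.from_string("2.2.1"),
#         "<sha256-b>": StandardVersion.from_string("2.2.2"),
#     }
#     new = get_added_versions(
#         checksums, "var/spack/repos/builtin/packages/zlib-ng/package.py"
#     )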


 def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
     """Given an environment manifest path and two revisions to compare, return
     whether or not the stack was changed. Returns True if the environment

@@ -223,7 +264,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:

 def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
     reason_msg = ", ".join(reasons)
-    spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
+    spec_fmt = "{name}{@version}{/hash:7}{%compiler}"

     if not prune:
         status = colorize("@*g{[x]} ")

@@ -579,22 +620,25 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) -> None:
     tty.debug(f"job spec: {job_spec}")

     try:
-        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
-        job_pkg = pkg_cls(job_spec)
-        tty.debug(f"job package: {job_pkg}")
-    except AssertionError:
-        msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
-        tty.error(msg)
+        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
+    except spack.error.SpackError as e:
+        tty.error(f"Cannot copy logs: {str(e)}")
         return

-    stage_dir = job_pkg.stage.path
-    tty.debug(f"stage dir: {stage_dir}")
-    for file in [
-        job_pkg.log_path,
-        job_pkg.env_mods_path,
-        *spack.builder.create(job_pkg).archive_files,
-    ]:
-        copy_files_to_artifacts(file, job_log_dir)
+    # Get the package's archived files
+    archive_files = []
+    archive_root = package_metadata_root / "archived-files"
+    if archive_root.is_dir():
+        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
+    else:
+        msg = "Cannot copy package archived files: archived-files must be a directory"
+        tty.warn(msg)
+
+    build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
+    build_env_mods = package_metadata_root / "spack-build-env.txt"
+
+    for f in [build_log_zipped, build_env_mods, *archive_files]:
+        copy_files_to_artifacts(str(f), job_log_dir)


 def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -1250,35 +1294,34 @@ def display_broken_spec_messages(base_url, hashes):
         tty.msg(msg)


-def run_standalone_tests(**kwargs):
+def run_standalone_tests(
+    *,
+    cdash: Optional[CDashHandler] = None,
+    fail_fast: bool = False,
+    log_file: Optional[str] = None,
+    job_spec: Optional[spack.spec.Spec] = None,
+    repro_dir: Optional[str] = None,
+    timeout: Optional[int] = None,
+):
     """Run stand-alone tests on the current spec.

-    Arguments:
-        kwargs (dict): dictionary of arguments used to run the tests
-
-    List of recognized keys:
-
-    * "cdash" (CDashHandler): (optional) cdash handler instance
-    * "fail_fast" (bool): (optional) terminate tests after the first failure
-    * "log_file" (str): (optional) test log file name if NOT CDash reporting
-    * "job_spec" (Spec): spec that was built
-    * "repro_dir" (str): reproduction directory
+    Args:
+        cdash: cdash handler instance
+        fail_fast: terminate tests after the first failure
+        log_file: test log file name if NOT CDash reporting
+        job_spec: spec that was built
+        repro_dir: reproduction directory
+        timeout: maximum time (in seconds) that tests are allowed to run
     """
-    cdash = kwargs.get("cdash")
-    fail_fast = kwargs.get("fail_fast")
-    log_file = kwargs.get("log_file")
-
     if cdash and log_file:
         tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
         log_file = None

     # Error out but do NOT terminate if there are missing required arguments.
-    job_spec = kwargs.get("job_spec")
     if not job_spec:
         tty.error("Job spec is required to run stand-alone tests")
         return

-    repro_dir = kwargs.get("repro_dir")
     if not repro_dir:
         tty.error("Reproduction directory is required for stand-alone tests")
         return

@@ -1287,6 +1330,9 @@ def run_standalone_tests(**kwargs):
     if fail_fast:
         test_args.append("--fail-fast")

+    if timeout is not None:
+        test_args.extend(["--timeout", str(timeout)])
+
     if cdash:
         test_args.extend(cdash.args())
     else:
@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
     if len(matching_specs) <= 1:
         return

-    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
+    format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
         colorize("  @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs

@@ -477,7 +477,7 @@ def get_arg(name, default=None):
     if flags:
         ffmt += " {compiler_flags}"
     vfmt = "{variants}" if variants else ""
-    format_string = nfmt + "{@version}" + ffmt + vfmt
+    format_string = nfmt + "{@version}" + vfmt + ffmt

     def fmt(s, depth=0):
         """Formatter function for all output specs"""
@@ -5,11 +5,13 @@
import json
import os
import shutil
import sys
from typing import Dict
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util import tty

import spack.binary_distribution as bindist
import spack.ci as spack_ci
@@ -18,12 +20,20 @@
import spack.cmd.common.arguments
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.executable
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
import spack.version

description = "manage continuous integration pipelines"
section = "build"
@@ -150,6 +160,12 @@ def setup_parser(subparser):
        default=False,
        help="stop stand-alone tests after the first failure",
    )
    rebuild.add_argument(
        "--timeout",
        type=int,
        default=None,
        help="maximum time (in seconds) that tests are allowed to run",
    )
    rebuild.set_defaults(func=ci_rebuild)
    spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])

@@ -191,6 +207,16 @@ def setup_parser(subparser):

    reproduce.set_defaults(func=ci_reproduce)

    # Verify checksums inside of ci workflows
    verify_versions = subparsers.add_parser(
        "verify-versions",
        description=deindent(ci_verify_versions.__doc__),
        help=spack.cmd.first_line(ci_verify_versions.__doc__),
    )
    verify_versions.add_argument("from_ref", help="git ref from which to start looking at changes")
    verify_versions.add_argument("to_ref", help="git ref at which to stop looking at changes")
    verify_versions.set_defaults(func=ci_verify_versions)


def ci_generate(args):
    """generate jobs file from a CI-aware spack file
@@ -427,7 +453,7 @@ def ci_rebuild(args):

    # Arguments when installing the root from sources
    deps_install_args = install_args + ["--only=dependencies"]
    root_install_args = install_args + ["--keep-stage", "--only=package"]
    root_install_args = install_args + ["--only=package"]

    if cdash_handler:
        # Add additional arguments to `spack install` for CDash reporting.
@@ -464,8 +490,7 @@ def ci_rebuild(args):
        job_spec.to_dict(hash=ht.dag_hash),
    )

    # We generated the "spack install ..." command to "--keep-stage", copy
    # any logs from the staging directory to artifacts now
    # Copy logs and archived files from the install metadata (.spack) directory to artifacts now
    spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

    # If the installation succeeded and we're running stand-alone tests for
@@ -502,6 +527,7 @@ def ci_rebuild(args):
            fail_fast=args.fail_fast,
            log_file=log_file,
            repro_dir=repro_dir,
            timeout=args.timeout,
        )

    except Exception as err:
@@ -660,6 +686,156 @@ def _gitlab_artifacts_url(url: str) -> str:
    return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))


def validate_standard_versions(
    pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
    """Get and test the checksum of a package version based on a tarball.
    Args:
        pkg (spack.package_base.PackageBase): Spack package for which to validate a version checksum
        versions (spack.version.VersionList): list of package versions to validate
    Returns: bool: result of the validation. True if all checksums are valid, False otherwise.
    """
    url_dict: Dict[spack.version.StandardVersion, str] = {}

    for version in versions:
        url = pkg.find_valid_url_for_version(version)
        url_dict[version] = url

    version_hashes = spack.stage.get_checksums_for_versions(
        url_dict, pkg.name, fetch_options=pkg.fetch_options
    )

    valid_checksums = True
    for version, sha in version_hashes.items():
        if sha != pkg.versions[version]["sha256"]:
            tty.error(
                f"Invalid checksum found {pkg.name}@{version}\n"
                f"    [package.py] {pkg.versions[version]['sha256']}\n"
                f"    [Downloaded] {sha}"
            )
            valid_checksums = False
            continue

        tty.info(f"Validated {pkg.name}@{version} --> {sha}")

    return valid_checksums


def validate_git_versions(
    pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
    """Get and test the commit and tag of a package version based on a git repository.
    Args:
        pkg (spack.package_base.PackageBase): Spack package for which to validate a version
        versions (spack.version.VersionList): list of package versions to validate
    Returns: bool: result of the validation. True if all commits and tags are valid, False otherwise.
    """
    valid_commit = True
    for version in versions:
        fetcher = spack.fetch_strategy.for_package_version(pkg, version)
        with spack.stage.Stage(fetcher) as stage:
            known_commit = pkg.versions[version]["commit"]
            try:
                stage.fetch()
            except spack.error.FetchError:
                tty.error(
                    f"Invalid commit for {pkg.name}@{version}\n"
                    f"    {known_commit} could not be checked out in the git repository."
                )
                valid_commit = False
                continue

            # Test if the specified tag matches the commit in the package.py
            # We retrieve the commit associated with a tag and compare it to the
            # commit that is located in the package.py file.
            if "tag" in pkg.versions[version]:
                tag = pkg.versions[version]["tag"]
                try:
                    with fs.working_dir(stage.source_path):
                        found_commit = fetcher.git(
                            "rev-list", "-n", "1", tag, output=str, error=str
                        ).strip()
                except spack.util.executable.ProcessError:
                    tty.error(
                        f"Invalid tag for {pkg.name}@{version}\n"
                        f"    {tag} could not be found in the git repository."
                    )
                    valid_commit = False
                    continue

                if found_commit != known_commit:
                    tty.error(
                        f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
                        f"    [package.py] {known_commit}\n"
                        f"    [Downloaded] {found_commit}"
                    )
                    valid_commit = False
                    continue

            # If we have downloaded the repository, found the commit, and compared
            # the tag (if specified) we can conclude that the version is pointing
            # at what we would expect.
            tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")

    return valid_commit


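A sketch of driving the checksum validator above directly, assuming a package class can be loaded from the repo; the package name and version are hypothetical:

```python
import spack.repo
import spack.spec
import spack.version

# Instantiate a package object the same way ci_verify_versions does below.
spec = spack.spec.Spec("zlib")  # hypothetical package
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)

# Both validators return a bool and log per-version errors instead of
# raising, so a CI job can report every bad entry in a single pass.
versions = spack.version.VersionList(["1.3.1"])  # hypothetical new version
ok = validate_standard_versions(pkg, versions)
print("checksums valid" if ok else "checksum mismatch found")
```
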
def ci_verify_versions(args):
    """validate version checksum & commits between git refs
    This command takes from_ref and to_ref arguments, parses the git diff
    between the two to determine which packages have been modified, and
    verifies the new checksums inside of them.
    """
    # Get a list of all packages that have been changed or added
    # between from_ref and to_ref
    pkgs = spack.repo.get_all_package_diffs("AC", args.from_ref, args.to_ref)

    failed_version = False
    for pkg_name in pkgs:
        spec = spack.spec.Spec(pkg_name)
        pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
        path = spack.repo.PATH.package_path(pkg_name)

        # Skip checking manual download packages and trust the maintainers
        if pkg.manual_download:
            tty.warn(f"Skipping manual download package: {pkg_name}")
            continue

        # Store versions checksums / commits for future loop
        checksums_version_dict = {}
        commits_version_dict = {}
        for version in pkg.versions:
            # If the package version defines a sha256 we'll use that as the high entropy
            # string to detect which versions have been added between from_ref and to_ref
            if "sha256" in pkg.versions[version]:
                checksums_version_dict[pkg.versions[version]["sha256"]] = version

            # If a package version instead defines a commit we'll use that as a
            # high entropy string to detect new versions.
            elif "commit" in pkg.versions[version]:
                commits_version_dict[pkg.versions[version]["commit"]] = version

            # TODO: enforce that every version has a commit or a sha256 defined if it is
            # not an infinite version (there are a lot of packages where this doesn't work yet.)

        with fs.working_dir(os.path.dirname(path)):
            added_checksums = spack_ci.get_added_versions(
                checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
            )
            added_commits = spack_ci.get_added_versions(
                commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
            )

        if added_checksums:
            failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version

        if added_commits:
            failed_version = not validate_git_versions(pkg, added_commits) or failed_version

    if failed_version:
        sys.exit(1)


def ci(parser, args):
    if args.func:
        return args.func(args)
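
With the subcommand registered earlier in this file, the whole check can be run programmatically as well as via `spack ci verify-versions <from_ref> <to_ref>`. A sketch with example refs:

```python
from argparse import Namespace

import spack.cmd.ci

# Equivalent to: spack ci verify-versions develop HEAD  (refs are examples)
args = Namespace(from_ref="develop", to_ref="HEAD")
spack.cmd.ci.ci_verify_versions(args)  # calls sys.exit(1) if any new version fails
```
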
@@ -350,9 +350,12 @@ def _config_change(config_path, match_spec_str=None):
            if spack.config.get(key_path, scope=scope):
                ideal_scope_to_modify = scope
                break
        # If we find our key in a specific scope, that's the one we want
        # to modify. Otherwise we use the default write scope.
        write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()

        update_path = f"{key_path}:[{str(spec)}]"
        spack.config.add(update_path, scope=ideal_scope_to_modify)
        spack.config.add(update_path, scope=write_scope)
    else:
        raise ValueError("'config change' can currently only change 'require' sections")
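
The fix above makes the fallback explicit: when no existing scope already holds the key, the new requirement is written to the default modify scope instead of passing `scope=None` through. A sketch of the same lookup-then-write pattern, with a hypothetical key and requirement:

```python
import spack.config

key_path = "packages:zlib:require"  # hypothetical key
ideal_scope_to_modify = None
for scope in spack.config.CONFIG.writable_scopes:  # assumes this iterable of scopes
    if spack.config.get(key_path, scope=scope.name):
        ideal_scope_to_modify = scope.name
        break

# Fall back to the default write scope explicitly rather than implicitly.
write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
spack.config.add(f"{key_path}:[%gcc]", scope=write_scope)  # hypothetical requirement
```
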
@@ -572,7 +572,7 @@ def edit(self, spec, prefix):
class IntelPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for licensed Intel software"""

    base_class_name = "IntelPackage"
    base_class_name = "IntelOneApiPackage"

    body_def = """\
    # FIXME: Override `setup_environment` if necessary."""
@@ -55,7 +55,7 @@ def dependencies(parser, args):
        env = ev.active_environment()
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = "{name}{@version}{%compiler}{/hash:7}"
        format_string = "{name}{@version}{/hash:7}{%compiler}"
        if sys.stdout.isatty():
            tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
        deps = spack.store.STORE.db.installed_relatives(
@@ -93,7 +93,7 @@ def dependents(parser, args):
        env = ev.active_environment()
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = "{name}{@version}{%compiler}{/hash:7}"
        format_string = "{name}{@version}{/hash:7}{%compiler}"
        if sys.stdout.isatty():
            tty.msg("Dependents of %s" % spec.cformat(format_string))
        deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
@@ -3,11 +3,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
from typing import Optional

import llnl.util.tty as tty

import spack.cmd
import spack.config
import spack.environment
import spack.fetch_strategy
import spack.repo
import spack.spec
@@ -31,37 +33,33 @@ def setup_parser(subparser):
        "--no-clone",
        action="store_false",
        dest="clone",
        default=None,
        help="do not clone, the package already exists at the source path",
    )
    clone_group.add_argument(
        "--clone",
        action="store_true",
        dest="clone",
        default=None,
        help="clone the package even if the path already exists",
        default=True,
        help=(
            "(default) clone the package unless the path already exists, "
            "use --force to overwrite"
        ),
    )

    subparser.add_argument(
        "-f", "--force", help="remove any files or directories that block cloning source code"
    )

    subparser.add_argument(
        "-r",
        "--recursive",
        action="store_true",
        help="traverse nodes of the graph to mark everything up to the root as a develop spec",
    )

    arguments.add_common_arguments(subparser, ["spec"])


def _update_config(spec, path):
    find_fn = lambda section: spec.name in section

    entry = {"spec": str(spec)}
    if path != spec.name:
        entry["path"] = path

    def change_fn(section):
        section[spec.name] = entry

    spack.config.change_or_add("develop", find_fn, change_fn)


def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
    # "steal" the source code via staging API. We ask for a stage
    # to be created, then copy it afterwards somewhere else. It would be
@@ -83,44 +81,43 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
    package.stage.steal_source(abspath)


def develop(parser, args):
    # Note: we could put develop specs in any scope, but I assume
    # users would only ever want to do this for either (a) an active
    # env or (b) a specified config file (e.g. that is included by
    # an environment)
    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
    # an active env is not required if a scope is specified
    env = spack.cmd.require_active_env(cmd_name="develop")
    if not args.spec:
        if args.clone is False:
            raise SpackError("No spec provided to spack develop command")
def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Spec):
    version = spec.versions.concrete_range_as_version
    if not version:
        # first check environment for a matching concrete spec
        matching_specs = env.all_matching_specs(spec)
        if matching_specs:
            version = matching_specs[0].version
            test_spec = spack.spec.Spec(f"{spec}@{version}")
            for m_spec in matching_specs:
                if not m_spec.satisfies(test_spec):
                    raise SpackError(
                        f"{spec.name}: has multiple concrete instances in the graph that can't be"
                        " satisfied by a single develop spec. To use `spack develop` ensure one"
                        " of the following:"
                        f"\n a) {spec.name} nodes can satisfy the same develop spec (minimally "
                        "this means they all share the same version)"
                        f"\n b) Provide a concrete develop spec ({spec.name}@[version]) to clearly"
                        " indicate what should be developed"
                    )
        else:
            # look up the maximum version so infinity versions are preferred for develop
            version = max(spec.package_class.versions.keys())
            tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])

        # download all dev specs
        for name, entry in env.dev_specs.items():
            path = entry.get("path", name)
            abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)

            if os.path.exists(abspath):
                msg = "Skipping developer download of %s" % entry["spec"]
                msg += " because its path already exists."
                tty.msg(msg)
                continue
def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, force: bool = False):
    """
    Handle checking, cloning or overwriting source code
    """
    assert spec.versions

            # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
            # are currently supported.
            spec = spack.spec.parse_with_version_concrete(entry["spec"])
            _retrieve_develop_source(spec, abspath)
    if clone:
        _clone(spec, src_path, force)

        if not env.dev_specs:
            tty.warn("No develop specs to download")

        return

    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) > 1:
        raise SpackError("spack develop requires at most one named spec")

    spec = specs[0]
    if not clone and not os.path.exists(src_path):
        raise SpackError(f"Provided path {src_path} does not exist")

    version = spec.versions.concrete_range_as_version
    if not version:
@@ -129,40 +126,114 @@ def develop(parser, args):
        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])

    # If user does not specify --path, we choose to create a directory in the
    # active environment's directory, named after the spec
    path = args.path or spec.name
    if not os.path.isabs(path):
        abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
    else:
        abspath = path

    # clone default: only if the path doesn't exist
    clone = args.clone
    if clone is None:
        clone = not os.path.exists(abspath)
def _update_config(spec, path):
    find_fn = lambda section: spec.name in section

    if not clone and not os.path.exists(abspath):
        raise SpackError("Provided path %s does not exist" % abspath)
    entry = {"spec": str(spec)}
    if path and path != spec.name:
        entry["path"] = path

    if clone:
        if os.path.exists(abspath):
            if args.force:
                shutil.rmtree(abspath)
            else:
                msg = "Path %s already exists and cannot be cloned to." % abspath
                msg += " Use `spack develop -f` to overwrite."
                raise SpackError(msg)
    def change_fn(section):
        section[spec.name] = entry

        _retrieve_develop_source(spec, abspath)
    spack.config.change_or_add("develop", find_fn, change_fn)


def update_env(
    env: spack.environment.Environment,
    spec: spack.spec.Spec,
    specified_path: Optional[str] = None,
    build_dir: Optional[str] = None,
):
    """
    Update the spack.yaml file with additions or changes from a develop call
    """
    tty.debug(f"Updating develop config for {env.name} transactionally")

    if not specified_path:
        dev_entry = env.dev_specs.get(spec.name)
        if dev_entry:
            specified_path = dev_entry.get("path", None)

    tty.debug("Updating develop config for {0} transactionally".format(env.name))
    with env.write_transaction():
        if args.build_directory is not None:
        if build_dir is not None:
            spack.config.add(
                "packages:{}:package_attributes:build_directory:{}".format(
                    spec.name, args.build_directory
                ),
                f"packages:{spec.name}:package_attributes:build_directory:{build_dir}",
                env.scope_name,
            )
        _update_config(spec, path)
        # add develop spec and update path
        _update_config(spec, specified_path)


def _clone(spec: spack.spec.Spec, abspath: str, force: bool = False):
    if os.path.exists(abspath):
        if force:
            shutil.rmtree(abspath)
        else:
            msg = f"Skipping developer download of {spec.name}"
            msg += f" because its path {abspath} already exists."
            tty.msg(msg)
            return

    # cloning can take a while and it's nice to get a message for the longer clones
    tty.msg(f"Cloning source code for {spec}")
    _retrieve_develop_source(spec, abspath)


def _abs_code_path(
    env: spack.environment.Environment, spec: spack.spec.Spec, path: Optional[str] = None
):
    src_path = path if path else spec.name
    return spack.util.path.canonicalize_path(src_path, default_wd=env.path)


def _dev_spec_generator(args, env):
    """
    Generator function to loop over all the develop specs based on how the command is called
    If no specs are supplied then loop over the develop specs listed in the environment.
    """
    if not args.spec:
        if args.clone is False:
            raise SpackError("No spec provided to spack develop command")

        for name, entry in env.dev_specs.items():
            path = entry.get("path", name)
            abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
            # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
            # are currently supported.
            spec = spack.spec.parse_with_version_concrete(entry["spec"])
            yield spec, abspath
    else:
        specs = spack.cmd.parse_specs(args.spec)
        if (args.path or args.build_directory) and len(specs) > 1:
            raise SpackError(
                "spack develop requires at most one named spec when using the --path or"
                " --build-directory arguments"
            )

        for spec in specs:
            if args.recursive:
                concrete_specs = env.all_matching_specs(spec)
                if not concrete_specs:
                    tty.warn(
                        f"{spec.name} has no matching concrete specs in the environment and "
                        "will be skipped. `spack develop --recursive` requires a concretized"
                        " environment"
                    )
                else:
                    for s in concrete_specs:
                        for node_spec in s.traverse(direction="parents", root=True):
                            tty.debug(f"Recursive develop for {node_spec.name}")
                            yield node_spec, _abs_code_path(env, node_spec, args.path)
            else:
                yield spec, _abs_code_path(env, spec, args.path)


def develop(parser, args):
    env = spack.cmd.require_active_env(cmd_name="develop")

    for spec, abspath in _dev_spec_generator(args, env):
        assure_concrete_spec(env, spec)
        setup_src_code(spec, abspath, clone=args.clone, force=args.force)
        update_env(env, spec, args.path, args.build_directory)
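
After the refactor, `develop()` is a thin loop over four helpers, which also makes the pipeline easy to exercise for a single spec. A sketch, assuming an active environment and using a hypothetical develop spec:

```python
import spack.cmd
import spack.spec

env = spack.cmd.require_active_env(cmd_name="develop")
spec = spack.spec.Spec("zlib")        # hypothetical develop spec
src_path = _abs_code_path(env, spec)  # defaults to <env dir>/zlib

assure_concrete_spec(env, spec)       # pins a concrete version if none was given
setup_src_code(spec, src_path, clone=True, force=False)
update_env(env, spec, specified_path=None, build_dir=None)
```
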
@@ -73,7 +73,7 @@
    boxlib @B{dim=2} boxlib built for 2 dimensions
    libdwarf @g{%intel} ^libelf@g{%gcc}
        libdwarf, built with intel compiler, linked to libelf built with gcc
    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
    mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
        mvapich2, built with gcc compiler, with support for multiple fabrics
"""

@@ -383,8 +383,10 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
        query = " ".join(str(s) for s in args.constraint_specs)
        msg = f"the constraint '{query}' matches multiple packages:\n"
        for s in specs:
            spec_fmt = "{hash:7} {name}{@version}{%compiler}"
            spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
            spec_fmt = (
                "{hash:7} {name}{@version}{compiler_flags}{variants}"
                "{arch=architecture} {%compiler}"
            )
            msg += "\t" + s.cformat(spec_fmt) + "\n"
        tty.die(msg, "In this context exactly *one* match is needed.")

@@ -1,7 +1,12 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil

from llnl.util import tty

import spack.database
import spack.store

description = "rebuild Spack's package database"
@@ -10,4 +15,11 @@


def reindex(parser, args):
    current_index = spack.store.STORE.db._index_path
    if os.path.isfile(current_index):
        backup = f"{current_index}.bkp"
        shutil.copy(current_index, backup)
        tty.msg(f"Created a back-up copy of the DB at {backup}")

    spack.store.STORE.reindex()
    tty.msg(f"The DB at {current_index} has been reindexed to v{spack.database._DB_VERSION}")
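
Because `spack reindex` rewrites the index in place, the `.bkp` copy gives a manual escape hatch; restoring it is a single copy back (the store path below is hypothetical):

```python
import shutil

index = "/opt/spack/.spack-db/index.json"  # hypothetical store index path
shutil.copy(f"{index}.bkp", index)         # put the saved copy back
```
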
@@ -6,8 +6,9 @@
import os
import re
import sys
import warnings
from itertools import islice, zip_longest
from typing import Dict, List, Optional
from typing import Callable, Dict, List, Optional

import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -16,6 +17,9 @@
import spack.paths
import spack.repo
import spack.util.git
import spack.util.spack_yaml
from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
from spack.tokenize import Token
from spack.util.executable import Executable, which

description = "runs source code style checks on spack"
@@ -198,6 +202,13 @@ def setup_parser(subparser):
        action="append",
        help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
    )
    subparser.add_argument(
        "--spec-strings",
        action="store_true",
        help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
        "will be removed in Spack v1.0.",
    )

    subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")

@@ -507,7 +518,196 @@ def _bootstrap_dev_dependencies():
    spack.bootstrap.ensure_environment_dependencies()


IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")


def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
    # only move the compiler to the back if it exists and is not already at the end
    if not 0 <= idx < len(blocks) - 1:
        return
    # if there's only whitespace after the compiler, don't move it
    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
        return
    # rotate left and always add at least one WS token between compiler and previous token
    compiler_block = blocks.pop(idx)
    if compiler_block[0].kind != SpecTokens.WS:
        compiler_block.insert(0, Token(SpecTokens.WS, " "))
    # delete the WS tokens from the new first block if it was at the very start, to prevent leading
    # WS tokens.
    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
        blocks[0].pop(0)
    blocks.append(compiler_block)


def _spec_str_format(spec_str: str) -> Optional[str]:
    """Given any string, try to parse as spec string, and rotate the compiler token to the end
    of each spec instance. Returns the formatted string if it was changed, otherwise None."""
    # We parse blocks of tokens that include leading whitespace, and move the compiler block to
    # the end when we hit a dependency ^... or the end of a string.
    # [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
    # [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]

    current_block: List[Token] = []
    blocks: List[List[Token]] = []
    compiler_block_idx = -1
    in_edge_attr = False

    for token in SPEC_TOKENIZER.tokenize(spec_str):
        if token.kind == SpecTokens.UNEXPECTED:
            # parsing error, we cannot fix this string.
            return None
        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
            # multiple compilers are not supported in Spack v0.x, so early return
            if compiler_block_idx != -1:
                return None
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
            compiler_block_idx = len(blocks) - 1
        elif token.kind in (
            SpecTokens.START_EDGE_PROPERTIES,
            SpecTokens.DEPENDENCY,
            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
        ):
            _spec_str_reorder_compiler(compiler_block_idx, blocks)
            compiler_block_idx = -1
            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
                in_edge_attr = True
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
            in_edge_attr = False
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
        elif in_edge_attr:
            current_block.append(token)
        elif token.kind in (
            SpecTokens.VERSION_HASH_PAIR,
            SpecTokens.GIT_VERSION,
            SpecTokens.VERSION,
            SpecTokens.PROPAGATED_BOOL_VARIANT,
            SpecTokens.BOOL_VARIANT,
            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
            SpecTokens.KEY_VALUE_PAIR,
            SpecTokens.DAG_HASH,
        ):
            current_block.append(token)
            blocks.append(current_block)
            current_block = []
        elif token.kind == SpecTokens.WS:
            current_block.append(token)
        else:
            raise ValueError(f"unexpected token {token}")

    if current_block:
        blocks.append(current_block)
    _spec_str_reorder_compiler(compiler_block_idx, blocks)

    new_spec_str = "".join(token.value for block in blocks for token in block)
    return new_spec_str if spec_str != new_spec_str else None


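Concretely, the rotation above turns a v0.x-style string with an embedded compiler into the v1.0 ordering, and returns `None` both for strings that are already ordered and for strings it cannot parse (example inputs are hypothetical):

```python
print(_spec_str_format("foo@1.0 %gcc@12 +bar"))  # -> "foo@1.0 +bar %gcc@12"
print(_spec_str_format("foo +bar %gcc ^baz %clang"))  # already ordered -> None
print(_spec_str_format("not a spec ]["))  # tokenizer error -> None
```
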
SpecStrHandler = Callable[[str, int, int, str, str], None]


def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
    """A SpecStrHandler that prints formatted spec strings and their locations."""
    print(f"{path}:{line}:{col}: `{old}` -> `{new}`")


def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
    """A SpecStrHandler that updates formatted spec strings in files."""
    with open(path, "r", encoding="utf-8") as f:
        lines = f.readlines()
    new_line = lines[line - 1].replace(old, new)
    if new_line == lines[line - 1]:
        tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
        return
    lines[line - 1] = new_line
    print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
    with open(path, "w", encoding="utf-8") as f:
        f.writelines(lines)


def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
    """Walk the AST of a Python file and apply handler to formatted spec strings."""
    has_constant = sys.version_info >= (3, 8)
    for node in ast.walk(tree):
        if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
            current_str = node.value
        elif not has_constant and isinstance(node, ast.Str):
            current_str = node.s
        else:
            continue
        if not IS_PROBABLY_COMPILER.search(current_str):
            continue
        new = _spec_str_format(current_str)
        if new is not None:
            handler(path, node.lineno, node.col_offset, current_str, new)


def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
    """Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
    queue = [data]
    seen = set()

    while queue:
        current = queue.pop(0)
        if id(current) in seen:
            continue
        seen.add(id(current))
        if isinstance(current, dict):
            queue.extend(current.values())
            queue.extend(current.keys())
        elif isinstance(current, list):
            queue.extend(current)
        elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
            new = _spec_str_format(current)
            if new is not None:
                mark = getattr(current, "_start_mark", None)
                if mark:
                    line, col = mark.line + 1, mark.column + 1
                else:
                    line, col = 0, 0
                handler(path, line, col, current, new)


def _check_spec_strings(
    paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
) -> None:
    """Open Python, JSON and YAML files, and format their string literals that look like spec
    strings. A handler is called for each formatting, which can be used to print or apply fixes."""
    for path in paths:
        is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
        is_python = path.endswith(".py")
        if not is_json_or_yaml and not is_python:
            continue

        try:
            with open(path, "r", encoding="utf-8") as f:
                # skip files that are likely too large to be user code or config
                if os.fstat(f.fileno()).st_size > 1024 * 1024:
                    warnings.warn(f"skipping {path}: too large.")
                    continue
                if is_json_or_yaml:
                    _spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
                elif is_python:
                    _spec_str_ast(path, ast.parse(f.read()), handler)
        except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
            warnings.warn(f"skipping {path}")
            continue


def style(parser, args):
    if args.spec_strings:
        if not args.files:
            tty.die("No files provided to check spec strings.")
        handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
        return _check_spec_strings(args.files, handler)

    # save initial working directory for relativizing paths later
    args.initial_working_dir = os.getcwd()

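The handler indirection keeps checking and fixing on a single code path: `spack style --spec-strings` prints by default and rewrites with `--fix`. Driving the checker directly looks like this (the file name is hypothetical):

```python
# Report v0.x-style spec strings without touching the file:
_check_spec_strings(["example/package.py"])  # prints path:line:col: `old` -> `new`

# Apply the rewrites in place:
_check_spec_strings(["example/package.py"], handler=_spec_str_fix_handler)
```
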
@@ -65,6 +65,12 @@ def setup_parser(subparser):
    run_parser.add_argument(
        "--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
    )
    run_parser.add_argument(
        "--timeout",
        type=int,
        default=None,
        help="maximum time (in seconds) that tests are allowed to run",
    )

    cd_group = run_parser.add_mutually_exclusive_group()
    arguments.add_common_arguments(cd_group, ["clean", "dirty"])
@@ -176,7 +182,7 @@ def test_run(args):
    for spec in specs:
        matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
        if spec and not matching:
            tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
            tty.warn(f"No {explicit_str}installed packages match spec {spec}")

            # TODO: Need to write out a log message and/or CDASH Testing
            # output that package not installed IF continue to process
@@ -192,7 +198,7 @@ def test_run(args):
    # test_stage_dir
    test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
    test_suite.ensure_stage()
    tty.msg("Spack test %s" % test_suite.name)
    tty.msg(f"Spack test {test_suite.name}")

    # Set up reporter
    setattr(args, "package", [s.format() for s in test_suite.specs])
@@ -204,6 +210,7 @@ def test_run(args):
        dirty=args.dirty,
        fail_first=args.fail_first,
        externals=args.externals,
        timeout=args.timeout,
    )


@@ -17,6 +17,7 @@
    pytest = None  # type: ignore

import llnl.util.filesystem
import llnl.util.tty as tty
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify

@@ -236,6 +237,12 @@ def unit_test(parser, args, unknown_args):
        pytest_root = spack.extensions.load_extension(args.extension)

    if args.numprocesses is not None and args.numprocesses > 1:
        try:
            import xdist  # noqa: F401
        except ImportError:
            tty.error("parallel unit-test requires pytest-xdist module")
            return 1

        pytest_args.extend(
            [
                "--dist",
@@ -190,6 +190,10 @@ def f77(self):
        self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.fortran

    @property
    def stdcxx_libs(self):
        return self._maybe_return_attribute("stdcxx_libs", lang=Languages.CXX)


class DeprecatedCompiler(lang.DeprecatedProperty):
    def __init__(self) -> None:
@@ -7,6 +7,7 @@
import os
import re
import sys
import warnings
from typing import Any, Dict, List, Optional, Tuple

import archspec.cpu
@@ -25,15 +26,6 @@
from spack.operating_systems import windows_os
from spack.util.environment import get_path

package_name_to_compiler_name = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}


#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"

@@ -346,7 +338,15 @@ def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List[spack.spec.Spec]:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
        filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
        detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
        try:
            detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
        except Exception:
            warnings.warn(
                f"[{__name__}] cannot detect {pkg_name} from the "
                f"following paths: {', '.join(filtered_paths)}"
            )
            continue

        for s in detected:
            for key in ("flags", "environment", "extra_rpaths"):
                if key in compiler_dict:
@@ -250,7 +250,11 @@ def implicit_rpaths(self) -> List[str]:
            return []

        link_dirs = parse_non_system_link_dirs(output)
        all_required_libs = list(self.spec.package.required_libs) + ["libc", "libc++", "libstdc++"]
        all_required_libs = list(self.spec.package.implicit_rpath_libs) + [
            "libc",
            "libc++",
            "libstdc++",
        ]
        dynamic_linker = self.default_dynamic_linker()
        result = DefaultDynamicLinkerFilter(dynamic_linker)(
            paths_containing_libs(link_dirs, all_required_libs)
@@ -32,9 +32,10 @@
import copy
import functools
import os
import os.path
import re
import sys
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union

import jsonschema

@@ -42,7 +43,6 @@

import spack.error
import spack.paths
import spack.platforms
import spack.schema
import spack.schema.bootstrap
import spack.schema.cdash
@@ -54,17 +54,18 @@
import spack.schema.develop
import spack.schema.env
import spack.schema.env_vars
import spack.schema.include
import spack.schema.merged
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view

# Hacked yaml for configuration files preserves line numbers.
import spack.util.remote_file_cache as rfc_util
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
from spack.util.cpus import cpus_available
from spack.util.spack_yaml import get_mark_from_yaml_data

from .enums import ConfigScopePriority

@@ -74,6 +75,7 @@
    "concretizer": spack.schema.concretizer.schema,
    "definitions": spack.schema.definitions.schema,
    "env_vars": spack.schema.env_vars.schema,
    "include": spack.schema.include.schema,
    "view": spack.schema.view.schema,
    "develop": spack.schema.develop.schema,
    "mirrors": spack.schema.mirrors.schema,
@@ -121,6 +123,17 @@
#: Type used for raw YAML configuration
YamlConfigDict = Dict[str, Any]

#: prefix for name of included configuration scopes
INCLUDE_SCOPE_PREFIX = "include"

#: safeguard for recursive includes -- maximum include depth
MAX_RECURSIVE_INCLUDES = 100


def _include_cache_location():
    """Location to cache included configuration files."""
    return os.path.join(spack.paths.user_cache_path, "includes")


class ConfigScope:
    def __init__(self, name: str) -> None:
@@ -128,6 +141,25 @@ def __init__(self, name: str) -> None:
        self.writable = False
        self.sections = syaml.syaml_dict()

        #: names of any included scopes
        self._included_scopes: Optional[List["ConfigScope"]] = None

    @property
    def included_scopes(self) -> List["ConfigScope"]:
        """Memoized list of included scopes, in the order they appear in this scope."""
        if self._included_scopes is None:
            self._included_scopes = []

            includes = self.get_section("include")
            if includes:
                include_paths = [included_path(data) for data in includes["include"]]
                for path in include_paths:
                    included_scope = include_path_scope(path)
                    if included_scope:
                        self._included_scopes.append(included_scope)

        return self._included_scopes

    def get_section_filename(self, section: str) -> str:
        raise NotImplementedError

@@ -433,7 +465,9 @@ def highest(self) -> ConfigScope:
        return next(self.scopes.reversed_values())  # type: ignore

    @_config_mutator
    def push_scope(self, scope: ConfigScope, priority: Optional[int] = None) -> None:
    def push_scope(
        self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
    ) -> None:
        """Adds a scope to the Configuration, at a given priority.

        If a priority is not given, it is assumed to be the current highest priority.
@@ -442,18 +476,44 @@ def push_scope(self, scope: ConfigScope, priority: Optional[int] = None) -> None
            scope: scope to be added
            priority: priority of the scope
        """
        # TODO: As a follow on to #48784, change this to create a graph of the
        # TODO: includes AND ensure properly sorted such that the order included
        # TODO: at the highest level is reflected in the value of an option that
        # TODO: is set in multiple included files.
        # before pushing the scope itself, push any included scopes recursively, at same priority
        for included_scope in reversed(scope.included_scopes):
            if _depth + 1 > MAX_RECURSIVE_INCLUDES:  # make sure we're not recursing endlessly
                mark = ""
                if hasattr(included_scope, "path") and syaml.marked(included_scope.path):
                    mark = included_scope.path._start_mark  # type: ignore
                raise RecursiveIncludeError(
                    f"Maximum include recursion exceeded in {included_scope.name}", str(mark)
                )

            # record this inclusion so that remove_scope() can use it
            self.push_scope(included_scope, priority=priority, _depth=_depth + 1)

        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
        self.scopes.add(scope.name, value=scope, priority=priority)

    @_config_mutator
    def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
        """Removes a scope by name, and returns it. If the scope does not exist, returns None."""

        try:
            scope = self.scopes.remove(scope_name)
            tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
        except KeyError as e:
            tty.debug(f"[CONFIGURATION: POP SCOPE]: {e}", level=2)
            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
            return None

        # transitively remove included scopes
        for included_scope in scope.included_scopes:
            assert (
                included_scope.name in self.scopes
            ), f"Included scope '{included_scope.name}' was never added to configuration!"
            self.remove_scope(included_scope.name)

        return scope

    @property
@@ -763,6 +823,8 @@ def _add_platform_scope(
    cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
) -> None:
    """Add a platform-specific subdirectory for the current platform."""
    import spack.platforms  # circular dependency

    platform = spack.platforms.host().name
    scope = DirectoryConfigScope(
        f"{name}/{platform}", os.path.join(path, platform), writable=writable
@@ -770,6 +832,75 @@ def _add_platform_scope(
    cfg.push_scope(scope, priority=priority)


#: An include path, optionally conditioned on a limited Python expression
#: that evaluates to a boolean, and optionally marked as explicitly
#: optional.
class IncludePath(NamedTuple):
    path: str
    when: str
    sha256: str
    optional: bool


def included_path(entry: Union[str, dict]) -> IncludePath:
    """Convert the included path entry into an IncludePath.

    Args:
        entry: include configuration entry

    Returns: converted entry, where an empty ``when`` means the path is
        not conditionally included
    """
    if isinstance(entry, str):
        return IncludePath(path=entry, sha256="", when="", optional=False)

    path = entry["path"]
    sha256 = entry.get("sha256", "")
    when = entry.get("when", "")
    optional = entry.get("optional", False)
    return IncludePath(path=path, sha256=sha256, when=when, optional=optional)


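`included_path` normalizes both forms an `include:` entry can take; a sketch with hypothetical entries (the `when` expression is evaluated later by `include_path_scope` via `spack.spec.eval_conditional`):

```python
# Plain string form: unconditional and required.
p = included_path("/etc/spack/extra.yaml")
assert p.when == "" and p.optional is False

# Mapping form, as it would appear under `include:` in a config file.
q = included_path(
    {"path": "https://example.com/cfg.yaml", "optional": True, "when": "platform == 'linux'"}
)
assert q.optional and q.when == "platform == 'linux'"
```
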
def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
    """Instantiate an appropriate configuration scope for the given path.

    Args:
        include: optional include path

    Returns: configuration scope

    Raises:
        ValueError: included path has an unsupported URL scheme, or is required
            but does not exist; configuration stage directory argument is missing
        ConfigFileError: unable to access remote configuration file(s)
    """
    # circular dependencies
    import spack.spec

    if (not include.when) or spack.spec.eval_conditional(include.when):
        config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
        if not config_path:
            raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")

        if os.path.isdir(config_path):
            # directories are treated as regular ConfigScopes
            config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
            tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
            return DirectoryConfigScope(config_name, config_path)

        if os.path.exists(config_path):
            # files are assumed to be SingleFileScopes
            config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
            tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
            return SingleFileScope(config_name, config_path, spack.schema.merged.schema)

        if not include.optional:
            path = f" at ({config_path})" if config_path != include.path else ""
            raise ValueError(f"Required path ({include.path}) does not exist{path}")

    return None


def config_paths_from_entry_points() -> List[Tuple[str, str]]:
    """Load configuration paths from entry points

@@ -795,7 +926,7 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
    return config_paths


def create() -> Configuration:
def create_incremental() -> Generator[Configuration, None, None]:
    """Singleton Configuration instance.

    This constructs one instance associated with this module and returns
@@ -839,11 +970,25 @@ def create() -> Configuration:
        # Each scope can have per-platform overrides in subdirectories
        _add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)

    return cfg
    # yield the config incrementally so that each config level's init code can get
    # data from the one below. This can be tricky, but it enables us to have a
    # single unified config system.
    #
    # TODO: think about whether we want to restrict what types of config can be used
    #       at each level. e.g., we may want to just more forcibly disallow remote
    #       config (which uses ssl and other config options) for some of the scopes,
    #       to make the bootstrap issues more explicit, even if allowing config scope
    #       init to reference lower scopes is more flexible.
    yield cfg


def create() -> Configuration:
    """Create a configuration using create_incremental(), return the last yielded result."""
    return list(create_incremental())[-1]


#: This is the singleton configuration instance for Spack.
CONFIG: Configuration = lang.Singleton(create)  # type: ignore
CONFIG: Configuration = lang.Singleton(create_incremental)  # type: ignore


def add_from_file(filename: str, scope: Optional[str] = None) -> None:
@@ -939,7 +1084,8 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:

    Accepts the path syntax described in ``get()``.
    """
    return CONFIG.set(path, value, scope)
    result = CONFIG.set(path, value, scope)
    return result


def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
@@ -1462,120 +1608,6 @@ def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Conf
    return result


def raw_github_gitlab_url(url: str) -> str:
    """Transform a github URL to the raw form to avoid undesirable html.

    Args:
        url: url to be converted to raw form

    Returns:
        Raw github/gitlab url or the original url
    """
    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
    # actual URL under https://raw.githubusercontent.com/ with '/blob'
    # removed and or, '/blame' if needed.
    if "github" in url or "gitlab" in url:
        return url.replace("/blob/", "/raw/")

    return url


def collect_urls(base_url: str) -> list:
    """Return a list of configuration URLs.

    Arguments:
        base_url: URL for a configuration (yaml) file or a directory
            containing yaml file(s)

    Returns:
        List of configuration file(s) or empty list if none
    """
    if not base_url:
        return []

    extension = ".yaml"

    if base_url.endswith(extension):
        return [base_url]

    # Collect configuration URLs if the base_url is a "directory".
    _, links = web_util.spider(base_url, 0)
    return [link for link in links if link.endswith(extension)]


def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
    """Retrieve configuration file(s) at the specified URL.

    Arguments:
        url: URL for a configuration (yaml) file or a directory containing
            yaml file(s)
        dest_dir: destination directory
        skip_existing: Skip files that already exist in dest_dir if
            ``True``; otherwise, replace those files

    Returns:
        Path to the corresponding file if URL is or contains a
        single file and it is the only file in the destination directory or
        the root (dest_dir) directory if multiple configuration files exist
        or are retrieved.
    """

    def _fetch_file(url):
        raw = raw_github_gitlab_url(url)
        tty.debug(f"Reading config from url {raw}")
        return web_util.fetch_url_text(raw, dest_dir=dest_dir)

    if not url:
        raise ConfigFileError("Cannot retrieve configuration without a URL")

    # Return the local path to the cached configuration file OR to the
    # directory containing the cached configuration files.
    config_links = collect_urls(url)
    existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []

    paths = []
    for config_url in config_links:
        basename = os.path.basename(config_url)
        if skip_existing and basename in existing_files:
            tty.warn(
                f"Will not fetch configuration from {config_url} since a "
                f"version already exists in {dest_dir}"
            )
            path = os.path.join(dest_dir, basename)
        else:
            path = _fetch_file(config_url)

        if path:
            paths.append(path)

    if paths:
        return dest_dir if len(paths) > 1 else paths[0]

    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")


def get_mark_from_yaml_data(obj):
    """Try to get ``spack.util.spack_yaml`` mark from YAML data.

    We try the object, and if that fails we try its first member (if it's a container).

    Returns:
        mark if one is found, otherwise None.
    """
    # mark of object itself
    mark = getattr(obj, "_start_mark", None)
    if mark:
        return mark

    # mark of first member if it is a container
    if isinstance(obj, (list, dict)):
        first_member = next(iter(obj), None)
        if first_member:
            mark = getattr(first_member, "_start_mark", None)

    return mark


def determine_number_of_jobs(
    *,
    parallel: bool = False,
@@ -1680,3 +1712,7 @@ def get_path(path, data):

    # give up and return None if nothing worked
    return None


class RecursiveIncludeError(spack.error.SpackError):
    """Too many levels of recursive includes."""

@@ -649,7 +649,7 @@ def __init__(
|
||||
@property
|
||||
def db_version(self) -> vn.ConcreteVersion:
|
||||
if self._db_version is None:
|
||||
raise AttributeError("db version is not yet set")
|
||||
raise AttributeError("version not set -- DB has not been read yet")
|
||||
return self._db_version
|
||||
|
||||
@db_version.setter
|
||||
@@ -896,7 +896,7 @@ def _handle_current_version_read(self, check, db):
|
||||
|
||||
def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
|
||||
if reindex is False and not self.is_upstream:
|
||||
self.raise_explicit_database_upgrade()
|
||||
self.raise_explicit_database_upgrade_error()
|
||||
|
||||
if not self.is_readable():
|
||||
raise DatabaseNotReadableError(
|
||||
@@ -909,13 +909,16 @@ def is_readable(self) -> bool:
|
||||
"""Returns true if this DB can be read without reindexing"""
|
||||
return (self.db_version, _DB_VERSION) in _REINDEX_NOT_NEEDED_ON_READ
|
||||
|
||||
def raise_explicit_database_upgrade(self):
|
||||
def raise_explicit_database_upgrade_error(self):
|
||||
"""Raises an ExplicitDatabaseUpgradeError with an appropriate message"""
|
||||
raise ExplicitDatabaseUpgradeError(
|
||||
f"database is v{self.db_version}, but Spack v{spack.__version__} needs v{_DB_VERSION}",
|
||||
long_message=(
|
||||
f"\nUse `spack reindex` to upgrade the store at {self.root} to version "
|
||||
f"{_DB_VERSION}, or change config:install_tree:root to use a different store"
|
||||
f"\nChange config:install_tree:root to use a different store, or use `spack "
|
||||
f"reindex` to migrate the store at {self.root} to version {_DB_VERSION}.\n\n"
|
||||
f"If you decide to migrate the store, note that:\n"
|
||||
f"1. The operation cannot be reverted, and\n"
|
||||
f"2. Older Spack versions will not be able to read the store anymore\n"
|
||||
),
|
||||
)
|
||||
|
||||
@@ -1160,7 +1163,7 @@ def _add(
|
||||
installation_time:
|
||||
Date and time of installation
|
||||
allow_missing: if True, don't warn when installation is not found on on disk
|
||||
This is useful when installing specs without build deps.
|
||||
This is useful when installing specs without build/test deps.
|
||||
"""
|
||||
if not spec.concrete:
|
||||
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
|
||||
@@ -1180,10 +1183,8 @@ def _add(
|
||||
edge.spec,
|
||||
explicit=False,
|
||||
installation_time=installation_time,
|
||||
# allow missing build-only deps. This prevents excessive warnings when a spec is
|
||||
# installed, and its build dep is missing a build dep; there's no need to install
|
||||
# the build dep's build dep first, and there's no need to warn about it missing.
|
||||
allow_missing=allow_missing or edge.depflag == dt.BUILD,
|
||||
# allow missing build / test only deps
|
||||
allow_missing=allow_missing or edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag,
|
||||
)
|
||||
|
||||
# Make sure the directory layout agrees whether the spec is installed
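The replacement condition is a bitmask subset test: ``edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag`` holds exactly when the edge carries no dependency types other than build and test. A minimal sketch of the check, using made-up flag values rather than the real ``spack.deptypes`` constants:

.. code-block:: python

    # Illustrative flag values only; the actual constants live in spack.deptypes.
    BUILD, LINK, RUN, TEST = 1, 2, 4, 8

    def build_or_test_only(depflag: int) -> bool:
        # True when no bits outside BUILD | TEST are set
        return depflag & (BUILD | TEST) == depflag

    assert build_or_test_only(BUILD)             # build-only edge
    assert build_or_test_only(BUILD | TEST)      # build+test edge
    assert not build_or_test_only(BUILD | LINK)  # link edges must not be skipped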

@@ -7,6 +7,7 @@
import collections
import concurrent.futures
import os
import pathlib
import re
import sys
import traceback

@@ -15,6 +16,7 @@

import llnl.util.filesystem
import llnl.util.lang
import llnl.util.symlink
import llnl.util.tty

import spack.error

@@ -70,13 +72,21 @@ def dedupe_paths(paths: List[str]) -> List[str]:
    """Deduplicate paths based on inode and device number. In case the list contains first a
    symlink and then the directory it points to, the symlink is replaced with the directory path.
    This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
    the former`."""
    the former."""
    seen: Dict[Tuple[int, int], str] = {}

    linked_parent_check = lambda x: any(
        [llnl.util.symlink.islink(str(y)) for y in pathlib.Path(x).parents]
    )

    for path in paths:
        identifier = file_identifier(path)
        if identifier not in seen:
            seen[identifier] = path
        elif not os.path.islink(path):
        # we also want to deprioritize paths if they contain a symlink in any parent
        # (not just the basedir): e.g. oneapi has "latest/bin",
        # where "latest" is a symlink to "2025.0"
        elif not (llnl.util.symlink.islink(path) or linked_parent_check(path)):
            seen[identifier] = path
    return list(seen.values())
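A hedged, standalone sketch of the deduplication behavior described in the docstring (``file_identifier`` below is a stand-in for the helper used above, assumed to return the ``(st_dev, st_ino)`` pair of a path):

.. code-block:: python

    import os
    import tempfile

    def file_identifier(path):
        s = os.stat(path)  # follows symlinks, so a link and its target compare equal
        return (s.st_dev, s.st_ino)

    def dedupe(paths):
        # Simplified version of the logic above: last non-symlink wins per inode.
        seen = {}
        for p in paths:
            key = file_identifier(p)
            if key not in seen or not os.path.islink(p):
                seen[key] = p
        return list(seen.values())

    root = tempfile.mkdtemp()
    real = os.path.join(root, "usr_bin")
    link = os.path.join(root, "bin")
    os.mkdir(real)
    os.symlink(real, link)

    print(dedupe([link, real]))  # [real]: the symlink is replaced by its target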


@@ -34,11 +34,13 @@ class OpenMpi(Package):
import collections.abc
import os
import re
import warnings
from typing import Any, Callable, List, Optional, Tuple, Type, Union

import llnl.util.tty.color

import spack.deptypes as dt
import spack.error
import spack.fetch_strategy
import spack.package_base
import spack.patch

@@ -457,8 +459,7 @@ def _execute_extends(pkg):
        if dep_spec.name == "python" and not pkg.name == "python-venv":
            _depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))

        # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
        pkg.extendees[dep_spec.name] = (dep_spec, None)
        pkg.extendees[dep_spec.name] = (dep_spec, when_spec)

    return _execute_extends

@@ -609,7 +610,7 @@ def _execute_patch(
    return _execute_patch


def conditional(*values: List[Any], when: Optional[WhenType] = None):
def conditional(*values: Union[str, bool], when: Optional[WhenType] = None):
    """Conditional values that can be used in variant declarations."""
    # _make_when_spec returns None when the condition is statically false.
    when = _make_when_spec(when)
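For context, ``conditional`` is used inside a package's ``variant`` declaration to make some values available only when a condition on the spec holds. A hypothetical recipe fragment (value names and version range are made up for illustration):

.. code-block:: python

    # Only offer the "autotools" value for releases older than 2.0.
    variant(
        "build_system",
        default="cmake",
        values=("cmake", conditional("autotools", when="@:1.9")),
        description="Build system to generate with",
    )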

@@ -621,7 +622,7 @@ def conditional(*values: List[Any], when: Optional[WhenType] = None):
@directive("variants")
def variant(
    name: str,
    default: Optional[Any] = None,
    default: Optional[Union[bool, str, Tuple[str, ...]]] = None,
    description: str = "",
    values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
    multi: Optional[bool] = None,

@@ -651,11 +652,29 @@ def variant(
        DirectiveError: If arguments passed to the directive are invalid
    """

    # This validation can be removed at runtime and enforced with an audit in Spack v1.0.
    # For now it's a warning to let people migrate faster.
    if not (
        default is None
        or type(default) in (bool, str)
        or (type(default) is tuple and all(type(x) is str for x in default))
    ):
        if isinstance(default, (list, tuple)):
            did_you_mean = f"default={','.join(str(x) for x in default)!r}"
        else:
            did_you_mean = f"default={str(default)!r}"
        warnings.warn(
            f"default value for variant '{name}' is not a boolean or string: default={default!r}. "
            f"Did you mean {did_you_mean}?",
            stacklevel=3,
            category=spack.error.SpackAPIWarning,
        )

    def format_error(msg, pkg):
        msg += " @*r{{[{0}, variant '{1}']}}"
        return llnl.util.tty.color.colorize(msg.format(pkg.name, name))

    if name in spack.variant.reserved_names:
    if name in spack.variant.RESERVED_NAMES:

        def _raise_reserved_name(pkg):
            msg = "The name '%s' is reserved by Spack" % name

@@ -666,7 +685,11 @@ def _raise_reserved_name(pkg):
    # Ensure we have a sequence of allowed variant values, or a
    # predicate for it.
    if values is None:
        if str(default).upper() in ("TRUE", "FALSE"):
        if (
            default in (True, False)
            or type(default) is str
            and default.upper() in ("TRUE", "FALSE")
        ):
            values = (True, False)
        else:
            values = lambda x: True
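To illustrate the new warning path: a list default now triggers ``SpackAPIWarning`` with a migration hint, while the comma-joined string form passes silently. Hypothetical recipe fragments:

.. code-block:: python

    # Warns: default is not a boolean or string. Did you mean default='70,80'?
    variant("cuda_arch", default=["70", "80"], values=("70", "80"), multi=True,
            description="CUDA architectures")

    # Migrated form: multi-valued defaults are comma-separated strings.
    variant("cuda_arch", default="70,80", values=("70", "80"), multi=True,
            description="CUDA architectures")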

@@ -699,12 +722,15 @@ def _raise_argument_error(pkg):
    # or the empty string, as the former indicates that a default
    # was not set while the latter will make the variant unparsable
    # from the command line
    if isinstance(default, tuple):
        default = ",".join(default)

    if default is None or default == "":

        def _raise_default_not_set(pkg):
            if default is None:
                msg = "either a default was not explicitly set, " "or 'None' was used"
            elif default == "":
                msg = "either a default was not explicitly set, or 'None' was used"
            else:
                msg = "the default cannot be an empty string"
            raise DirectiveError(format_error(msg, pkg))


@@ -9,7 +9,7 @@
`spack.lock` format
===================

Spack environments have existed since Spack ``v0.12.0``, and there have been 4 different
Spack environments have existed since Spack ``v0.12.0``, and there have been different
``spack.lock`` formats since then. The formats are documented here.

The high-level format of a Spack lockfile hasn't changed much between versions, but the

@@ -53,31 +53,44 @@
     - ``v3``
     - ``v4``
     - ``v5``
     - ``v6``
   * - ``v0.12:0.14``
     - ✅
     -
     -
     -
     -
     -
   * - ``v0.15:0.16``
     - ✅
     - ✅
     -
     -
     -
     -
   * - ``v0.17``
     - ✅
     - ✅
     - ✅
     -
     -
     -
   * - ``v0.18:``
     - ✅
     - ✅
     - ✅
     - ✅
     -
   * - ``v0.22:``
     -
   * - ``v0.22:v0.23``
     - ✅
     - ✅
     - ✅
     - ✅
     - ✅
     -
   * - ``v1.0:``
     - ✅
     - ✅
     - ✅
     - ✅
@@ -459,6 +472,78 @@
       }
     }
   }


Version 6
---------

Version 6 uses specs where compilers are modeled as real dependencies, and not as a node attribute.
It doesn't change the top-level lockfile format.

As part of Spack v1.0, compilers stopped being a node attribute and became a build-only dependency. Packages may
declare a dependency on the c, cxx, or fortran languages, which are now treated as virtuals, and compilers
are providers for one or more of those languages. Compilers can also inject runtime dependencies on the node being
compiled. The compiler wrapper is explicitly represented as a node in the DAG, and enters the hash.

.. code-block:: json

   {
     "_meta": {
       "file-type": "spack-lockfile",
       "lockfile-version": 6,
       "specfile-version": 5
     },
     "spack": {
       "version": "1.0.0.dev0",
       "type": "git",
       "commit": "395b34f17417132389a6a8ee4dbf831c4a04f642"
     },
     "roots": [
       {
         "hash": "tivmbe3xjw7oqv4c3jv3v4jw42a7cajq",
         "spec": "zlib-ng"
       }
     ],
     "concrete_specs": {
       "tivmbe3xjw7oqv4c3jv3v4jw42a7cajq": {
         "name": "zlib-ng",
         "version": "2.2.3",
         "<other attributes>": {},
         "dependencies": [
           {
             "name": "compiler-wrapper",
             "hash": "n5lamxu36f4cx4sm7m7gocalctve4mcx",
             "parameters": {
               "deptypes": [
                 "build"
               ],
               "virtuals": []
             }
           },
           {
             "name": "gcc",
             "hash": "b375mbpprxze4vxy4ho7aixhuchsime2",
             "parameters": {
               "deptypes": [
                 "build"
               ],
               "virtuals": [
                 "c",
                 "cxx"
               ]
             }
           },
           {
             "<other dependencies>": {}
           }
         ],
         "annotations": {
           "original_specfile_version": 5
         }
       }
     }
   }

"""

from .environment import (

@@ -481,7 +566,7 @@
    display_specs,
    environment_dir_from_name,
    environment_from_name_or_dir,
    environment_path_scopes,
    environment_path_scope,
    exists,
    initialize_environment_dir,
    installed_specs,

@@ -518,7 +603,7 @@
    "display_specs",
    "environment_dir_from_name",
    "environment_from_name_or_dir",
    "environment_path_scopes",
    "environment_path_scope",
    "exists",
    "initialize_environment_dir",
    "installed_specs",


@@ -10,8 +10,6 @@
import re
import shutil
import stat
import urllib.parse
import urllib.request
import warnings
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union

@@ -32,7 +30,6 @@
import spack.paths
import spack.repo
import spack.schema.env
import spack.schema.merged
import spack.spec
import spack.spec_list
import spack.store

@@ -43,7 +40,6 @@
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url
from spack import traverse
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY

@@ -101,16 +97,15 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
    return path_str


def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
    for scope in scopes:
        config = scope.get_section("config")
        if config and "environments_root" in config["config"]:
            raise SpackEnvironmentError(
                "Spack environments are prohibited from modifying 'config:environments_root' "
                "because it can make the definition of the environment ill-posed. Please "
                "remove from your environment and place it in a permanent scope such as "
                "defaults, system, site, etc."
            )
def ensure_no_disallowed_env_config_mods(scope: spack.config.ConfigScope) -> None:
    config = scope.get_section("config")
    if config and "environments_root" in config["config"]:
        raise SpackEnvironmentError(
            "Spack environments are prohibited from modifying 'config:environments_root' "
            "because it can make the definition of the environment ill-posed. Please "
            "remove from your environment and place it in a permanent scope such as "
            "defaults, system, site, etc."
        )


def default_manifest_yaml():

@@ -390,6 +385,7 @@ def create_in_dir(
        # dev paths in this environment to refer to their original
        # locations.
        _rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
        _rewrite_relative_repos_paths_on_relocation(env, init_file_dir)

    return env

@@ -406,8 +402,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
            dev_path = substitute_path_variables(entry["path"])
            expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)

            # Skip if the expanded path is the same (e.g. when absolute)
            if dev_path == expanded_path:
            # Skip if the substituted and expanded path is the same (e.g. when absolute)
            if entry["path"] == expanded_path:
                continue

            tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))

@@ -422,6 +418,34 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
        env._re_read()


def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
    """When initializing the environment from a manifest file and we plan
    to store the environment in a different directory, we have to rewrite
    relative repo paths to absolute ones and expand environment variables."""
    with env:
        repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
        if not repos_specs:
            return
        for i, entry in enumerate(repos_specs):
            repo_path = substitute_path_variables(entry)
            expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)

            # Skip if the substituted and expanded path is the same (e.g. when absolute)
            if entry == expanded_path:
                continue

            tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))

            repos_specs[i] = expanded_path

        spack.config.set("repos", repos_specs, scope=env.scope_name)

    env.repos_specs = None
    # If we changed the environment's spack.yaml scope, that will not be reflected
    # in the manifest that we read
    env._re_read()


def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
    """Returns the directory associated with a named environment.

@@ -549,13 +573,6 @@ def _write_yaml(data, str_or_file):
    syaml.dump_config(data, str_or_file, default_flow_style=False)


def _eval_conditional(string):
    """Evaluate conditional definitions using restricted variable scope."""
    valid_variables = spack.spec.get_host_environment()
    valid_variables.update({"re": re, "env": os.environ})
    return eval(string, valid_variables)
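A sketch of the kind of expressions this evaluates: ``when:`` conditions in an environment's definitions are Python expressions over the host description plus ``re`` and ``env``. The exact variable names come from ``spack.spec.get_host_environment()`` and are assumptions here:

.. code-block:: python

    # Illustrative conditions; "platform" and "hostname" are assumed to be
    # among the variables provided by get_host_environment().
    _eval_conditional('platform == "linux"')
    _eval_conditional('re.search(r"login", hostname) is not None')
    _eval_conditional('env.get("SPACK_CI", "") == "true"')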


def _is_dev_spec_and_has_changed(spec):
    """Check if the passed spec is a dev build and whether it has changed since the
    last installation"""

@@ -988,7 +1005,7 @@ def _process_definition(self, entry):
        """Process a single spec definition item."""
        when_string = entry.get("when")
        if when_string is not None:
            when = _eval_conditional(when_string)
            when = spack.spec.eval_conditional(when_string)
            assert len([x for x in entry if x != "when"]) == 1
        else:
            when = True

@@ -1111,11 +1128,6 @@ def user_specs(self):

    @property
    def dev_specs(self):
        if not self._dev_specs:
            self._dev_specs = self._read_dev_specs()
        return self._dev_specs

    def _read_dev_specs(self):
        dev_specs = {}
        dev_config = spack.config.get("develop", {})
        for name, entry in dev_config.items():

@@ -1533,9 +1545,6 @@ def _get_specs_to_concretize(
        return new_user_specs, kept_user_specs, specs_to_concretize

    def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
        # Avoid cyclic dependency
        import spack.solver.asp

        # Exit early if the set of concretized specs is the set of user specs
        new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
        if not new_user_specs:

@@ -2646,20 +2655,23 @@ def _ensure_env_dir():
        # error handling for bad manifests is handled on other code paths
        return

    # TODO: make this recursive
    includes = manifest[TOP_LEVEL_KEY].get("include", [])
    for include in includes:
        if os.path.isabs(include):
        included_path = spack.config.included_path(include)
        path = included_path.path
        if os.path.isabs(path):
            continue

        abspath = pathlib.Path(os.path.normpath(environment_dir / include))
        abspath = pathlib.Path(os.path.normpath(environment_dir / path))
        common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
        if common_path != environment_dir:
            tty.debug(f"Will not copy relative include from outside environment: {include}")
            tty.debug(f"Will not copy relative include file from outside environment: {path}")
            continue

        orig_abspath = os.path.normpath(envfile.parent / include)
        orig_abspath = os.path.normpath(envfile.parent / path)
        if not os.path.exists(orig_abspath):
            tty.warn(f"Included file does not exist; will not copy: '{include}'")
            tty.warn(f"Included file does not exist; will not copy: '{path}'")
            continue

        fs.touchp(abspath)

@@ -2704,9 +2716,9 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
        self.scope_name = f"env:{self.name}"
        self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")

        #: Configuration scopes associated with this environment. Note that these are not
        #: Configuration scope associated with this environment. Note that this is not
        #: invalidated by a re-read of the manifest file.
        self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
        self._env_config_scope: Optional[spack.config.ConfigScope] = None

        if not self.manifest_file.exists():
            msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"

@@ -2882,7 +2894,7 @@ def extract_name(_item):
                continue

            condition_str = item.get("when", "True")
            if not _eval_conditional(condition_str):
            if not spack.spec.eval_conditional(condition_str):
                continue

            yield idx, item

@@ -2944,140 +2956,27 @@ def __str__(self):
        return str(self.manifest_file)

    @property
    def included_config_scopes(self) -> List[spack.config.ConfigScope]:
        """List of included configuration scopes from the manifest.

        Scopes are listed in the YAML file in order from highest to
        lowest precedence, so configuration from earlier scopes will take
        precedence over later ones.

        This routine returns them in the order they should be pushed onto
        the internal scope stack (so, in reverse, from lowest to highest).

        Returns: Configuration scopes associated with the environment manifest

        Raises:
            SpackEnvironmentError: if the manifest includes a remote file but
                no configuration stage directory has been identified
        """
        scopes: List[spack.config.ConfigScope] = []

        # load config scopes added via 'include:', in reverse so that
        # highest-precedence scopes are last.
        includes = self[TOP_LEVEL_KEY].get("include", [])
        missing = []
        for i, config_path in enumerate(reversed(includes)):
            # allow paths to contain spack config/environment variables, etc.
            config_path = substitute_path_variables(config_path)
            include_url = urllib.parse.urlparse(config_path)

            # If scheme is not valid, config_path is not a url
            # of a type Spack is generally aware
            if spack.util.url.validate_scheme(include_url.scheme):
                # Transform file:// URLs to direct includes.
                if include_url.scheme == "file":
                    config_path = urllib.request.url2pathname(include_url.path)

                # Any other URL should be fetched.
                elif include_url.scheme in ("http", "https", "ftp"):
                    # Stage any remote configuration file(s)
                    staged_configs = (
                        os.listdir(self.config_stage_dir)
                        if os.path.exists(self.config_stage_dir)
                        else []
                    )
                    remote_path = urllib.request.url2pathname(include_url.path)
                    basename = os.path.basename(remote_path)
                    if basename in staged_configs:
                        # Do NOT re-stage configuration files over existing
                        # ones with the same name since there is a risk of
                        # losing changes (e.g., from 'spack config update').
                        tty.warn(
                            "Will not re-stage configuration from {0} to avoid "
                            "losing changes to the already staged file of the "
                            "same name.".format(remote_path)
                        )

                        # Recognize the configuration stage directory
                        # is flattened to ensure a single copy of each
                        # configuration file.
                        config_path = self.config_stage_dir
                        if basename.endswith(".yaml"):
                            config_path = os.path.join(config_path, basename)
                    else:
                        staged_path = spack.config.fetch_remote_configs(
                            config_path, str(self.config_stage_dir), skip_existing=True
                        )
                        if not staged_path:
                            raise SpackEnvironmentError(
                                "Unable to fetch remote configuration {0}".format(config_path)
                            )
                        config_path = staged_path

                elif include_url.scheme:
                    raise ValueError(
                        f"Unsupported URL scheme ({include_url.scheme}) for "
                        f"environment include: {config_path}"
                    )

            # treat relative paths as relative to the environment
            if not os.path.isabs(config_path):
                config_path = os.path.join(self.manifest_dir, config_path)
                config_path = os.path.normpath(os.path.realpath(config_path))

            if os.path.isdir(config_path):
                # directories are treated as regular ConfigScopes
                config_name = f"env:{self.name}:{os.path.basename(config_path)}"
                tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
                scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
            elif os.path.exists(config_path):
                # files are assumed to be SingleFileScopes
                config_name = f"env:{self.name}:{config_path}"
                tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
                scopes.append(
                    spack.config.SingleFileScope(
                        config_name, config_path, spack.schema.merged.schema
                    )
                )
            else:
                missing.append(config_path)
                continue

        if missing:
            msg = "Detected {0} missing include path(s):".format(len(missing))
            msg += "\n {0}".format("\n ".join(missing))
            raise spack.config.ConfigFileError(msg)

        return scopes

    @property
    def env_config_scopes(self) -> List[spack.config.ConfigScope]:
        """A list of all configuration scopes for the environment manifest. On the first call this
        instantiates all the scopes, on subsequent calls it returns the cached list."""
        if self._config_scopes is not None:
            return self._config_scopes
        scopes: List[spack.config.ConfigScope] = [
            *self.included_config_scopes,
            spack.config.SingleFileScope(
    def env_config_scope(self) -> spack.config.ConfigScope:
        """The configuration scope for the environment manifest"""
        if self._env_config_scope is None:
            self._env_config_scope = spack.config.SingleFileScope(
                self.scope_name,
                str(self.manifest_file),
                spack.schema.env.schema,
                yaml_path=[TOP_LEVEL_KEY],
            ),
        ]
        ensure_no_disallowed_env_config_mods(scopes)
        self._config_scopes = scopes
        return scopes
            )
        ensure_no_disallowed_env_config_mods(self._env_config_scope)
        return self._env_config_scope

    def prepare_config_scope(self) -> None:
        """Add the manifest's scopes to the global configuration search path."""
        for scope in self.env_config_scopes:
            spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
        """Add the manifest's scope to the global configuration search path."""
        spack.config.CONFIG.push_scope(
            self.env_config_scope, priority=ConfigScopePriority.ENVIRONMENT
        )

    def deactivate_config_scope(self) -> None:
        """Remove any of the manifest's scopes from the global config path."""
        for scope in self.env_config_scopes:
            spack.config.CONFIG.remove_scope(scope.name)
        """Remove the manifest's scope from the global config path."""
        spack.config.CONFIG.remove_scope(self.env_config_scope.name)

    @contextlib.contextmanager
    def use_config(self):

@@ -3088,8 +2987,8 @@ def use_config(self):
        self.deactivate_config_scope()


def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.ConfigScope]]:
    """Retrieve the suitably named environment path scopes
def environment_path_scope(name: str, path: str) -> Optional[spack.config.ConfigScope]:
    """Retrieve the suitably named environment path scope

    Arguments:
        name: configuration scope name

@@ -3104,11 +3003,9 @@ def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.
    else:
        return None

    for scope in manifest.env_config_scopes:
        scope.name = f"{name}:{scope.name}"
        scope.writable = False

    return manifest.env_config_scopes
    manifest.env_config_scope.name = f"{name}:{manifest.env_config_scope.name}"
    manifest.env_config_scope.writable = False
    return manifest.env_config_scope


class SpackEnvironmentError(spack.error.SpackError):


@@ -8,6 +8,7 @@
import llnl.util.tty as tty
from llnl.util.tty.color import colorize

import spack.config
import spack.environment as ev
import spack.repo
import spack.schema.environment

@@ -158,7 +159,8 @@ def activate(
    # become PATH variables.
    #

    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
    with env.manifest.use_config():
        env_vars_yaml = spack.config.get("env_vars", None)
    if env_vars_yaml:
        env_mods.extend(spack.schema.environment.parse(env_vars_yaml))

@@ -195,7 +197,8 @@ def deactivate() -> EnvironmentModifications:
    if active is None:
        return env_mods

    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
    with active.manifest.use_config():
        env_vars_yaml = spack.config.get("env_vars", None)
    if env_vars_yaml:
        env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())


@@ -295,8 +295,9 @@ def fetch(self):
        )

    def _fetch_from_url(self, url):
        if spack.config.get("config:url_fetch_method") == "curl":
            return self._fetch_curl(url)
        fetch_method = spack.config.get("config:url_fetch_method", "urllib")
        if fetch_method.startswith("curl"):
            return self._fetch_curl(url, config_args=fetch_method.split()[1:])
        else:
            return self._fetch_urllib(url)
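With this change ``config:url_fetch_method`` may carry extra curl options after the command name, e.g. ``curl -k --retry 3``; everything after ``curl`` is split off and forwarded. A small standalone sketch of the dispatch (stub fetchers, hypothetical names):

.. code-block:: python

    def dispatch(fetch_method: str, url: str) -> str:
        # Mirrors the branch above: anything starting with "curl" goes to curl,
        # with the remaining whitespace-separated tokens as extra arguments.
        if fetch_method.startswith("curl"):
            config_args = fetch_method.split()[1:]
            return f"curl args={config_args} url={url}"
        return f"urllib url={url}"

    print(dispatch("urllib", "https://example.com/a.tar.gz"))
    print(dispatch("curl -k --retry 3", "https://example.com/a.tar.gz"))
    # -> curl args=['-k', '--retry', '3'] url=https://example.com/a.tar.gz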

@@ -345,7 +346,7 @@ def _fetch_urllib(self, url):
        self._check_headers(str(response.headers))

    @_needs_stage
    def _fetch_curl(self, url):
    def _fetch_curl(self, url, config_args=[]):
        save_file = None
        partial_file = None
        if self.stage.save_filename:

@@ -374,7 +375,7 @@ def _fetch_curl(self, url):
        timeout = self.extra_options.get("timeout")

        base_args = web_util.base_curl_fetch_args(url, timeout)
        curl_args = save_args + base_args + cookie_args
        curl_args = config_args + save_args + base_args + cookie_args

        # Run curl but grab the mime type from the http headers
        curl = self.curl


@@ -643,7 +643,7 @@ def print_status(self, *specs, **kwargs):
            specs.sort()

            abbreviated = [
                s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
                s.cformat("{name}{@version}{compiler_flags}{variants}{%compiler}")
                for s in specs
            ]


@@ -482,7 +482,7 @@ class SimpleDAG(DotGraphBuilder):
    """Simple DOT graph, with nodes colored uniformly and edges without properties"""

    def node_entry(self, node):
        format_option = "{name}{@version}{%compiler}{/hash:7}"
        format_option = "{name}{@version}{/hash:7}{%compiler}"
        return node.dag_hash(), f'[label="{node.format(format_option)}"]'

    def edge_entry(self, edge):

@@ -515,7 +515,7 @@ def visit(self, edge):
        super().visit(edge)

    def node_entry(self, node):
        node_str = node.format("{name}{@version}{%compiler}{/hash:7}")
        node_str = node.format("{name}{@version}{/hash:7}{%compiler}")
        options = f'[label="{node_str}", group="build_dependencies", fillcolor="coral"]'
        if node.dag_hash() in self.main_unified_space:
            options = f'[label="{node_str}", group="main_psid"]'


@@ -6,7 +6,7 @@
import spack.deptypes as dt
import spack.repo

hashes = []
HASHES = []


class SpecHashDescriptor:

@@ -23,7 +23,7 @@ def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
        self.depflag = depflag
        self.package_hash = package_hash
        self.name = name
        hashes.append(self)
        HASHES.append(self)
        # Allow spec hashes to have an alternate computation method
        self.override = override

@@ -43,13 +43,9 @@ def __repr__(self):
    )


#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")


#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
    depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
#: The DAG hash includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(
    depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="hash"
)


@@ -12,7 +12,7 @@
import shutil
import sys
from collections import Counter, OrderedDict
from typing import Callable, List, Optional, Tuple, Type, TypeVar, Union
from typing import Callable, Iterable, List, Optional, Tuple, Type, TypeVar, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty

@@ -21,7 +21,6 @@
from llnl.util.lang import nullcontext
from llnl.util.tty.color import colorize

import spack.build_environment
import spack.config
import spack.error
import spack.package_base

@@ -392,15 +391,17 @@ def phase_tests(self, builder, phase_name: str, method_names: List[str]):
        if self.test_failures:
            raise TestFailure(self.test_failures)

    def stand_alone_tests(self, kwargs):
    def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
        """Run the package's stand-alone tests.

        Args:
            kwargs (dict): arguments to be used by the test process
        """
        import spack.build_environment
        import spack.build_environment  # avoid circular dependency

        spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
        spack.build_environment.start_build_process(
            self.pkg, test_process, kwargs, timeout=timeout
        )

    def parts(self) -> int:
        """The total number of (checked) test parts."""

@@ -463,6 +464,8 @@ def write_tested_status(self):

@contextlib.contextmanager
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
    import spack.build_environment  # avoid circular dependency

    wdir = "." if work_dir is None else work_dir
    tester = pkg.tester
    assert test_name and test_name.startswith(

@@ -846,7 +849,7 @@ def write_test_summary(counts: "Counter"):
class TestSuite:
    """The class that manages specs for ``spack test run`` execution."""

    def __init__(self, specs, alias=None):
    def __init__(self, specs: Iterable[Spec], alias: Optional[str] = None) -> None:
        # copy so that different test suites have different package objects
        # even if they contain the same spec
        self.specs = [spec.copy() for spec in specs]

@@ -854,42 +857,43 @@ def __init__(self, specs, alias=None):
        self.current_base_spec = None  # spec currently running do_test

        self.alias = alias
        self._hash = None
        self._stage = None
        self._hash: Optional[str] = None
        self._stage: Optional[Prefix] = None

        self.counts: "Counter" = Counter()

    @property
    def name(self):
    def name(self) -> str:
        """The name (alias or, if none, hash) of the test suite."""
        return self.alias if self.alias else self.content_hash

    @property
    def content_hash(self):
    def content_hash(self) -> str:
        """The hash used to uniquely identify the test suite."""
        if not self._hash:
            json_text = sjson.dump(self.to_dict())
            assert json_text is not None, f"{__name__} unexpected value for 'json_text'"
            sha = hashlib.sha1(json_text.encode("utf-8"))
            b32_hash = base64.b32encode(sha.digest()).lower()
            b32_hash = b32_hash.decode("utf-8")
            self._hash = b32_hash
        return self._hash
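The hashing scheme above is: serialize the suite to JSON, SHA-1 the text, then base32-encode the digest, lowercased. A standalone sketch (plain ``json`` stands in for ``spack.util.spack_json``):

.. code-block:: python

    import base64
    import hashlib
    import json

    def content_hash(payload: dict) -> str:
        json_text = json.dumps(payload, sort_keys=True)
        sha = hashlib.sha1(json_text.encode("utf-8"))
        return base64.b32encode(sha.digest()).lower().decode("utf-8")

    # A SHA-1 digest is 20 bytes, so this yields a stable 32-character identifier.
    print(content_hash({"specs": ["zlib-ng"], "alias": None}))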

    def __call__(self, *args, **kwargs):
    def __call__(
        self,
        *,
        remove_directory: bool = True,
        dirty: bool = False,
        fail_first: bool = False,
        externals: bool = False,
        timeout: Optional[int] = None,
    ):
        self.write_reproducibility_data()

        remove_directory = kwargs.get("remove_directory", True)
        dirty = kwargs.get("dirty", False)
        fail_first = kwargs.get("fail_first", False)
        externals = kwargs.get("externals", False)

        for spec in self.specs:
            try:
                if spec.package.test_suite:
                    raise TestSuiteSpecError(
                        "Package {} cannot be run in two test suites at once".format(
                            spec.package.name
                        )
                        f"Package {spec.package.name} cannot be run in two test suites at once"
                    )

                # Set up the test suite to know which test is running

@@ -904,7 +908,7 @@ def __call__(self, *args, **kwargs):
                fs.mkdirp(test_dir)

                # run the package tests
                spec.package.do_test(dirty=dirty, externals=externals)
                spec.package.do_test(dirty=dirty, externals=externals, timeout=timeout)

                # Clean up on success
                if remove_directory:

@@ -955,15 +959,12 @@ def __call__(self, *args, **kwargs):
        if failures:
            raise TestSuiteFailure(failures)

    def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
        """Determine the overall test results status for the spec.
    def test_status(self, spec: spack.spec.Spec, externals: bool) -> TestStatus:
        """Returns the overall test results status for the spec.

        Args:
            spec: instance of the spec under test
            externals: ``True`` if externals are to be tested, else ``False``

        Returns:
            the spec's test status if available or ``None``
        """
        tests_status_file = self.tested_file_for_spec(spec)
        if not os.path.exists(tests_status_file):

@@ -980,109 +981,84 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
            value = (f.read()).strip("\n")
            return TestStatus(int(value)) if value else TestStatus.NO_TESTS

    def ensure_stage(self):
    def ensure_stage(self) -> None:
        """Ensure the test suite stage directory exists."""
        if not os.path.exists(self.stage):
            fs.mkdirp(self.stage)

    @property
    def stage(self):
        """The root test suite stage directory.

        Returns:
            str: the spec's test stage directory path
        """
    def stage(self) -> Prefix:
        """The root test suite stage directory"""
        if not self._stage:
            self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash))
        return self._stage

    @stage.setter
    def stage(self, value):
    def stage(self, value: Union[Prefix, str]) -> None:
        """Set the value of a non-default stage directory."""
        self._stage = value if isinstance(value, Prefix) else Prefix(value)

    @property
    def results_file(self):
    def results_file(self) -> Prefix:
        """The path to the results summary file."""
        return self.stage.join(results_filename)

    @classmethod
    def test_pkg_id(cls, spec):
    def test_pkg_id(cls, spec: Spec) -> str:
        """The standard install test package identifier.

        Args:
            spec: instance of the spec under test

        Returns:
            str: the install test package identifier
        """
        return spec.format_path("{name}-{version}-{hash:7}")

    @classmethod
    def test_log_name(cls, spec):
    def test_log_name(cls, spec: Spec) -> str:
        """The standard log filename for a spec.

        Args:
            spec (spack.spec.Spec): instance of the spec under test

        Returns:
            str: the spec's log filename
            spec: instance of the spec under test
        """
        return "%s-test-out.txt" % cls.test_pkg_id(spec)
        return f"{cls.test_pkg_id(spec)}-test-out.txt"

    def log_file_for_spec(self, spec):
    def log_file_for_spec(self, spec: Spec) -> Prefix:
        """The test log file path for the provided spec.

        Args:
            spec (spack.spec.Spec): instance of the spec under test

        Returns:
            str: the path to the spec's log file
            spec: instance of the spec under test
        """
        return self.stage.join(self.test_log_name(spec))

    def test_dir_for_spec(self, spec):
    def test_dir_for_spec(self, spec: Spec) -> Prefix:
        """The path to the test stage directory for the provided spec.

        Args:
            spec (spack.spec.Spec): instance of the spec under test

        Returns:
            str: the spec's test stage directory path
            spec: instance of the spec under test
        """
        return Prefix(self.stage.join(self.test_pkg_id(spec)))

    @classmethod
    def tested_file_name(cls, spec):
    def tested_file_name(cls, spec: Spec) -> str:
        """The standard test status filename for the spec.

        Args:
            spec (spack.spec.Spec): instance of the spec under test

        Returns:
            str: the spec's test status filename
            spec: instance of the spec under test
        """
        return "%s-tested.txt" % cls.test_pkg_id(spec)

    def tested_file_for_spec(self, spec):
    def tested_file_for_spec(self, spec: Spec) -> str:
        """The test status file path for the spec.

        Args:
            spec (spack.spec.Spec): instance of the spec under test

        Returns:
            str: the spec's test status file path
            spec: instance of the spec under test
        """
        return fs.join_path(self.stage, self.tested_file_name(spec))

    @property
    def current_test_cache_dir(self):
    def current_test_cache_dir(self) -> str:
        """Path to the test stage directory where the current spec's cached
        build-time files were automatically copied.

        Returns:
            str: path to the current spec's staged, cached build-time files.

        Raises:
            TestSuiteSpecError: If there is no spec being tested
        """

@@ -1094,13 +1070,10 @@ def current_test_cache_dir(self):
        return self.test_dir_for_spec(base_spec).cache.join(test_spec.name)

    @property
    def current_test_data_dir(self):
    def current_test_data_dir(self) -> str:
        """Path to the test stage directory where the current spec's custom
        package (data) files were automatically copied.

        Returns:
            str: path to the current spec's staged, custom package (data) files

        Raises:
            TestSuiteSpecError: If there is no spec being tested
        """

@@ -1111,17 +1084,17 @@ def current_test_data_dir(self):
        base_spec = self.current_base_spec
        return self.test_dir_for_spec(base_spec).data.join(test_spec.name)

    def write_test_result(self, spec, result):
    def write_test_result(self, spec: Spec, result: TestStatus) -> None:
        """Write the spec's test result to the test suite results file.

        Args:
            spec (spack.spec.Spec): instance of the spec under test
            result (str): result from the spec's test execution (e.g., PASSED)
            spec: instance of the spec under test
            result: result from the spec's test execution (e.g., PASSED)
        """
        msg = f"{self.test_pkg_id(spec)} {result}"
        _add_msg_to_file(self.results_file, msg)

    def write_reproducibility_data(self):
    def write_reproducibility_data(self) -> None:
        for spec in self.specs:
            repo_cache_path = self.stage.repo.join(spec.name)
            spack.repo.PATH.dump_provenance(spec, repo_cache_path)

@@ -1166,12 +1139,12 @@ def from_dict(d):
        return TestSuite(specs, alias)

    @staticmethod
    def from_file(filename):
    def from_file(filename: str) -> "TestSuite":
        """Instantiate a TestSuite using the specs and optional alias
        provided in the given file.

        Args:
            filename (str): The path to the JSON file containing the test
            filename: The path to the JSON file containing the test
                suite specs and optional alias.

        Raises:

|
@@ -871,8 +871,8 @@ def add_command_line_scopes(
|
||||
"""
|
||||
for i, path in enumerate(command_line_scopes):
|
||||
name = f"cmd_scope_{i}"
|
||||
scopes = ev.environment_path_scopes(name, path)
|
||||
if scopes is None:
|
||||
scope = ev.environment_path_scope(name, path)
|
||||
if scope is None:
|
||||
if os.path.isdir(path): # directory with config files
|
||||
cfg.push_scope(
|
||||
spack.config.DirectoryConfigScope(name, path, writable=False),
|
||||
@@ -885,8 +885,7 @@ def add_command_line_scopes(
|
||||
else:
|
||||
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
|
||||
|
||||
for scope in scopes:
|
||||
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
|
||||
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
|
||||
|
||||
|
||||
def _main(argv=None):
|
||||
|
@@ -564,6 +564,12 @@ def __init__(self, configuration):
|
||||
def spec(self):
|
||||
return self.conf.spec
|
||||
|
||||
@tengine.context_property
|
||||
def tags(self):
|
||||
if not hasattr(self.spec.package, "tags"):
|
||||
return []
|
||||
return self.spec.package.tags
|
||||
|
||||
@tengine.context_property
|
||||
def timestamp(self):
|
||||
return datetime.datetime.now()
|
||||
|
@@ -19,6 +19,7 @@
|
||||
import spack.spec
|
||||
import spack.tengine as tengine
|
||||
import spack.util.environment
|
||||
from spack.aliases import BUILTIN_TO_LEGACY_COMPILER
|
||||
|
||||
from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFileWriter
|
||||
|
||||
@@ -223,9 +224,9 @@ def provides(self):
|
||||
# If it is in the list of supported compilers family -> compiler
|
||||
if self.spec.name in spack.compilers.config.supported_compilers():
|
||||
provides["compiler"] = spack.spec.Spec(self.spec.format("{name}{@versions}"))
|
||||
elif self.spec.name in spack.compilers.config.package_name_to_compiler_name:
|
||||
elif self.spec.name in BUILTIN_TO_LEGACY_COMPILER:
|
||||
# If it is the package for a supported compiler, but of a different name
|
||||
cname = spack.compilers.config.package_name_to_compiler_name[self.spec.name]
|
||||
cname = BUILTIN_TO_LEGACY_COMPILER[self.spec.name]
|
||||
provides["compiler"] = spack.spec.Spec(cname, self.spec.versions)
|
||||
|
||||
# All the other tokens in the hierarchy must be virtual dependencies
|
||||
|
@@ -47,6 +47,7 @@
|
||||
import spack.store
|
||||
import spack.url
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
import spack.util.web
|
||||
import spack.variant
|
||||
@@ -1289,12 +1290,13 @@ def extendee_spec(self):
|
||||
if not self.extendees:
|
||||
return None
|
||||
|
||||
deps = []
|
||||
|
||||
# If the extendee is in the spec's deps already, return that.
|
||||
for dep in self.spec.traverse(deptype=("link", "run")):
|
||||
if dep.name in self.extendees:
|
||||
deps.append(dep)
|
||||
deps = [
|
||||
dep
|
||||
for dep in self.spec.dependencies(deptype=("link", "run"))
|
||||
for d, when in self.extendees.values()
|
||||
if dep.satisfies(d) and self.spec.satisfies(when)
|
||||
]
|
||||
|
||||
if deps:
|
||||
assert len(deps) == 1
|
||||
@@ -1371,6 +1373,14 @@ def prefix(self):
|
||||
def home(self):
|
||||
return self.prefix
|
||||
|
||||
@property
|
||||
def command(self) -> spack.util.executable.Executable:
|
||||
"""Returns the main executable for this package."""
|
||||
path = os.path.join(self.home.bin, self.spec.name)
|
||||
if fsys.is_exe(path):
|
||||
return spack.util.executable.Executable(path)
|
||||
raise RuntimeError(f"Unable to locate {self.spec.name} command in {self.home.bin}")
|
||||
|
||||
def url_version(self, version):
|
||||
"""
|
||||
Given a version, this returns a string that should be substituted
|
||||
@@ -1811,7 +1821,7 @@ def _resource_stage(self, resource):
|
||||
resource_stage_folder = "-".join(pieces)
|
||||
return resource_stage_folder
|
||||
|
||||
def do_test(self, dirty=False, externals=False):
|
||||
def do_test(self, *, dirty=False, externals=False, timeout: Optional[int] = None):
|
||||
if self.test_requires_compiler and not any(
|
||||
lang in self.spec for lang in ("c", "cxx", "fortran")
|
||||
):
|
||||
@@ -1829,7 +1839,7 @@ def do_test(self, dirty=False, externals=False):
|
||||
"verbose": tty.is_verbose(),
|
||||
}
|
||||
|
||||
self.tester.stand_alone_tests(kwargs)
|
||||
self.tester.stand_alone_tests(kwargs, timeout=timeout)
|
||||
|
||||
def unit_test_check(self):
|
||||
"""Hook for unit tests to assert things about package internals.
|
||||
|
@@ -32,6 +32,7 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack
|
||||
import spack.caches
|
||||
import spack.config
|
||||
import spack.error
|
||||
@@ -49,6 +50,8 @@
|
||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||
|
||||
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
|
||||
|
||||
|
||||
def python_package_for_repo(namespace):
|
||||
"""Returns the full namespace of a repository, given its relative one
|
||||
@@ -911,19 +914,52 @@ def __reduce__(self):
|
||||
return RepoPath.unmarshal, self.marshal()
|
||||
|
||||
|
||||
def _parse_package_api_version(
|
||||
config: Dict[str, Any],
|
||||
min_api: Tuple[int, int] = spack.min_package_api_version,
|
||||
max_api: Tuple[int, int] = spack.package_api_version,
|
||||
) -> Tuple[int, int]:
|
||||
api = config.get("api")
|
||||
if api is None:
|
||||
package_api = (1, 0)
|
||||
else:
|
||||
if not isinstance(api, str):
|
||||
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
|
||||
api_match = _API_REGEX.match(api)
|
||||
if api_match is None:
|
||||
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
|
||||
package_api = (int(api_match.group(1)), int(api_match.group(2)))
|
||||
|
||||
if min_api <= package_api <= max_api:
|
||||
return package_api
|
||||
|
||||
min_str = ".".join(str(i) for i in min_api)
|
||||
max_str = ".".join(str(i) for i in max_api)
|
||||
curr_str = ".".join(str(i) for i in package_api)
|
||||
raise BadRepoError(
|
||||
f"Package API v{curr_str} is not supported by this version of Spack ("
|
||||
f"must be between v{min_str} and v{max_str})"
|
||||
)
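A quick sketch of how the parser above behaves; the bounds are passed explicitly so the example does not depend on the running Spack's actual limits:

.. code-block:: python

    _parse_package_api_version({}, min_api=(1, 0), max_api=(2, 2))               # (1, 0): default
    _parse_package_api_version({"api": "v2.1"}, min_api=(1, 0), max_api=(2, 2))  # (2, 1)
    _parse_package_api_version({"api": "2.1"}, min_api=(1, 0), max_api=(2, 2))   # BadRepoError: no leading "v"
    _parse_package_api_version({"api": "v9.0"}, min_api=(1, 0), max_api=(2, 2))  # BadRepoError: out of range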


class Repo:
    """Class representing a package repository in the filesystem.

    Each package repository must have a top-level configuration file
    called `repo.yaml`.
    Each package repository must have a top-level configuration file called `repo.yaml`.

    Currently, `repo.yaml` must define:
    It contains the following keys:

    `namespace`:
        A Python namespace where the repository's packages should live.

    `subdirectory`:
        An optional subdirectory name where packages are placed.

    `api`:
        A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
        For the repo to be compatible with the current version of Spack, the version must be
        greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
        :py:data:`spack.package_api_version`.
    """

    def __init__(

@@ -960,7 +996,7 @@ def check(condition, msg):
            f"{os.path.join(root, repo_config_name)} must define a namespace.",
        )

        self.namespace = config["namespace"]
        self.namespace: str = config["namespace"]
        check(
            re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
            f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "

@@ -973,12 +1009,14 @@ def check(condition, msg):
        # Keep name components around for checking prefixes.
        self._names = self.full_namespace.split(".")

        packages_dir = config.get("subdirectory", packages_dir_name)
        packages_dir: str = config.get("subdirectory", packages_dir_name)
        self.packages_path = os.path.join(self.root, packages_dir)
        check(
            os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
        )

        self.package_api = _parse_package_api_version(config)

        # Class attribute overrides by package name
        self.overrides = overrides or {}

@@ -1028,7 +1066,7 @@ def is_prefix(self, fullname: str) -> bool:
        parts = fullname.split(".")
        return self._names[: len(parts)] == parts

    def _read_config(self) -> Dict[str, str]:
    def _read_config(self) -> Dict[str, Any]:
        """Check for a YAML config file in this db's root directory."""
        try:
            with open(self.config_file, encoding="utf-8") as reponame_file:

@@ -1370,6 +1408,8 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
            config.write(f"  namespace: '{namespace}'\n")
            if subdir != packages_dir_name:
                config.write(f"  subdirectory: '{subdir}'\n")
            x, y = spack.package_api_version
            config.write(f"  api: v{x}.{y}\n")

    except OSError as e:
        # try to clean up.


@@ -100,7 +100,7 @@
    "allow_sgid": {"type": "boolean"},
    "install_status": {"type": "boolean"},
    "binary_index_root": {"type": "string"},
    "url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
    "url_fetch_method": {"type": "string", "pattern": r"^urllib$|^curl( .*)*"},
    "additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
    "binary_index_ttl": {"type": "integer", "minimum": 0},
    "aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},


@@ -29,11 +29,7 @@
        # merged configuration scope schemas
        spack.schema.merged.properties,
        # extra environment schema properties
        {
            "include": {"type": "array", "default": [], "items": {"type": "string"}},
            "specs": spec_list_schema,
            "include_concrete": include_concrete,
        },
        {"specs": spec_list_schema, "include_concrete": include_concrete},
    ),
}
}
lib/spack/spack/schema/include.py (new file, 41 lines)

@@ -0,0 +1,41 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for include.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/include.py
   :lines: 12-
"""
from typing import Any, Dict

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
    "include": {
        "type": "array",
        "default": [],
        "additionalProperties": False,
        "items": {
            "anyOf": [
                {
                    "type": "object",
                    "properties": {
                        "when": {"type": "string"},
                        "path": {"type": "string"},
                        "sha256": {"type": "string"},
                        "optional": {"type": "boolean"},
                    },
                    "required": ["path"],
                    "additionalProperties": False,
                },
                {"type": "string"},
            ]
        },
    }
}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack include configuration file schema",
    "properties": properties,
}
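A hedged sketch of what this schema accepts, validated here with the third-party ``jsonschema`` package (used for illustration only; not a dependency this diff adds):

.. code-block:: python

    import jsonschema

    # Both plain string entries and object entries are allowed by the anyOf above.
    data = {
        "include": [
            "relative/configs",
            {"path": "/etc/spack/extra.yaml", "optional": True, "when": 'os == "ubuntu22.04"'},
        ]
    }
    jsonschema.validate(data, schema)  # passes

    # Object entries must carry "path":
    jsonschema.validate({"include": [{"optional": True}]}, schema)  # raises ValidationError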
|
@@ -21,6 +21,7 @@
|
||||
import spack.schema.definitions
|
||||
import spack.schema.develop
|
||||
import spack.schema.env_vars
|
||||
import spack.schema.include
|
||||
import spack.schema.mirrors
|
||||
import spack.schema.modules
|
||||
import spack.schema.packages
|
||||
@@ -40,6 +41,7 @@
|
||||
spack.schema.definitions.properties,
|
||||
spack.schema.develop.properties,
|
||||
spack.schema.env_vars.properties,
|
||||
spack.schema.include.properties,
|
||||
spack.schema.mirrors.properties,
|
||||
spack.schema.modules.properties,
|
||||
spack.schema.packages.properties,
|
||||
@@ -48,7 +50,6 @@
|
||||
spack.schema.view.properties,
|
||||
)
|
||||
|
||||
|
||||
#: Full schema with metadata
|
||||
schema = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
|
@@ -49,7 +49,6 @@
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.package_base
|
||||
import spack.package_prefs
|
||||
import spack.patch
|
||||
@@ -563,7 +562,6 @@ def to_dict(self, test: bool = False) -> dict:
|
||||
serial_node_arg = (
|
||||
lambda node_dict: f"""{{"id": "{node_dict.id}", "pkg": "{node_dict.pkg}"}}"""
|
||||
)
|
||||
spec_hash_type = ht.process_hash if test else ht.dag_hash
|
||||
ret = dict()
|
||||
ret["asp"] = self.asp
|
||||
ret["criteria"] = self.criteria
|
||||
@@ -577,14 +575,14 @@ def to_dict(self, test: bool = False) -> dict:
|
||||
serial_answer = answer[:2]
|
||||
serial_answer_dict = {}
|
||||
for node, spec in answer[2].items():
|
||||
serial_answer_dict[serial_node_arg(node)] = spec.to_dict(hash=spec_hash_type)
|
||||
serial_answer_dict[serial_node_arg(node)] = spec.to_dict()
|
||||
serial_answer = serial_answer + (serial_answer_dict,)
|
||||
serial_answers.append(serial_answer)
|
||||
ret["answers"] = serial_answers
|
||||
ret["specs_by_input"] = {}
|
||||
input_specs = {} if not self.specs_by_input else self.specs_by_input
|
||||
for input, spec in input_specs.items():
|
||||
ret["specs_by_input"][str(input)] = spec.to_dict(hash=spec_hash_type)
|
||||
ret["specs_by_input"][str(input)] = spec.to_dict()
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
@@ -644,10 +642,9 @@ class ConcretizationCache:
|
||||
"""
|
||||
|
||||
def __init__(self, root: Union[str, None] = None):
|
||||
if not root:
|
||||
root = spack.config.get(
|
||||
"config:concretization_cache:url", spack.paths.default_conc_cache_path
|
||||
)
|
||||
root = root or spack.config.get(
|
||||
"config:concretization_cache:url", spack.paths.default_conc_cache_path
|
||||
)
|
||||
self.root = pathlib.Path(spack.util.path.canonicalize_path(root))
|
||||
self._fc = FileCache(self.root)
|
||||
self._cache_manifest = ".cache_manifest"

@@ -1189,11 +1186,11 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
         full_path = lambda x: os.path.join(parent_dir, x)
         abs_control_files = [full_path(x) for x in control_files]
         for ctrl_file in abs_control_files:
-            with open(ctrl_file, "r+", encoding="utf-8") as f:
+            with open(ctrl_file, "r", encoding="utf-8") as f:
                 problem_repr += "\n" + f.read()

         result = None
-        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
+        conc_cache_enabled = spack.config.get("config:concretization_cache:enable", False)
         if conc_cache_enabled:
             result, concretization_stats = CONC_CACHE.fetch(problem_repr)

@@ -1507,6 +1504,7 @@ def __init__(self, tests: bool = False):
         )

         self.possible_compilers: List[spack.spec.Spec] = []
+        self.rejected_compilers: Set[spack.spec.Spec] = set()
         self.possible_oses: Set = set()
         self.variant_values_from_specs: Set = set()
         self.version_constraints: Set = set()

@@ -1773,7 +1771,7 @@ def define_variant(

         # make a spec indicating whether the variant has this conditional value
         variant_has_value = spack.spec.Spec()
-        variant_has_value.variants[name] = spack.variant.AbstractVariant(name, value.value)
+        variant_has_value.variants[name] = vt.VariantBase(name, value.value)

         if value.when:
             # the conditional value is always "possible", but it imposes its when condition as

@@ -2167,8 +2165,8 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
             spec.attach_git_version_lookup()

             when_spec = spec
-            if virtual:
-                when_spec = spack.spec.Spec(pkg_name)
+            if virtual and spec.name != pkg_name:
+                when_spec = spack.spec.Spec(f"^[virtuals={pkg_name}] {spec.name}")

             try:
                 context = ConditionContext()

@@ -2266,6 +2264,13 @@ def external_packages(self):
         for local_idx, spec in enumerate(candidate_specs):
             msg = f"{spec.name} available as external when satisfying {spec}"

+            if any(x.satisfies(spec) for x in self.rejected_compilers):
+                tty.debug(
+                    f"[{__name__}]: not considering {spec} as external, since "
+                    f"it's a non-working compiler"
+                )
+                continue
+
             if spec_filters and spec not in selected_externals:
                 continue

@@ -2329,6 +2334,8 @@ def preferred_variants(self, pkg_name):
         if not preferred_variants:
             return

+        self.gen.h2(f"Package preferences: {pkg_name}")
+
         for variant_name in sorted(preferred_variants):
             variant = preferred_variants[variant_name]

@@ -2992,14 +2999,46 @@ def setup(
         """
         reuse = reuse or []
         check_packages_exist(specs)
+        self.gen = ProblemInstanceBuilder()

-        node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
+        # Compute possible compilers first, so we can record which dependencies they might inject
+        _ = spack.compilers.config.all_compilers(init_config=True)
+
+        # Get compilers from buildcache only if injected through "reuse" specs
+        supported_compilers = spack.compilers.config.supported_compilers()
+        compilers_from_reuse = {
+            x for x in reuse if x.name in supported_compilers and not x.external
+        }
+        candidate_compilers, self.rejected_compilers = possible_compilers(
+            configuration=spack.config.CONFIG
+        )
+        for x in candidate_compilers:
+            if x.external or x in reuse:
+                continue
+            reuse.append(x)
+            for dep in x.traverse(root=False, deptype="run"):
+                reuse.extend(dep.traverse(deptype=("link", "run")))
+
+        candidate_compilers.update(compilers_from_reuse)
+        self.possible_compilers = list(candidate_compilers)
+        self.possible_compilers.sort()  # type: ignore[call-overload]
+
+        self.gen.h1("Runtimes")
+        injected_dependencies = self.define_runtime_constraints()
+
+        node_counter = create_counter(
+            specs + injected_dependencies, tests=self.tests, possible_graph=self.possible_graph
+        )
         self.possible_virtuals = node_counter.possible_virtuals()
         self.pkgs = node_counter.possible_dependencies()
         self.libcs = sorted(all_libcs())  # type: ignore[type-var]

         # Fail if we already know an unreachable node is requested
         for spec in specs:
             # concrete roots don't need their dependencies verified
             if spec.concrete:
                 continue

             missing_deps = [
                 str(d)
                 for d in spec.traverse()

@@ -3012,7 +3051,6 @@ def setup(
             if node.namespace is not None:
                 self.explicitly_required_namespaces[node.name] = node.namespace

-        self.gen = ProblemInstanceBuilder()
         self.gen.h1("Generic information")
         if using_libc_compatibility():
             for libc in self.libcs:

@@ -3041,25 +3079,6 @@ def setup(

         specs = tuple(specs)  # ensure compatible types to add

-        _ = spack.compilers.config.all_compilers(init_config=True)
-
-        # Get compilers from buildcache only if injected through "reuse" specs
-        supported_compilers = spack.compilers.config.supported_compilers()
-        compilers_from_reuse = {
-            x for x in reuse if x.name in supported_compilers and not x.external
-        }
-        candidate_compilers = possible_compilers(configuration=spack.config.CONFIG)
-        for x in candidate_compilers:
-            if x.external or x in reuse:
-                continue
-            reuse.append(x)
-            for dep in x.traverse(root=False, deptype="run"):
-                reuse.extend(dep.traverse(deptype=("link", "run")))
-
-        candidate_compilers.update(compilers_from_reuse)
-        self.possible_compilers = list(candidate_compilers)
-        self.possible_compilers.sort()  # type: ignore[call-overload]
-
         self.gen.h1("Reusable concrete specs")
         self.define_concrete_input_specs(specs, self.pkgs)
         if reuse:

@@ -3111,7 +3130,6 @@ def setup(
         for pkg in sorted(self.pkgs):
             self.gen.h2("Package rules: %s" % pkg)
             self.pkg_rules(pkg, tests=self.tests)
-            self.gen.h2("Package preferences: %s" % pkg)
             self.preferred_variants(pkg)

         self.gen.h1("Special variants")

@@ -3131,9 +3149,6 @@ def setup(
         self.gen.h1("Variant Values defined in specs")
         self.define_variant_values()

-        self.gen.h1("Runtimes")
-        self.define_runtime_constraints()
-
         self.gen.h1("Version Constraints")
         self.collect_virtual_constraints()
         self.define_version_constraints()

@@ -3167,8 +3182,10 @@ def visit(node):
         path = os.path.join(parent_dir, "concretize.lp")
         parse_files([path], visit)

-    def define_runtime_constraints(self):
-        """Define the constraints to be imposed on the runtimes"""
+    def define_runtime_constraints(self) -> List[spack.spec.Spec]:
+        """Define the constraints to be imposed on the runtimes, and returns a list of
+        injected packages.
+        """
         recorder = RuntimePropertyRecorder(self)

         for compiler in self.possible_compilers:

@@ -3184,12 +3201,13 @@ def define_runtime_constraints(self):

             # FIXME (compiler as nodes): think of using isinstance(compiler_cls, WrappedCompiler)
             # Add a dependency on the compiler wrapper
-            recorder("*").depends_on(
-                "compiler-wrapper",
-                when=f"%{compiler.name}@{compiler.versions}",
-                type="build",
-                description=f"Add the compiler wrapper when using {compiler}",
-            )
+            for language in ("c", "cxx", "fortran"):
+                recorder("*").depends_on(
+                    "compiler-wrapper",
+                    when=f"%[virtuals={language}] {compiler.name}@{compiler.versions}",
+                    type="build",
+                    description=f"Add the compiler wrapper when using {compiler} for {language}",
+                )

             if not using_libc_compatibility():
                 continue

@@ -3218,6 +3236,7 @@ def define_runtime_constraints(self):
             )

         recorder.consume_facts()
+        return sorted(recorder.injected_dependencies)

     def literal_specs(self, specs):
         for spec in sorted(specs):

@@ -3387,8 +3406,7 @@ def __init__(self):
         self.asp_problem = []

     def fact(self, atom: AspFunction) -> None:
-        symbol = atom.symbol() if hasattr(atom, "symbol") else atom
-        self.asp_problem.append(f"{str(symbol)}.\n")
+        self.asp_problem.append(f"{atom}.\n")

     def append(self, rule: str) -> None:
         self.asp_problem.append(rule)

@@ -3417,12 +3435,13 @@ def value(self) -> str:
        return "".join(self.asp_problem)


-def possible_compilers(*, configuration) -> Set["spack.spec.Spec"]:
-    result = set()
+def possible_compilers(*, configuration) -> Tuple[Set["spack.spec.Spec"], Set["spack.spec.Spec"]]:
+    result, rejected = set(), set()

     # Compilers defined in configuration
     for c in spack.compilers.config.all_compilers_from(configuration):
         if using_libc_compatibility() and not c_compiler_runs(c):
+            rejected.add(c)
             try:
                 compiler = c.extra_attributes["compilers"]["c"]
                 tty.debug(

@@ -3435,6 +3454,7 @@ def possible_compilers(*, configuration) -> Set["spack.spec.Spec"]:
             continue

         if using_libc_compatibility() and not CompilerPropertyDetector(c).default_libc():
+            rejected.add(c)
             warnings.warn(
                 f"cannot detect libc from {c}. The compiler will not be used "
                 f"during concretization."

@@ -3442,9 +3462,7 @@ def possible_compilers(*, configuration) -> Set["spack.spec.Spec"]:
             continue

         if c in result:
-            warnings.warn(
-                f"duplicate {c.long_spec} compiler found. Edit your packages.yaml to remove it."
-            )
+            tty.debug(f"[{__name__}] duplicate {c.long_spec} compiler found")
             continue

         result.add(c)

@@ -3454,7 +3472,7 @@ def possible_compilers(*, configuration) -> Set["spack.spec.Spec"]:
     for pkg_name in supported_compilers:
         result.update(spack.store.STORE.db.query(pkg_name))

-    return result
+    return result, rejected


 class RuntimePropertyRecorder:
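With this change `possible_compilers` returns a pair, so callers can distinguish compilers that were merely not selected from ones known to be broken (the `setup()` hunk above uses the rejected set to skip non-working externals). A self-contained toy model of the new contract; all names here are illustrative:

from typing import Callable, Set, Tuple

def possible_compilers_sketch(
    all_compilers: Set[str], works: Callable[[str], bool]
) -> Tuple[Set[str], Set[str]]:
    accepted, rejected = set(), set()
    for c in all_compilers:
        if not works(c):
            rejected.add(c)  # remembered instead of silently dropped
            continue
        accepted.add(c)
    return accepted, rejected

accepted, rejected = possible_compilers_sketch(
    {"gcc@12", "broken@1"}, lambda c: "broken" not in c
)
assert rejected == {"broken@1"}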

@@ -3483,6 +3501,7 @@ def __init__(self, setup):
         self._setup = setup
         self.rules = []
         self.runtime_conditions = set()
+        self.injected_dependencies = set()
         # State of this object set in the __call__ method, and reset after
         # each directive-like method
         self.current_package = None

@@ -3521,6 +3540,7 @@ def depends_on(self, dependency_str: str, *, when: str, type: str, description:
         if dependency_spec.versions != vn.any_version:
             self._setup.version_constraints.add((dependency_spec.name, dependency_spec.versions))

+        self.injected_dependencies.add(dependency_spec)
         body_str, node_variable = self.rule_body_from(when_spec)

         head_clauses = self._setup.spec_clauses(dependency_spec, body=False)

@@ -3582,11 +3602,9 @@ def rule_body_from(self, when_spec: "spack.spec.Spec") -> Tuple[str, str]:
                 # (avoid adding virtuals everywhere, if a single edge needs it)
                 _, provider, virtual = clause.args
                 clause.args = "virtual_on_edge", node_placeholder, provider, virtual
-        body_str = (
-            f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n"
-            f" not external({node_variable}),\n"
-            f" not runtime(Package)"
-        ).replace(f'"{node_placeholder}"', f"{node_variable}")
+        body_str = ",\n".join(f" {x}" for x in body_clauses)
+        body_str += f",\n not external({node_variable})"
+        body_str = body_str.replace(f'"{node_placeholder}"', f"{node_variable}")
         for old, replacement in when_substitutions.items():
             body_str = body_str.replace(old, replacement)
         return body_str, node_variable

@@ -3632,7 +3650,6 @@ def propagate(self, constraint_str: str, *, when: str):
         body_str, node_variable = self.rule_body_from(when_spec)
         constraint_spec = spack.spec.Spec(constraint_str)

-        # constraint_spec.name = placeholder
         constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False)
         for clause in constraint_clauses:
             if clause.args[0] == "node_version_satisfies":

@@ -3677,20 +3694,21 @@ def consume_facts(self):
         """Consume the facts collected by this object, and emits rules and
         facts for the runtimes.
         """
+        self._setup.gen.h2("Runtimes: declarations")
+        runtime_pkgs = sorted(
+            {x.name for x in self.injected_dependencies if not spack.repo.PATH.is_virtual(x.name)}
+        )
+        for runtime_pkg in runtime_pkgs:
+            self._setup.gen.fact(fn.runtime(runtime_pkg))
+        self._setup.gen.newline()
+
         self._setup.gen.h2("Runtimes: rules")
         self._setup.gen.newline()
         for rule in self.rules:
             self._setup.gen.append(rule)
         self._setup.gen.newline()

-        self._setup.gen.h2("Runtimes: conditions")
-        for runtime_pkg in spack.repo.PATH.packages_with_tags("runtime"):
-            self._setup.gen.fact(fn.runtime(runtime_pkg))
-            self._setup.gen.fact(fn.possible_in_link_run(runtime_pkg))
-            self._setup.gen.newline()
-            # Inject version rules for runtimes (versions are declared based
-            # on the available compilers)
-            self._setup.pkg_version_rules(runtime_pkg)
-
+        self._setup.gen.h2("Runtimes: requirements")
         for imposed_spec, when_spec in sorted(self.runtime_conditions):
             msg = f"{when_spec} requires {imposed_spec} at runtime"
             _ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)

@@ -3825,7 +3843,7 @@ def virtual_on_edge(self, parent_node, provider_node, virtual):
         provider_spec = self._specs[provider_node]
         dependencies = [x for x in dependencies if id(x.spec) == id(provider_spec)]
         assert len(dependencies) == 1, f"{virtual}: {provider_node.pkg}"
-        dependencies[0].update_virtuals((virtual,))
+        dependencies[0].update_virtuals(virtual)

     def reorder_flags(self):
         """For each spec, determine the order of compiler flags applied to it.

@@ -4259,6 +4277,8 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:


 def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
+    # TODO (compiler as nodes): this function contains specific names from builtin, and should
+    # be made more general
     if "gcc" in spec and "gcc-runtime" not in spec:
         return False

@@ -4695,3 +4715,7 @@ def __init__(self, provided, conflicts):

 class InvalidSpliceError(spack.error.SpackError):
     """For cases in which the splice configuration is invalid."""
+
+
+class NoCompilerFoundError(spack.error.SpackError):
+    """Raised when there is no possible compiler"""

@@ -314,12 +314,6 @@ possible_version_weight(node(ID, Package), Weight)
 { attr("version", node(ID, Package), Version) : pkg_fact(Package, version_satisfies(Constraint, Version)) }
   :- attr("node_version_satisfies", node(ID, Package), Constraint).

-% If there is at least a version that satisfy the constraint, impose a lower
-% bound on the choice rule to avoid false positives with the error below
-{ attr("version", node(ID, Package), Version) : pkg_fact(Package, version_satisfies(Constraint, Version)) }
-  :- attr("node_version_satisfies", node(ID, Package), Constraint),
-     pkg_fact(Package, version_satisfies(Constraint, _)).
-
 % More specific error message if the version cannot satisfy some constraint
 % Otherwise covered by `no_version_error` and `versions_conflict_error`.
 error(1, "Cannot satisfy '{0}@{1}'", Package, Constraint)

@@ -504,6 +498,9 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-

 attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).

+1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
+  attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
+  build_requirement(ParentNode, node(X, BuildDependency)).

 % Reconstruct virtual dependencies for reused specs
 attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual)

@@ -697,6 +694,13 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
 attr("virtual_on_incoming_edges", ProviderNode, Virtual)
   :- attr("virtual_on_edge", _, ProviderNode, Virtual).

+% This is needed to allow requirement on virtuals,
+% when a virtual root is requested
+attr("virtual_on_incoming_edges", ProviderNode, Virtual)
+  :- attr("virtual_root", node(min_dupe_id, Virtual)),
+     attr("root", ProviderNode),
+     provider(ProviderNode, node(min_dupe_id, Virtual)).
+
 % dependencies on virtuals also imply that the virtual is a virtual node
 1 { attr("virtual_node", node(0..X-1, Virtual)) : max_dupes(Virtual, X) }
   :- node_depends_on_virtual(PackageNode, Virtual).

@@ -705,8 +709,8 @@ attr("virtual_on_incoming_edges", ProviderNode, Virtual)
 % The provider must be selected among the possible providers.

 error(100, "'{0}' cannot be a provider for the '{1}' virtual", Package, Virtual)
-  :- attr("provider_set", node(min_dupe_id, Package), node(min_dupe_id, Virtual)),
-     not virtual_condition_holds( node(min_dupe_id, Package), Virtual).
+  :- attr("provider_set", node(X, Package), node(Y, Virtual)),
+     not virtual_condition_holds( node(X, Package), Virtual).

 error(100, "Cannot find valid provider for virtual {0}", Virtual)
   :- attr("virtual_node", node(X, Virtual)),

@@ -1067,12 +1071,14 @@ error(100, "Cannot set variant '{0}' for package '{1}' because the variant condi
     build(node(ID, Package)).

 % at most one variant value for single-valued variants.
-error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
+error(100, "'{0}' requires conflicting variant values 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
   :- attr("node", node(ID, Package)),
      node_has_variant(node(ID, Package), Variant, _),
      variant_single_value(node(ID, Package), Variant),
-     build(node(ID, Package)),
-     2 { attr("variant_value", node(ID, Package), Variant, Value) }.
+     attr("variant_value", node(ID, Package), Variant, Value1),
+     attr("variant_value", node(ID, Package), Variant, Value2),
+     Value1 < Value2,
+     build(node(ID, Package)).

 error(100, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
   :- attr("node", node(ID, Package)),

@@ -1415,6 +1421,8 @@ compiler(Compiler) :- compiler_supports_target(Compiler, _, _).
 language("c").
 language("cxx").
 language("fortran").
+language("hip-lang").
+language_runtime("fortran-rt").

 error(10, "Only external, or concrete, compilers are allowed for the {0} language", Language)
   :- provider(ProviderNode, node(_, Language)),

@@ -1677,7 +1685,7 @@ opt_criterion(60, "preferred providers for roots").
 #minimize{
     Weight@60+Priority,ProviderNode,X,Virtual
     : provider_weight(ProviderNode, node(X, Virtual), Weight),
-    attr("root", ProviderNode), not language(Virtual),
+    attr("root", ProviderNode), not language(Virtual), not language_runtime(Virtual),
     build_priority(ProviderNode, Priority)
 }.

@@ -1710,7 +1718,7 @@ opt_criterion(48, "preferred providers (non-roots)").
 #minimize{
     Weight@48+Priority,ProviderNode,X,Virtual
     : provider_weight(ProviderNode, node(X, Virtual), Weight),
-    not attr("root", ProviderNode), not language(Virtual),
+    not attr("root", ProviderNode), not language(Virtual), not language_runtime(Virtual),
     build_priority(ProviderNode, Priority)
 }.

@@ -1803,6 +1811,15 @@ opt_criterion(5, "non-preferred targets").
     not runtime(Package)
 }.

+opt_criterion(4, "preferred providers (language runtimes)").
+#minimize{ 0@204: #true }.
+#minimize{ 0@4: #true }.
+#minimize{
+    Weight@4+Priority,ProviderNode,X,Virtual
+    : provider_weight(ProviderNode, node(X, Virtual), Weight),
+    language_runtime(Virtual),
+    build_priority(ProviderNode, Priority)
+}.

 % Choose more recent versions for runtimes
 opt_criterion(3, "version badness (runtimes)").

@@ -31,16 +31,19 @@ class AspObject:
     """Object representing a piece of ASP code."""


-def _id(thing: Any) -> Union[str, AspObject]:
+def _id(thing: Any) -> Union[str, int, AspObject]:
     """Quote string if needed for it to be a valid identifier."""
-    if isinstance(thing, AspObject):
+    if isinstance(thing, bool):
+        return f'"{thing}"'
+    elif isinstance(thing, (AspObject, int)):
         return thing
-    elif isinstance(thing, bool):
-        return f'"{str(thing)}"'
-    elif isinstance(thing, int):
-        return str(thing)
-    else:
-        return f'"{str(thing)}"'
+    if isinstance(thing, str):
+        # escape characters that cannot be in clingo strings
+        thing = thing.replace("\\", r"\\")
+        thing = thing.replace("\n", r"\n")
+        thing = thing.replace('"', r"\"")
+    return f'"{thing}"'


 class AspVar(AspObject):

@@ -90,26 +93,9 @@ def __call__(self, *args: Any) -> "AspFunction":
         """
         return AspFunction(self.name, self.args + args)

-    def _argify(self, arg: Any) -> Any:
-        """Turn the argument into an appropriate clingo symbol"""
-        if isinstance(arg, bool):
-            return clingo().String(str(arg))
-        elif isinstance(arg, int):
-            return clingo().Number(arg)
-        elif isinstance(arg, AspFunction):
-            return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
-        elif isinstance(arg, AspVar):
-            return clingo().Variable(arg.name)
-        return clingo().String(str(arg))
-
-    def symbol(self):
-        """Return a clingo symbol for this function"""
-        return clingo().Function(
-            self.name, [self._argify(arg) for arg in self.args], positive=True
-        )
-
     def __str__(self) -> str:
-        return f"{self.name}({', '.join(str(_id(arg)) for arg in self.args)})"
+        args = f"({','.join(str(_id(arg)) for arg in self.args)})"
+        return f"{self.name}{args}"

     def __repr__(self) -> str:
         return str(self)

@@ -117,7 +117,7 @@ error(0, "Cannot find a valid provider for virtual {0}", Virtual, startcauses, C
     condition_holds(Cause, node(CID, TriggerPkg)).

 % At most one variant value for single-valued variants
-error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
+error(0, "'{0}' requires conflicting variant values 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
   :- attr("node", node(X, Package)),
      node_has_variant(node(X, Package), Variant, VariantID),
      variant_single_value(node(X, Package), Variant),

@@ -18,8 +18,6 @@
 import spack.store
 from spack.error import SpackError

-RUNTIME_TAG = "runtime"
-

 class PossibleGraph(NamedTuple):
     real_pkgs: Set[str]

@@ -50,7 +48,8 @@ def possible_dependencies(
 ) -> PossibleGraph:
     """Returns the set of possible dependencies, and the set of possible virtuals.

-    Both sets always include runtime packages, which may be injected by compilers.
+    Runtime packages, which may be injected by compilers, needs to be added to specs if
+    the dependency is not explicit in the package.py recipe.

     Args:
         transitive: return transitive dependencies if True, only direct dependencies if False

@@ -70,14 +69,9 @@ class NoStaticAnalysis(PossibleDependencyGraph):
     def __init__(self, *, configuration: spack.config.Configuration, repo: spack.repo.RepoPath):
         self.configuration = configuration
         self.repo = repo
-        self.runtime_pkgs = set(self.repo.packages_with_tags(RUNTIME_TAG))
-        self.runtime_virtuals = set()
         self._platform_condition = spack.spec.Spec(
             f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
         )
-        for x in self.runtime_pkgs:
-            pkg_class = self.repo.get_pkg_class(x)
-            self.runtime_virtuals.update(pkg_class.provided_virtual_names())

         try:
             self.libc_pkgs = [x.name for x in self.providers_for("libc")]

@@ -214,8 +208,6 @@ def possible_dependencies(
         for root, children in edges.items():
             real_packages.update(x for x in children if self._is_possible(pkg_name=x))

-        virtuals.update(self.runtime_virtuals)
-        real_packages = real_packages | self.runtime_pkgs
         return PossibleGraph(real_pkgs=real_packages, virtuals=virtuals, edges=edges)

     def _package_list(self, specs: Tuple[Union[spack.spec.Spec, str], ...]) -> List[str]:

@@ -470,7 +462,7 @@ def possible_packages_facts(self, gen, fn):
             gen.fact(fn.max_dupes(package_name, 1))
             gen.newline()

-        gen.h2("Packages with at multiple possible nodes (build-tools)")
+        gen.h2("Packages with multiple possible nodes (build-tools)")
         default = spack.config.CONFIG.get("concretizer:duplicates:max_dupes:default", 2)
         for package_name in sorted(self.possible_dependencies() & build_tools):
             max_dupes = spack.config.CONFIG.get(

@@ -8,6 +8,10 @@
 % These rules are used on Linux
 %=============================================================================

+% Non-libc reused specs must be host libc compatible. In case we build packages, we get a
+% host compatible libc provider from other rules. If nothing is built, there is no libc provider,
+% since it's pruned from reusable specs, meaning we have to explicitly impose reused specs are host
+% compatible.

 % A package cannot be reused if it needs a libc that is not compatible with the current one
 error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage)

@@ -24,14 +28,6 @@ error(100, "Cannot reuse {0} since we cannot determine libc compatibility", Reus
     attr("needs_libc", node(R, ReusedPackage)),
     not attr("compatible_libc", node(R, ReusedPackage), _, _).

-% Non-libc reused specs must be host libc compatible. In case we build packages, we get a
-% host compatible libc provider from other rules. If nothing is built, there is no libc provider,
-% since it's pruned from reusable specs, meaning we have to explicitly impose reused specs are host
-% compatible.
-%:- attr("hash", node(R, ReusedPackage), Hash),
-%   not provider(node(R, ReusedPackage), node(0, "libc")),
-%   not attr("compatible_libc", node(R, ReusedPackage), _, _).

 % The libc provider must be one that a compiler can target
 :- has_built_packages(),
    provider(node(X, LibcPackage), node(0, "libc")),

@@ -11,7 +11,7 @@
 import spack.package_base
 import spack.repo
 import spack.spec
-from spack.config import get_mark_from_yaml_data
+from spack.util.spack_yaml import get_mark_from_yaml_data


 class RequirementKind(enum.Enum):

@@ -69,18 +69,29 @@ def rules_from_package_py(self, pkg: spack.package_base.PackageBase) -> List[Req
         return rules

     def rules_from_virtual(self, virtual_str: str) -> List[RequirementRule]:
-        requirements = self.config.get("packages", {}).get(virtual_str, {}).get("require", [])
-        return self._rules_from_requirements(
-            virtual_str, requirements, kind=RequirementKind.VIRTUAL
-        )
+        kind, requests = self._raw_yaml_data(virtual_str, section="require", virtual=True)
+        result = self._rules_from_requirements(virtual_str, requests, kind=kind)
+
+        kind, requests = self._raw_yaml_data(virtual_str, section="prefer", virtual=True)
+        result.extend(self._rules_from_preferences(virtual_str, preferences=requests, kind=kind))
+
+        kind, requests = self._raw_yaml_data(virtual_str, section="conflict", virtual=True)
+        result.extend(self._rules_from_conflicts(virtual_str, conflicts=requests, kind=kind))
+
+        return result

     def rules_from_require(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
-        kind, requirements = self._raw_yaml_data(pkg, section="require")
+        kind, requirements = self._raw_yaml_data(pkg.name, section="require")
         return self._rules_from_requirements(pkg.name, requirements, kind=kind)

     def rules_from_prefer(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
+        kind, preferences = self._raw_yaml_data(pkg.name, section="prefer")
+        return self._rules_from_preferences(pkg.name, preferences=preferences, kind=kind)
+
+    def _rules_from_preferences(
+        self, pkg_name: str, *, preferences, kind: RequirementKind
+    ) -> List[RequirementRule]:
         result = []
-        kind, preferences = self._raw_yaml_data(pkg, section="prefer")
         for item in preferences:
             spec, condition, message = self._parse_prefer_conflict_item(item)
             result.append(

@@ -89,7 +100,7 @@ def rules_from_prefer(self, pkg: spack.package_base.PackageBase) -> List[Require
                 # require:
                 #   - any_of: [spec_str, "@:"]
                 RequirementRule(
-                    pkg_name=pkg.name,
+                    pkg_name=pkg_name,
                     policy="any_of",
                     requirements=[spec, spack.spec.Spec("@:")],
                     kind=kind,

@@ -100,8 +111,13 @@ def rules_from_prefer(self, pkg: spack.package_base.PackageBase) -> List[Require
         return result

     def rules_from_conflict(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
+        kind, conflicts = self._raw_yaml_data(pkg.name, section="conflict")
+        return self._rules_from_conflicts(pkg.name, conflicts=conflicts, kind=kind)
+
+    def _rules_from_conflicts(
+        self, pkg_name: str, *, conflicts, kind: RequirementKind
+    ) -> List[RequirementRule]:
         result = []
-        kind, conflicts = self._raw_yaml_data(pkg, section="conflict")
         for item in conflicts:
             spec, condition, message = self._parse_prefer_conflict_item(item)
             result.append(

@@ -110,7 +126,7 @@ def rules_from_conflict(self, pkg: spack.package_base.PackageBase) -> List[Requi
                 # require:
                 #   - one_of: [spec_str, "@:"]
                 RequirementRule(
-                    pkg_name=pkg.name,
+                    pkg_name=pkg_name,
                     policy="one_of",
                     requirements=[spec, spack.spec.Spec("@:")],
                     kind=kind,

@@ -132,10 +148,14 @@ def _parse_prefer_conflict_item(self, item):
         message = item.get("message")
         return spec, condition, message

-    def _raw_yaml_data(self, pkg: spack.package_base.PackageBase, *, section: str):
+    def _raw_yaml_data(self, pkg_name: str, *, section: str, virtual: bool = False):
         config = self.config.get("packages")
-        data = config.get(pkg.name, {}).get(section, [])
+        data = config.get(pkg_name, {}).get(section, [])
         kind = RequirementKind.PACKAGE

+        if virtual:
+            return RequirementKind.VIRTUAL, data
+
         if not data:
             data = config.get("all", {}).get(section, [])
             kind = RequirementKind.DEFAULT

@@ -168,7 +188,8 @@ def _rules_from_requirements(

             # validate specs from YAML first, and fail with line numbers if parsing fails.
             constraints = [
-                parse_spec_from_yaml_string(constraint) for constraint in constraints
+                parse_spec_from_yaml_string(constraint, named=kind == RequirementKind.VIRTUAL)
+                for constraint in constraints
             ]
             when_str = requirement.get("when")
             when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()

@@ -226,21 +247,37 @@ def reject_requirement_constraint(
     return False


-def parse_spec_from_yaml_string(string: str) -> spack.spec.Spec:
+def parse_spec_from_yaml_string(string: str, *, named: bool = False) -> spack.spec.Spec:
     """Parse a spec from YAML and add file/line info to errors, if it's available.

     Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
     add file/line information for debugging using file/line annotations from the string.

-    Arguments:
+    Args:
         string: a string representing a ``Spec`` from config YAML.
+
+        named: if True, the spec must have a name
     """
     try:
-        return spack.spec.Spec(string)
+        result = spack.spec.Spec(string)
     except spack.error.SpecSyntaxError as e:
         mark = get_mark_from_yaml_data(string)
         if mark:
             msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
             raise spack.error.SpecSyntaxError(msg) from e
         raise e

+    if named is True and not result.name:
+        msg = f"expected a named spec, but got '{string}' instead"
+        mark = get_mark_from_yaml_data(string)
+
+        # Add a hint in case it's dependencies
+        deps = result.dependencies()
+        if len(deps) == 1:
+            msg = f"{msg}. Did you mean '{deps[0]}'?"
+
+        if mark:
+            msg = f"{mark.name}:{mark.line + 1}: {msg}"
+
+        raise spack.error.SpackError(msg)
+
+    return result
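The `named=True` path rejects specs that parse but carry no root name (for example a bare dependency constraint) and tries to suggest the likely intent. A toy model of that validation, with `Spec` reduced to a trivial stand-in:

class ToySpec:
    def __init__(self, text: str):
        # a leading "^" or "%" means the text only constrains a dependency
        self.name = None if text.startswith(("^", "%")) else text.split("@")[0]
        self.text = text

def parse_named(text: str) -> ToySpec:
    result = ToySpec(text)
    if not result.name:
        raise ValueError(f"expected a named spec, but got '{text}' instead")
    return result

parse_named("mpich@3.4")       # fine: the spec names a package
try:
    parse_named("^mpich")      # anonymous: only a dependency constraint
except ValueError as e:
    print(e)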


@@ -86,6 +86,7 @@
 import llnl.util.tty.color as clr

 import spack
+import spack.aliases
 import spack.compilers.flags
 import spack.deptypes as dt
 import spack.error

@@ -97,7 +98,6 @@
 import spack.spec_parser
 import spack.store
 import spack.traverse
-import spack.util.executable
 import spack.util.hash
 import spack.util.prefix
 import spack.util.spack_json as sjson

@@ -206,7 +206,7 @@ class InstallStatus(enum.Enum):

     installed = "@g{[+]} "
     upstream = "@g{[^]} "
-    external = "@g{[e]} "
+    external = "@M{[e]} "
     absent = "@K{ - } "
     missing = "@r{[-]} "

@@ -663,11 +663,9 @@ def versions(self):
     def display_str(self):
         """Equivalent to {compiler.name}{@compiler.version} for Specs, without extra
         @= for readability."""
-        if self.spec.concrete:
-            return f"{self.name}@{self.version}"
-        elif self.versions != vn.any_version:
-            return f"{self.name}@{self.versions}"
-        return self.name
+        if self.versions != vn.any_version:
+            return self.spec.format("{name}{@version}")
+        return self.spec.format("{name}")

     def __lt__(self, other):
         if not isinstance(other, CompilerSpec):

@@ -699,6 +697,11 @@ def __init__(self):
         super().__init__(name="compiler")

     def factory(self, instance, owner):
+        if instance.original_spec_format() < 5:
+            compiler = instance.annotations.compiler_node_attribute
+            assert compiler is not None, "a compiler spec is expected"
+            return CompilerSpec(compiler)
+
         for language in ("c", "cxx", "fortran"):
             deps = instance.dependencies(virtuals=language)
             if deps:

@@ -751,11 +754,17 @@ def update_deptypes(self, depflag: dt.DepFlag) -> bool:
         self.depflag = new
         return True

-    def update_virtuals(self, virtuals: Iterable[str]) -> bool:
+    def update_virtuals(self, virtuals: Union[str, Iterable[str]]) -> bool:
         """Update the list of provided virtuals"""
         old = self.virtuals
-        self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
-        return old != self.virtuals
+        if isinstance(virtuals, str):
+            union = {virtuals, *self.virtuals}
+        else:
+            union = {*virtuals, *self.virtuals}
+        if len(union) == len(old):
+            return False
+        self.virtuals = tuple(sorted(union))
+        return True
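The `isinstance` branch matters because a bare string is itself an iterable of characters: handing one to the old `set(virtuals)` call would have produced single-letter "virtuals". A self-contained demonstration:

virtuals = "libc"
assert set(virtuals) == {"l", "i", "b", "c"}   # what the old code would compute
assert {virtuals} == {"libc"}                  # what the new str branch computes

def merged(virtuals, existing=("mpi",)):
    # mirrors the new union logic, on plain data
    if isinstance(virtuals, str):
        union = {virtuals, *existing}
    else:
        union = {*virtuals, *existing}
    return tuple(sorted(union))

assert merged("libc") == ("libc", "mpi")
assert merged(["libc", "c"]) == ("c", "libc", "mpi")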

     def copy(self) -> "DependencySpec":
         """Return a copy of this edge"""

@@ -1019,7 +1028,7 @@ def select(
         parent: Optional[str] = None,
         child: Optional[str] = None,
         depflag: dt.DepFlag = dt.ALL,
-        virtuals: Optional[Sequence[str]] = None,
+        virtuals: Optional[Union[str, Sequence[str]]] = None,
     ) -> List[DependencySpec]:
         """Selects a list of edges and returns them.

@@ -1038,7 +1047,7 @@ def select(
             parent: name of the parent package
             child: name of the child package
             depflag: allowed dependency types in flag form
-            virtuals: list of virtuals on the edge
+            virtuals: list of virtuals or specific virtual on the edge
         """
         if not depflag:
             return []

@@ -1059,7 +1068,10 @@ def select(

         # Filter by virtuals
         if virtuals is not None:
-            selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))
+            if isinstance(virtuals, str):
+                selected = (dep for dep in selected if virtuals in dep.virtuals)
+            else:
+                selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))

         return list(selected)
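The same single-string convenience is applied to edge selection. A sketch of the filtering logic with a toy edge type (names are illustrative):

from typing import NamedTuple, Tuple

class Edge(NamedTuple):
    child: str
    virtuals: Tuple[str, ...]

edges = [Edge("openmpi", ("mpi",)), Edge("glibc", ("libc",))]

def select_by_virtuals(edges, virtuals):
    if isinstance(virtuals, str):
        return [e for e in edges if virtuals in e.virtuals]
    return [e for e in edges if any(v in e.virtuals for v in virtuals)]

assert select_by_virtuals(edges, "mpi") == [edges[0]]
assert select_by_virtuals(edges, ["mpi", "libc"]) == edges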

@@ -1067,28 +1079,6 @@ def clear(self):
         self.edges.clear()


-def _command_default_handler(spec: "Spec"):
-    """Default handler when looking for the 'command' attribute.
-
-    Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.
-
-    Parameters:
-        spec: spec that is being queried
-
-    Returns:
-        Executable: An executable of the command
-
-    Raises:
-        RuntimeError: If the command is not found
-    """
-    home = getattr(spec.package, "home")
-    path = os.path.join(home.bin, spec.name)
-
-    if fs.is_exe(path):
-        return spack.util.executable.Executable(path)
-    raise RuntimeError(f"Unable to locate {spec.name} command in {home.bin}")
-
-
 def _headers_default_handler(spec: "Spec"):
     """Default handler when looking for the 'headers' attribute.

@@ -1292,9 +1282,7 @@ class SpecBuildInterface(lang.ObjectWrapper):
     home = ForwardQueryToPackage("home", default_handler=None)
     headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
     libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
-    command = ForwardQueryToPackage(
-        "command", default_handler=_command_default_handler, _indirect=True
-    )
+    command = ForwardQueryToPackage("command", default_handler=None, _indirect=True)

     def __init__(
         self,

@@ -1493,7 +1481,7 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
         self.abstract_hash = None

         # initial values for all spec hash types
-        for h in ht.hashes:
+        for h in ht.HASHES:
             setattr(self, h.attr, None)

         # cache for spec's prefix, computed lazily by prefix property

@@ -1608,7 +1596,11 @@ def _get_dependency(self, name):
         return deps[0]

     def edges_from_dependents(
-        self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
+        self,
+        name=None,
+        depflag: dt.DepFlag = dt.ALL,
+        *,
+        virtuals: Optional[Union[str, Sequence[str]]] = None,
     ) -> List[DependencySpec]:
         """Return a list of edges connecting this node in the DAG
         to parents.

@@ -1623,7 +1615,11 @@ def edges_from_dependents(
         ]

     def edges_to_dependencies(
-        self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[Sequence[str]] = None
+        self,
+        name=None,
+        depflag: dt.DepFlag = dt.ALL,
+        *,
+        virtuals: Optional[Union[str, Sequence[str]]] = None,
     ) -> List[DependencySpec]:
         """Returns a list of edges connecting this node in the DAG to children.

@@ -1644,12 +1640,16 @@ def edge_attributes(self) -> str:
             return ""

         union = DependencySpec(parent=Spec(), spec=self, depflag=0, virtuals=())
+        all_direct_edges = all(x.direct for x in edges)
+
         for edge in edges:
             union.update_deptypes(edge.depflag)
             union.update_virtuals(edge.virtuals)
-        deptypes_str = (
-            f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}" if union.depflag else ""
-        )

+        deptypes_str = ""
+        if not all_direct_edges and union.depflag:
+            deptypes_str = f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}"
+
         virtuals_str = f"virtuals={','.join(union.virtuals)}" if union.virtuals else ""
         if not deptypes_str and not virtuals_str:
             return ""

@@ -1661,7 +1661,7 @@ def dependencies(
         name=None,
         deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
         *,
-        virtuals: Optional[Sequence[str]] = None,
+        virtuals: Optional[Union[str, Sequence[str]]] = None,
     ) -> List["Spec"]:
         """Returns a list of direct dependencies (nodes in the DAG)

@@ -1706,12 +1706,10 @@ def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
             result[key] = list(group)
         return result

-    def _add_flag(self, name, value, propagate):
-        """Called by the parser to add a known flag.
-        Known flags currently include "arch"
-        """
+    def _add_flag(self, name: str, value: str, propagate: bool, concrete: bool) -> None:
+        """Called by the parser to add a known flag"""

-        if propagate and name in vt.reserved_names:
+        if propagate and name in vt.RESERVED_NAMES:
             raise UnsupportedPropagationError(
                 f"Propagation with '==' is not supported for '{name}'."
             )

@@ -1736,14 +1734,12 @@ def _add_flag(self, name, value, propagate):
             for flag, propagation in flags_and_propagation:
                 self.compiler_flags.add_flag(name, flag, propagation, flag_group)
         else:
-            # FIXME:
-            # All other flags represent variants. 'foo=true' and 'foo=false'
-            # map to '+foo' and '~foo' respectively. As such they need a
-            # BoolValuedVariant instance.
             if str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
                 self.variants[name] = vt.BoolValuedVariant(name, value, propagate)
+            elif concrete:
+                self.variants[name] = vt.MultiValuedVariant(name, value, propagate)
             else:
-                self.variants[name] = vt.AbstractVariant(name, value, propagate)
+                self.variants[name] = vt.VariantBase(name, value, propagate)

     def _set_architecture(self, **kwargs):
         """Called by the parser to set the architecture."""

@@ -2089,21 +2085,19 @@ def traverse_edges(
     def long_spec(self):
         """Returns a string of the spec with the dependencies completely
         enumerated."""
-        name_conversion = {
-            "llvm": "clang",
-            "intel-oneapi-compilers": "oneapi",
-            "llvm-amdgpu": "rocmcc",
-            "intel-oneapi-compiler-classic": "intel",
-            "acfl": "arm",
-        }
         parts = [self.format()]
         direct, transitive = lang.stable_partition(
             self.edges_to_dependencies(), predicate_fn=lambda x: x.direct
         )
         for item in sorted(direct, key=lambda x: x.spec.name):
             current_name = item.spec.name
-            new_name = name_conversion.get(current_name, current_name)
-            parts.append(f"%{item.spec.format()}".replace(current_name, new_name))
+            new_name = spack.aliases.BUILTIN_TO_LEGACY_COMPILER.get(current_name, current_name)
+            # note: depflag not allowed, currently, on "direct" edges
+            edge_attributes = ""
+            if item.virtuals:
+                edge_attributes = item.spec.format("{edge_attributes}") + " "
+
+            parts.append(f"%{edge_attributes}{item.spec.format()}".replace(current_name, new_name))
         for item in sorted(transitive, key=lambda x: x.spec.name):
             # Recurse to attach build deps in order
             edge_attributes = ""

@@ -2200,30 +2194,16 @@ def package_hash(self):
     def dag_hash(self, length=None):
         """This is Spack's default hash, used to identify installations.

         Same as the full hash (includes package hash and build/link/run deps).
         Tells us when package files and any dependencies have changes.

-        NOTE: Versions of Spack prior to 0.18 only included link and run deps.
+        NOTE: Versions of Spack prior to 1.0 only did not include test deps.

         """
         return self._cached_hash(ht.dag_hash, length)

-    def process_hash(self, length=None):
-        """Hash used to transfer specs among processes.
-
-        This hash includes build and test dependencies and is only used to
-        serialize a spec and pass it around among processes.
-        """
-        return self._cached_hash(ht.process_hash, length)
-
     def dag_hash_bit_prefix(self, bits):
         """Get the first <bits> bits of the DAG hash as an integer type."""
         return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)

-    def process_hash_bit_prefix(self, bits):
-        """Get the first <bits> bits of the DAG hash as an integer type."""
-        return spack.util.hash.base32_prefix_bits(self.process_hash(), bits)
-
     def _lookup_hash(self):
         """Lookup just one spec with an abstract hash, returning a spec from the the environment,
         store, or finally, binary caches."""

@@ -2367,6 +2347,7 @@ def to_node_dict(self, hash=ht.dag_hash):
                     [v.name for v in self.variants.values() if v.propagate], flag_names
                 )
             )
+        d["abstract"] = sorted(v.name for v in self.variants.values() if not v.concrete)

         if self.external:
             d["external"] = {

@@ -3040,9 +3021,8 @@ def ensure_valid_variants(spec):
         # but are not necessarily recorded by the package's class
         propagate_variants = [name for name, variant in spec.variants.items() if variant.propagate]

-        not_existing = set(spec.variants) - (
-            set(pkg_variants) | set(vt.reserved_names) | set(propagate_variants)
-        )
+        not_existing = set(spec.variants)
+        not_existing.difference_update(pkg_variants, vt.RESERVED_NAMES, propagate_variants)

         if not_existing:
             raise vt.UnknownVariantError(

@@ -3094,7 +3074,7 @@ def constrain(self, other, deps=True):
             raise UnsatisfiableVersionSpecError(self.versions, other.versions)

         for v in [x for x in other.variants if x in self.variants]:
-            if not self.variants[v].compatible(other.variants[v]):
+            if not self.variants[v].intersects(other.variants[v]):
                 raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])

         sarch, oarch = self.architecture, other.architecture

@@ -3150,6 +3130,8 @@ def _constrain_dependencies(self, other: "Spec") -> bool:
             raise UnconstrainableDependencySpecError(other)

         # Handle common first-order constraints directly
+        # Note: This doesn't handle constraining transitive dependencies with the same name
+        # as direct dependencies
         changed = False
         common_dependencies = {x.name for x in self.dependencies()}
         common_dependencies &= {x.name for x in other.dependencies()}

@@ -3439,7 +3421,18 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                 # Note: this relies on abstract specs from string not being deeper than 2 levels
                 # e.g. in foo %fee ^bar %baz we cannot go deeper than "baz" and e.g. specify its
                 # dependencies too.
-                current_node = self if rhs_edge.parent.name is None else self[rhs_edge.parent.name]
+                #
+                # We also need to account for cases like gcc@<new> %gcc@<old> where the parent
+                # name is the same as the child name
+                #
+                # The same assumptions hold on Spec.constrain, and Spec.intersect
+                current_node = self
+                if rhs_edge.parent.name is not None and rhs_edge.parent.name != rhs_edge.spec.name:
+                    try:
+                        current_node = self[rhs_edge.parent.name]
+                    except KeyError:
+                        return False
+
                 candidates = current_node.dependencies(
                     name=rhs_edge.spec.name,
                     deptype=rhs_edge.depflag,

@@ -3448,6 +3441,8 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                 if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
                     return False

+                continue
+
             if not rhs_edge.virtuals:
                 continue

@@ -3592,11 +3587,11 @@ def _dup(self, other: "Spec", deps: Union[bool, dt.DepTypes, dt.DepFlag] = True)

         if self._concrete:
             self._dunder_hash = other._dunder_hash
-            for h in ht.hashes:
+            for h in ht.HASHES:
                 setattr(self, h.attr, getattr(other, h.attr, None))
         else:
             self._dunder_hash = None
-            for h in ht.hashes:
+            for h in ht.HASHES:
                 setattr(self, h.attr, None)

         return changed

@@ -3682,8 +3677,8 @@ def __getitem__(self, name: str):

         # Consider all direct dependencies and transitive runtime dependencies
         order = itertools.chain(
-            self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
             self.edges_to_dependencies(depflag=dt.BUILD | dt.TEST),
+            self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
         )

         try:

@@ -3790,16 +3785,6 @@ def _cmp_iter(self):
         # serialized before the hash change and one after, are considered different.
         yield self.dag_hash() if self.concrete else None

-        # This needs to be in _cmp_iter so that no specs with different process hashes
-        # are considered the same by `__hash__` or `__eq__`.
-        #
-        # TODO: We should eventually unify the `_cmp_*` methods with `to_node_dict` so
-        # TODO: there aren't two sources of truth, but this needs some thought, since
-        # TODO: they exist for speed. We should benchmark whether it's really worth
-        # TODO: having two types of hashing now that we use `json` instead of `yaml` for
-        # TODO: spec hashing.
-        yield self.process_hash() if self.concrete else None
-
         def deps():
             for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
                 yield dep.spec.name

@@ -4453,7 +4438,7 @@ def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
         """
         Clears all cached hashes in a Spec, while preserving other properties.
         """
-        for h in ht.hashes:
+        for h in ht.HASHES:
             if h.attr not in ignore:
                 if hasattr(self, h.attr):
                     setattr(self, h.attr, None)

@@ -4462,18 +4447,12 @@ def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
                 setattr(self, attr, None)

     def __hash__(self):
-        # If the spec is concrete, we leverage the process hash and just use
-        # a 64-bit prefix of it. The process hash has the advantage that it's
-        # computed once per concrete spec, and it's saved -- so if we read
-        # concrete specs we don't need to recompute the whole hash. This is
-        # good for large, unchanging specs.
-        #
-        # We use the process hash instead of the DAG hash here because the DAG
-        # hash includes the package hash, which can cause infinite recursion,
-        # and which isn't defined unless the spec has a known package.
+        # If the spec is concrete, we leverage the dag hash and just use a 64-bit prefix of it.
+        # The dag hash has the advantage that it's computed once per concrete spec, and it's saved
+        # -- so if we read concrete specs we don't need to recompute the whole hash.
         if self.concrete:
             if not self._dunder_hash:
-                self._dunder_hash = self.process_hash_bit_prefix(64)
+                self._dunder_hash = self.dag_hash_bit_prefix(64)
             return self._dunder_hash

         # This is the normal hash for lazy_lexicographic_ordering. It's

@@ -4482,7 +4461,7 @@ def __hash__(self):
         return hash(lang.tuplify(self._cmp_iter))

     def __reduce__(self):
-        return Spec.from_dict, (self.to_dict(hash=ht.process_hash),)
+        return Spec.from_dict, (self.to_dict(hash=ht.dag_hash),)

     def attach_git_version_lookup(self):
         # Add a git lookup method for GitVersions

@@ -4496,6 +4475,9 @@ def original_spec_format(self) -> int:
         """Returns the spec format originally used for this spec."""
         return self.annotations.original_spec_format

+    def has_virtual_dependency(self, virtual: str) -> bool:
+        return bool(self.dependencies(virtuals=(virtual,)))
+

 class VariantMap(lang.HashableMap):
     """Map containing variant instances. New values can be added only

@@ -4507,7 +4489,7 @@ def __init__(self, spec: Spec):

     def __setitem__(self, name, vspec):
         # Raise a TypeError if vspec is not of the right type
-        if not isinstance(vspec, vt.AbstractVariant):
+        if not isinstance(vspec, vt.VariantBase):
             raise TypeError(
                 "VariantMap accepts only values of variant types "
                 f"[got {type(vspec).__name__} instead]"

@@ -4617,8 +4599,7 @@ def constrain(self, other: "VariantMap") -> bool:
         changed = False
         for k in other:
             if k in self:
-                # If they are not compatible raise an error
-                if not self[k].compatible(other[k]):
+                if not self[k].intersects(other[k]):
                     raise vt.UnsatisfiableVariantSpecError(self[k], other[k])
                 # If they are compatible merge them
                 changed |= self[k].constrain(other[k])

@@ -4683,7 +4664,7 @@ def substitute_abstract_variants(spec: Spec):
         if name == "dev_path":
             spec.variants.substitute(vt.SingleValuedVariant(name, v._original_value))
             continue
-        elif name in vt.reserved_names:
+        elif name in vt.RESERVED_NAMES:
             continue

         variant_defs = spack.repo.PATH.get_pkg_class(spec.fullname).variant_definitions(name)

@@ -4808,7 +4789,7 @@ def from_node_dict(cls, node):
         spec = Spec()

         name, node = cls.name_and_data(node)
-        for h in ht.hashes:
+        for h in ht.HASHES:
             setattr(spec, h.attr, node.get(h.name, None))

         spec.name = name

@@ -4822,6 +4803,7 @@ def from_node_dict(cls, node):
             spec.architecture = ArchSpec.from_dict(node)

         propagated_names = node.get("propagate", [])
+        abstract_variants = set(node.get("abstract", ()))
         for name, values in node.get("parameters", {}).items():
             propagate = name in propagated_names
             if name in _valid_compiler_flags:

@@ -4830,7 +4812,7 @@ def from_node_dict(cls, node):
                     spec.compiler_flags.add_flag(name, val, propagate)
             else:
                 spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
-                    name, values, propagate=propagate
+                    name, values, propagate=propagate, abstract=name in abstract_variants
                 )

         spec.external_path = None

@@ -5004,7 +4986,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
         """
         for dep_name, elt in deps.items():
             if isinstance(elt, dict):
-                for h in ht.hashes:
+                for h in ht.HASHES:
                     if h.name in elt:
                         dep_hash, deptypes = elt[h.name], elt["type"]
                         hash_type = h.name

@@ -5049,7 +5031,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
             dep_name = dep["name"]
             if isinstance(elt, dict):
                 # new format: elements of dependency spec are keyed.
-                for h in ht.hashes:
+                for h in ht.HASHES:
                     if h.name in elt:
                         dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
                         break

@@ -5173,6 +5155,13 @@ def get_host_environment() -> Dict[str, Any]:
     }


+def eval_conditional(string):
+    """Evaluate conditional definitions using restricted variable scope."""
+    valid_variables = get_host_environment()
+    valid_variables.update({"re": re, "env": os.environ})
+    return eval(string, valid_variables)
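`eval_conditional` evaluates config-file conditionals against a mapping of host facts plus `re` and `env`. A self-contained sketch of the idea; the scope keys are placeholders for what `get_host_environment` would return:

import os
import re

def eval_conditional_sketch(expr: str) -> bool:
    # stand-in for get_host_environment(); real keys come from the host platform
    scope = {"platform": "linux", "target": "x86_64"}
    scope.update({"re": re, "env": os.environ})
    # note: eval() still exposes Python builtins unless "__builtins__" is overridden
    return eval(expr, scope)

assert eval_conditional_sketch('platform == "linux"')
assert eval_conditional_sketch('re.match(r"x86", target) is not None')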


 class SpecParseError(spack.error.SpecError):
     """Wrapper for ParseError for when we're parsing specs."""


@@ -60,14 +60,19 @@
 import pathlib
 import re
 import sys
-from typing import Iterator, List, Optional
+import traceback
+import warnings
+from typing import Iterator, List, Optional, Tuple

 from llnl.util.tty import color

+import spack.deptypes
 import spack.error
+import spack.paths
 import spack.spec
+import spack.util.spack_yaml
 import spack.version
+from spack.aliases import LEGACY_COMPILER_TO_BUILTIN
 from spack.tokenize import Token, TokenBase, Tokenizer

 #: Valid name for specs and variants. Here we are not using

@@ -94,8 +99,10 @@
 VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)"
 VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*"

 #: Regex with groups to use for splitting (optionally propagated) key-value pairs
-SPLIT_KVP = re.compile(rf"^({NAME})(==?)(.*)$")
+SPLIT_KVP = re.compile(rf"^({NAME})(:?==?)(.*)$")

+#: Regex with groups to use for splitting %[virtuals=...] tokens
+SPLIT_COMPILER_TOKEN = re.compile(rf"^%\[virtuals=({VALUE}|{QUOTED_VALUE})]\s*(.*)$")
+
 #: A filename starts either with a "." or a "/" or a "{name}/, or on Windows, a drive letter
 #: followed by a colon and "\" or "." or {name}\
|
||||
@@ -127,11 +134,16 @@ class SpecTokens(TokenBase):
|
||||
# Variants
|
||||
PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})"
|
||||
BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
|
||||
PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}==(?:{VALUE}|{QUOTED_VALUE}))"
|
||||
KEY_VALUE_PAIR = rf"(?:{NAME}=(?:{VALUE}|{QUOTED_VALUE}))"
|
||||
PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}:?==(?:{VALUE}|{QUOTED_VALUE}))"
|
||||
KEY_VALUE_PAIR = rf"(?:{NAME}:?=(?:{VALUE}|{QUOTED_VALUE}))"
|
||||
# Compilers
|
||||
COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
|
||||
COMPILER = rf"(?:%\s*(?:{NAME}))"
|
||||
COMPILER_AND_VERSION_WITH_VIRTUALS = (
|
||||
rf"(?:%\[virtuals=(?:{VALUE}|{QUOTED_VALUE})\]"
|
||||
rf"\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
|
||||
)
|
||||
COMPILER_WITH_VIRTUALS = rf"(?:%\[virtuals=(?:{VALUE}|{QUOTED_VALUE})\]\s*(?:{NAME}))"
|
||||
# FILENAME
|
||||
FILENAME = rf"(?:{FILENAME})"
|
||||
# Package name
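The `:?` added to SPLIT_KVP and the KEY_VALUE_PAIR tokens above admits a trailing colon on the variant name (`name:=value`), which later hunks use to mark the pair as concrete. A quick demonstration with simplified patterns (the real NAME/VALUE are richer):

    import re

    NAME = r"[a-zA-Z_][a-zA-Z0-9_\-]*"  # simplified stand-in
    SPLIT_KVP = re.compile(rf"^({NAME})(:?==?)(.*)$")

    for token in ("foo=bar", "foo==bar", "foo:=bar", "foo:==bar"):
        name, sigil, value = SPLIT_KVP.match(token).groups()
        concrete = sigil.startswith(":")   # `:` marks a concrete value
        propagate = sigil.endswith("==")   # `==` marks propagation to dependencies
        print(f"{token:10} -> {name}={value} concrete={concrete} propagate={propagate}")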
@@ -204,6 +216,32 @@ def __init__(self, tokens: List[Token], text: str):
super().__init__(message)

def _warn_about_variant_after_compiler(literal_str: str, issues: List[str]):
"""Issue a warning if a variant or other token is preceded by a compiler token. The warning
is only issued if it's actionable: either we know the config file it originates from, or we
have a call site that's not internal to Spack."""
ignore = [spack.paths.lib_path, spack.paths.bin_path]
mark = spack.util.spack_yaml.get_mark_from_yaml_data(literal_str)
issue_str = ", ".join(issues)
error = f"{issue_str} in `{literal_str}`"

# warning from config file
if mark:
warnings.warn(f"{mark.name}:{mark.line + 1}: {error}")
return

# warning from hopefully package.py
for frame in reversed(traceback.extract_stack()):
if frame.lineno and not any(frame.filename.startswith(path) for path in ignore):
warnings.warn_explicit(
error,
category=spack.error.SpackAPIWarning,
filename=frame.filename,
lineno=frame.lineno,
)
return


class SpecParser:
"""Parse text into specs"""

@@ -242,26 +280,31 @@ def add_dependency(dep, **edge_properties):
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e

initial_spec = initial_spec or spack.spec.Spec()
root_spec = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
while True:
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
edge_properties.setdefault("depflag", 0)
edge_properties.setdefault("virtuals", ())
dependency = self._parse_node(root_spec)
dependency, warnings = self._parse_node(root_spec)
parser_warnings.extend(warnings)
add_dependency(dependency, **edge_properties)

elif self.ctx.accept(SpecTokens.DEPENDENCY):
dependency = self._parse_node(root_spec)
dependency, warnings = self._parse_node(root_spec)
parser_warnings.extend(warnings)
add_dependency(dependency, depflag=0, virtuals=())

else:
break

if parser_warnings:
_warn_about_variant_after_compiler(self.literal_str, parser_warnings)

return root_spec

def _parse_node(self, root_spec):
dependency = SpecNodeParser(self.ctx, self.literal_str).parse()
dependency, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse()
if dependency is None:
msg = (
"the dependency sigil and any optional edge attributes must be followed by a "
@@ -270,7 +313,7 @@ def _parse_node(self, root_spec):
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
return dependency
return dependency, parser_warnings

def all_specs(self) -> List["spack.spec.Spec"]:
"""Return all the specs that remain to be parsed"""
@@ -289,7 +332,7 @@ def __init__(self, ctx, literal_str):

def parse(
self, initial_spec: Optional["spack.spec.Spec"] = None
) -> Optional["spack.spec.Spec"]:
) -> Tuple["spack.spec.Spec", List[str]]:
"""Parse a single spec node from a stream of tokens

Args:
@@ -298,12 +341,15 @@ def parse(
Return
The object passed as argument
"""
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
return initial_spec
parser_warnings: List[str] = []
last_compiler = None

if initial_spec is None:
initial_spec = spack.spec.Spec()

if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
return initial_spec, parser_warnings

# If we start with a package name we have a named spec, we cannot
# accept another package name afterwards in a node
if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
@@ -317,38 +363,53 @@ def parse(
initial_spec.namespace = namespace

elif self.ctx.accept(SpecTokens.FILENAME):
return FileParser(self.ctx).parse(initial_spec)
return FileParser(self.ctx).parse(initial_spec), parser_warnings

def raise_parsing_error(string: str, cause: Optional[Exception] = None):
"""Raise a spec parsing error with token context."""
raise SpecParsingError(string, self.ctx.current_token, self.literal_str) from cause

def add_flag(name: str, value: str, propagate: bool):
def add_flag(name: str, value: str, propagate: bool, concrete: bool):
"""Wrapper around ``Spec._add_flag()`` that adds parser context to errors raised."""
try:
initial_spec._add_flag(name, value, propagate)
initial_spec._add_flag(name, value, propagate, concrete)
except Exception as e:
raise_parsing_error(str(e), e)

while True:
if self.ctx.accept(SpecTokens.COMPILER) or self.ctx.accept(
SpecTokens.COMPILER_AND_VERSION
):
build_dependency = spack.spec.Spec(self.ctx.current_token.value[1:])
name_conversion = {
"clang": "llvm",
"oneapi": "intel-oneapi-compilers",
"rocmcc": "llvm-amdgpu",
"intel": "intel-oneapi-compiler-classic",
"arm": "acfl",
}
def warn_if_after_compiler(token: str):
"""Register a warning for %compiler followed by +variant that will in the future apply
to the compiler instead of the current root."""
if last_compiler:
parser_warnings.append(f"`{token}` should go before `{last_compiler}`")

if build_dependency.name in name_conversion:
build_dependency.name = name_conversion[build_dependency.name]
while True:
if (
self.ctx.accept(SpecTokens.COMPILER)
or self.ctx.accept(SpecTokens.COMPILER_AND_VERSION)
or self.ctx.accept(SpecTokens.COMPILER_WITH_VIRTUALS)
or self.ctx.accept(SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS)
):
current_token = self.ctx.current_token
if current_token.kind in (
SpecTokens.COMPILER_WITH_VIRTUALS,
SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS,
):
m = SPLIT_COMPILER_TOKEN.match(current_token.value)
assert m, "SPLIT_COMPILER_TOKEN and COMPILER_* do not agree."
virtuals_str, compiler_str = m.groups()
virtuals = tuple(virtuals_str.strip("'\" ").split(","))
else:
virtuals = tuple()
compiler_str = current_token.value[1:]

build_dependency = spack.spec.Spec(compiler_str)
if build_dependency.name in LEGACY_COMPILER_TO_BUILTIN:
build_dependency.name = LEGACY_COMPILER_TO_BUILTIN[build_dependency.name]

initial_spec._add_dependency(
build_dependency, depflag=spack.deptypes.BUILD, virtuals=(), direct=True
build_dependency, depflag=spack.deptypes.BUILD, virtuals=virtuals, direct=True
)
last_compiler = self.ctx.current_token.value

elif (
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
@@ -363,39 +424,50 @@ def add_flag(name: str, value: str, propagate: bool):
)
initial_spec.attach_git_version_lookup()
self.has_version = True
warn_if_after_compiler(self.ctx.current_token.value)

elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
name = self.ctx.current_token.value[1:].strip()
variant_value = self.ctx.current_token.value[0] == "+"
add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
add_flag(name, variant_value, propagate=False, concrete=True)
warn_if_after_compiler(self.ctx.current_token.value)

elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
name = self.ctx.current_token.value[2:].strip()
variant_value = self.ctx.current_token.value[0:2] == "++"
add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
add_flag(name, variant_value, propagate=True, concrete=True)
warn_if_after_compiler(self.ctx.current_token.value)

elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
match = SPLIT_KVP.match(self.ctx.current_token.value)
assert match, "SPLIT_KVP and KEY_VALUE_PAIR do not agree."
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
concrete = name.endswith(":")
if concrete:
name = name[:-1]

name, _, value = match.groups()
add_flag(name, strip_quotes_and_unescape(value), propagate=False)
add_flag(
name, strip_quotes_and_unescape(value), propagate=False, concrete=concrete
)
warn_if_after_compiler(self.ctx.current_token.value)

elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
match = SPLIT_KVP.match(self.ctx.current_token.value)
assert match, "SPLIT_KVP and PROPAGATED_KEY_VALUE_PAIR do not agree."

name, _, value = match.groups()
add_flag(name, strip_quotes_and_unescape(value), propagate=True)
name, value = self.ctx.current_token.value.split("==", maxsplit=1)
concrete = name.endswith(":")
if concrete:
name = name[:-1]
add_flag(name, strip_quotes_and_unescape(value), propagate=True, concrete=concrete)
warn_if_after_compiler(self.ctx.current_token.value)

elif self.ctx.expect(SpecTokens.DAG_HASH):
if initial_spec.abstract_hash:
break
self.ctx.accept(SpecTokens.DAG_HASH)
initial_spec.abstract_hash = self.ctx.current_token.value[1:]
warn_if_after_compiler(self.ctx.current_token.value)

else:
break

return initial_spec
return initial_spec, parser_warnings


class FileParser:
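Taken together, the `last_compiler` bookkeeping and `warn_if_after_compiler` above mean the parser now warns when a variant, version, or hash follows a `%compiler` token, since such tokens will eventually bind to the compiler node rather than the root. A hypothetical usage sketch (assuming a Spack checkout on `sys.path`):

    import warnings

    import spack.spec

    spack.spec.Spec("pkg-a foobar=bar %gcc@10")      # preferred order: no warning
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("always")
        spack.spec.Spec("pkg-a %gcc@10 foobar=bar")  # may warn: `foobar=bar` should go before `%gcc@10`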
@@ -441,7 +513,8 @@ def parse(self):
while True:
if self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
name = name.strip("'\" ")
if name.endswith(":"):
name = name[:-1]
value = value.strip("'\" ").split(",")
attributes[name] = value
if name not in ("deptypes", "virtuals"):
@@ -485,23 +558,18 @@ def parse_one_or_raise(
text (str): text to be parsed
initial_spec: buffer where to parse the spec. If None a new one will be created.
"""
stripped_text = text.strip()
parser = SpecParser(stripped_text)
parser = SpecParser(text)
result = parser.next_spec(initial_spec)
last_token = parser.ctx.current_token
next_token = parser.ctx.next_token

if last_token is not None and last_token.end != len(stripped_text):
message = "a single spec was requested, but parsed more than one:"
message += f"\n{text}"
if last_token is not None:
underline = f"\n{' ' * last_token.end}{'^' * (len(text) - last_token.end)}"
message += color.colorize(f"@*r{{{underline}}}")
if next_token:
message = f"expected a single spec, but got more:\n{text}"
underline = f"\n{' ' * next_token.start}{'^' * len(next_token.value)}"
message += color.colorize(f"@*r{{{underline}}}")
raise ValueError(message)

if result is None:
message = "a single spec was requested, but none was parsed:"
message += f"\n{text}"
raise ValueError(message)
raise ValueError("expected a single spec, but got none")

return result
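The rewritten `parse_one_or_raise` above keys the "more than one spec" error off `next_token` and underlines exactly that token. A minimal sketch of the underline construction, with a stand-in token object:

    class Tok:
        """Stand-in for the parser's Token: start offset and raw value."""
        start, value = 6, "pkg-b"

    text, next_token = "pkg-a pkg-b", Tok()
    message = f"expected a single spec, but got more:\n{text}"
    message += f"\n{' ' * next_token.start}{'^' * len(next_token.value)}"
    print(message)
    # expected a single spec, but got more:
    # pkg-a pkg-b
    #       ^^^^^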
@@ -129,6 +129,6 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
)
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
spec = spack.concretize.concretize_one(
f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
f"pkg-a foobar=bar target={root_target_range} %gcc@10 ^pkg-b target={dep_target_range}"
)
assert spec.target == spec["pkg-b"].target == result
@@ -197,7 +197,11 @@ def dummy_prefix(tmpdir):
@pytest.mark.requires_executables(*required_executables)
@pytest.mark.maybeslow
@pytest.mark.usefixtures(
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
"default_config",
"cache_directory",
"install_dir_default_layout",
"temporary_mirror",
"mutable_mock_env_path",
)
def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
"""
@@ -269,7 +273,11 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
"default_config",
"cache_directory",
"install_dir_default_layout",
"temporary_mirror",
"mutable_mock_env_path",
)
def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
"""
@@ -561,7 +569,6 @@ def test_FetchCacheError_only_accepts_lists_of_errors():
def test_FetchCacheError_pretty_printing_multiple():
e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
str_e = str(e)
print("'" + str_e + "'")
assert "Multiple errors" in str_e
assert "Error 1: RuntimeError: Oops!" in str_e
assert "Error 2: TypeError: Trouble!" in str_e
@@ -1,9 +1,12 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import multiprocessing
import os
import posixpath
import sys
from typing import Dict, Optional, Tuple

import pytest

@@ -48,7 +51,7 @@ def build_environment(monkeypatch, wrapper_dir, tmp_path):
monkeypatch.setenv("SPACK_FC", realcc)

monkeypatch.setenv("SPACK_PREFIX", prefix)
monkeypatch.setenv("SPACK_ENV_PATH", "test")
monkeypatch.setenv("SPACK_COMPILER_WRAPPER_PATH", "test")
monkeypatch.setenv("SPACK_DEBUG_LOG_DIR", ".")
monkeypatch.setenv("SPACK_DEBUG_LOG_ID", "foo-hashabc")
monkeypatch.setenv("SPACK_SHORT_SPEC", "foo@1.2 arch=linux-rhel6-x86_64 /hashabc")
@@ -312,7 +315,7 @@ def test_spack_paths_before_module_paths(
mutable_config.set("packages", {"gcc": {"externals": [gcc_entry]}})

module_path = os.path.join("path", "to", "module")
monkeypatch.setenv("SPACK_ENV_PATH", wrapper_dir)
monkeypatch.setenv("SPACK_COMPILER_WRAPPER_PATH", wrapper_dir)

def _set_wrong_cc(x):
os.environ["PATH"] = module_path + os.pathsep + os.environ["PATH"]
@@ -777,3 +780,139 @@ def test_optimization_flags_are_using_node_target(default_mock_concretization, m

assert len(actions) == 1 and isinstance(actions[0], spack.util.environment.SetEnv)
assert actions[0].value == "-march=x86-64 -mtune=generic"


@pytest.mark.regression("49827")
@pytest.mark.parametrize(
"gcc_config,expected_rpaths",
[
(
"""\
gcc:
externals:
- spec: gcc@14.2.0 languages=c
prefix: /fake/path1
extra_attributes:
compilers:
c: /fake/path1
extra_rpaths:
- /extra/rpaths1
- /extra/rpaths2
""",
"/extra/rpaths1:/extra/rpaths2",
),
(
"""\
gcc:
externals:
- spec: gcc@14.2.0 languages=c
prefix: /fake/path1
extra_attributes:
compilers:
c: /fake/path1
""",
None,
),
],
)
@pytest.mark.not_on_windows("Windows doesn't use the compiler-wrapper")
def test_extra_rpaths_is_set(
working_env, mutable_config, mock_packages, gcc_config, expected_rpaths
):
"""Tests that using a compiler with an 'extra_rpaths' section will set the corresponding
SPACK_COMPILER_EXTRA_RPATHS variable for the wrapper.
"""
cfg_data = syaml.load_config(gcc_config)
spack.config.set("packages", cfg_data)
mpich = spack.concretize.concretize_one("mpich %gcc@14")
spack.build_environment.setup_package(mpich.package, dirty=False)

if expected_rpaths is not None:
assert os.environ["SPACK_COMPILER_EXTRA_RPATHS"] == expected_rpaths
else:
assert "SPACK_COMPILER_EXTRA_RPATHS" not in os.environ


class _TestProcess:
calls: Dict[str, int] = collections.defaultdict(int)
terminated = False
runtime = 0

def __init__(self, *, target, args):
self.alive = None
self.exitcode = 0
self._reset()

def start(self):
self.calls["start"] += 1
self.alive = True

def is_alive(self):
self.calls["is_alive"] += 1
return self.alive

def join(self, timeout: Optional[int] = None):
self.calls["join"] += 1
if timeout is not None and timeout > self.runtime:
self.alive = False

def terminate(self):
self.calls["terminate"] += 1
self._set_terminated()
self.alive = False

@classmethod
def _set_terminated(cls):
cls.terminated = True

@classmethod
def _reset(cls):
cls.calls.clear()
cls.terminated = False


class _TestPipe:
def close(self):
pass

def recv(self):
if _TestProcess.terminated is True:
return 1
return 0


def _pipe_fn(*, duplex: bool = False) -> Tuple[_TestPipe, _TestPipe]:
return _TestPipe(), _TestPipe()


@pytest.fixture()
def mock_build_process(monkeypatch):
monkeypatch.setattr(spack.build_environment, "BuildProcess", _TestProcess)
monkeypatch.setattr(multiprocessing, "Pipe", _pipe_fn)

def _factory(*, runtime: int):
_TestProcess.runtime = runtime

return _factory


@pytest.mark.parametrize(
"runtime,timeout,expected_result,expected_calls",
[
# execution time < timeout
(2, 5, 0, {"start": 1, "join": 1, "is_alive": 1}),
# execution time > timeout
(5, 2, 1, {"start": 1, "join": 2, "is_alive": 1, "terminate": 1}),
],
)
def test_build_process_timeout(
mock_build_process, runtime, timeout, expected_result, expected_calls
):
"""Tests that we make the correct function calls in different timeout scenarios."""
mock_build_process(runtime=runtime)
result = spack.build_environment.start_build_process(
pkg=None, function=None, kwargs={}, timeout=timeout
)

assert result == expected_result
assert _TestProcess.calls == expected_calls
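The mock's call counts pin down the timeout protocol that `start_build_process` is expected to follow. Roughly (a sketch of the expected sequence, not Spack's actual code):

    def run_with_timeout(proc, timeout):
        proc.start()
        proc.join(timeout)          # wait up to `timeout` seconds
        if proc.is_alive():         # still running: the build timed out
            proc.terminate()
            proc.join()             # reap the terminated process
            return 1
        return proc.exitcode        # finished in time: report its exit code

Plugging `_TestProcess` in for `proc` reproduces both parametrized rows: one `join`/`is_alive` on the fast path, and a second `join` plus a `terminate` when the runtime exceeds the timeout.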
@@ -146,7 +146,7 @@ def wrapper_environment(working_env):
SPACK_CXX=real_cc,
SPACK_FC=real_cc,
SPACK_PREFIX=pkg_prefix,
SPACK_ENV_PATH="test",
SPACK_COMPILER_WRAPPER_PATH="test",
SPACK_DEBUG_LOG_DIR=".",
SPACK_DEBUG_LOG_ID="foo-hashabc",
SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
@@ -216,16 +216,13 @@ def check_args_contents(cc, args, must_contain, must_not_contain):
assert a not in cc_modified_args

def check_env_var(executable, var, expected):
"""Check environment variables updated by the passed compiler wrapper

This assumes that cc will print debug output when its environment
contains SPACK_TEST_COMMAND=dump-env-<variable-to-debug>
"""
executable = Executable(str(executable))
with set_env(SPACK_TEST_COMMAND="dump-env-" + var):
output = executable(*test_args, output=str).strip()
assert executable.path + ": " + var + ": " + expected == output
def check_wrapper_var(exe, *args, var, expected):
"""Check variables set by the compiler wrapper. This works by setting SPACK_TEST_COMMAND to
dump-var-<variable-to-debug>, which will print the variable and exit."""
executable = Executable(str(exe))
with set_env(SPACK_TEST_COMMAND=f"dump-var-{var}"):
output = executable(*args, output=str).strip()
assert f"{executable.path}: {var}: {expected}" == output


def dump_mode(cc, args):
@@ -737,23 +734,34 @@ def test_expected_args_with_flags(wrapper_environment, wrapper_flags, wrapper_di


def test_system_path_cleanup(wrapper_environment, wrapper_dir):
"""Ensure SPACK_ENV_PATH is removed from PATH, even with trailing /
"""Ensure SPACK_COMPILER_WRAPPER_PATH is removed from PATH, even with trailing /

The compiler wrapper has to ensure that it is not called nested
like it would happen when gcc's collect2 looks in PATH for ld.

To prevent nested calls, the compiler wrapper removes the elements
of SPACK_ENV_PATH from PATH. Autotest's generated testsuite appends
of SPACK_COMPILER_WRAPPER_PATH from PATH. Autotest's generated testsuite appends
a / to each element of PATH when adding AUTOTEST_PATH.
Thus, ensure that PATH cleanup works even with trailing /.
"""
cc = wrapper_dir / "cc"
system_path = "/bin:/usr/bin:/usr/local/bin"
with set_env(SPACK_ENV_PATH=str(wrapper_dir), SPACK_CC="true"):
with set_env(SPACK_COMPILER_WRAPPER_PATH=str(wrapper_dir)):
with set_env(PATH=str(wrapper_dir) + ":" + system_path):
check_env_var(cc, "PATH", system_path)
check_wrapper_var(cc, *test_args, var="PATH", expected=system_path)
with set_env(PATH=str(wrapper_dir) + "/:" + system_path):
check_env_var(cc, "PATH", system_path)
check_wrapper_var(cc, *test_args, var="PATH", expected=system_path)
def test_language_from_flags(wrapper_environment, wrapper_dir):
"""Tests that the compiler language mode is determined by -x/--language flags if present"""
cc = wrapper_dir / "cc"

for flag_value, lang in [("c", "CC"), ("c++", "CXX"), ("f77", "F77"), ("f95", "FC")]:
check_wrapper_var(cc, "-c", "file", "-x", flag_value, var="comp", expected=lang)
check_wrapper_var(cc, "-c", "file", f"-x{flag_value}", var="comp", expected=lang)
check_wrapper_var(cc, "-c", "file", f"--language={flag_value}", var="comp", expected=lang)
check_wrapper_var(cc, "-c", "file", "--language", flag_value, var="comp", expected=lang)


def test_ld_deps_partial(wrapper_environment, wrapper_dir):
@@ -18,6 +18,7 @@
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse
from spack.version import Version

pytestmark = [pytest.mark.usefixtures("mock_packages")]

@@ -30,6 +31,43 @@ def repro_dir(tmp_path):
yield result


def test_get_added_versions_new_checksum(mock_git_package_changes):
repo, filename, commits = mock_git_package_changes

checksum_versions = {
"3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
"a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a": Version("2.1.4"),
"6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200": Version("2.0.7"),
"86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
}

with fs.working_dir(repo.packages_path):
added_versions = ci.get_added_versions(
checksum_versions, filename, from_ref=commits[-1], to_ref=commits[-2]
)
assert len(added_versions) == 1
assert added_versions[0] == Version("2.1.5")


def test_get_added_versions_new_commit(mock_git_package_changes):
repo, filename, commits = mock_git_package_changes

checksum_versions = {
"74253725f884e2424a0dd8ae3f69896d5377f325": Version("2.1.6"),
"3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
"a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a": Version("2.1.4"),
"6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200": Version("2.0.7"),
"86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
}

with fs.working_dir(repo.packages_path):
added_versions = ci.get_added_versions(
checksum_versions, filename, from_ref=commits[-2], to_ref=commits[-3]
)
assert len(added_versions) == 1
assert added_versions[0] == Version("2.1.6")


def test_pipeline_dag(config, tmpdir):
r"""Test creation, pruning, and traversal of PipelineDAG using the
following package dependency graph:
@@ -214,9 +214,7 @@ def verify_mirror_contents():
if in_env_pkg in p:
found_pkg = True

if not found_pkg:
print("Expected to find {0} in {1}".format(in_env_pkg, dest_mirror_dir))
assert False
assert found_pkg, f"Expected to find {in_env_pkg} in {dest_mirror_dir}"

# Install a package and put it in the buildcache
s = spack.concretize.concretize_one(out_env_pkg)
@@ -22,7 +22,11 @@
import spack.hash_types as ht
import spack.main
import spack.paths as spack_paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.spack_yaml as syaml
import spack.version
from spack.ci import gitlab as gitlab_generator
from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
from spack.ci.generator_registry import generator
@@ -867,7 +871,7 @@ def test_push_to_build_cache(
logs_dir = scratch / "logs_dir"
logs_dir.mkdir()
ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir))
assert "spack-build-out.txt" in os.listdir(logs_dir)
assert "spack-build-out.txt.gz" in os.listdir(logs_dir)

dl_dir = scratch / "download_dir"
buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir))
@@ -1841,3 +1845,229 @@ def test_ci_generate_alternate_target(

assert pipeline_doc.startswith("unittestpipeline")
assert "externaltest" in pipeline_doc


@pytest.fixture
def fetch_versions_match(monkeypatch):
"""Fake successful checksums returned from downloaded tarballs."""

def get_checksums_for_versions(url_by_version, package_name, **kwargs):
pkg_cls = spack.repo.PATH.get_pkg_class(package_name)
return {v: pkg_cls.versions[v]["sha256"] for v in url_by_version}

monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)

@pytest.fixture
def fetch_versions_invalid(monkeypatch):
"""Fake invalid checksums returned from downloaded tarballs."""

def get_checksums_for_versions(url_by_version, package_name, **kwargs):
return {
v: "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
for v in url_by_version
}

monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)


@pytest.mark.parametrize("versions", [["2.1.4"], ["2.1.4", "2.1.5"]])
def test_ci_validate_standard_versions_valid(capfd, mock_packages, fetch_versions_match, versions):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v in versions]

assert spack.cmd.ci.validate_standard_versions(pkg, version_list)

out, err = capfd.readouterr()
for version in versions:
assert f"Validated diff-test@{version}" in out


@pytest.mark.parametrize("versions", [["2.1.4"], ["2.1.4", "2.1.5"]])
def test_ci_validate_standard_versions_invalid(
capfd, mock_packages, fetch_versions_invalid, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v in versions]

assert spack.cmd.ci.validate_standard_versions(pkg, version_list) is False

out, err = capfd.readouterr()
for version in versions:
assert f"Invalid checksum found diff-test@{version}" in err


@pytest.mark.parametrize("versions", [[("1.0", -2)], [("1.1", -4), ("2.0", -6)]])
def test_ci_validate_git_versions_valid(
capfd, monkeypatch, mock_packages, mock_git_version_info, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v, _ in versions]

repo_path, filename, commits = mock_git_version_info
version_commit_dict = {
spack.version.Version(v): {"tag": f"v{v}", "commit": commits[c]} for v, c in versions
}

pkg_class = spec.package_class

monkeypatch.setattr(pkg_class, "git", repo_path)
monkeypatch.setattr(pkg_class, "versions", version_commit_dict)

assert spack.cmd.ci.validate_git_versions(pkg, version_list)

out, err = capfd.readouterr()
for version in version_list:
assert f"Validated diff-test@{version}" in out


@pytest.mark.parametrize("versions", [[("1.0", -3)], [("1.1", -5), ("2.0", -5)]])
def test_ci_validate_git_versions_bad_tag(
capfd, monkeypatch, mock_packages, mock_git_version_info, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v, _ in versions]

repo_path, filename, commits = mock_git_version_info
version_commit_dict = {
spack.version.Version(v): {"tag": f"v{v}", "commit": commits[c]} for v, c in versions
}

pkg_class = spec.package_class

monkeypatch.setattr(pkg_class, "git", repo_path)
monkeypatch.setattr(pkg_class, "versions", version_commit_dict)

assert spack.cmd.ci.validate_git_versions(pkg, version_list) is False

out, err = capfd.readouterr()
for version in version_list:
assert f"Mismatched tag <-> commit found for diff-test@{version}" in err


@pytest.mark.parametrize("versions", [[("1.0", -2)], [("1.1", -4), ("2.0", -6), ("3.0", -6)]])
def test_ci_validate_git_versions_invalid(
capfd, monkeypatch, mock_packages, mock_git_version_info, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v, _ in versions]

repo_path, filename, commits = mock_git_version_info
version_commit_dict = {
spack.version.Version(v): {
"tag": f"v{v}",
"commit": "abcdefabcdefabcdefabcdefabcdefabcdefabc",
}
for v, c in versions
}

pkg_class = spec.package_class

monkeypatch.setattr(pkg_class, "git", repo_path)
monkeypatch.setattr(pkg_class, "versions", version_commit_dict)

assert spack.cmd.ci.validate_git_versions(pkg, version_list) is False

out, err = capfd.readouterr()
for version in version_list:
assert f"Invalid commit for diff-test@{version}" in err


def mock_packages_path(path):
def packages_path():
return path

return packages_path


@pytest.fixture
def verify_standard_versions_valid(monkeypatch):
def validate_standard_versions(pkg, versions):
for version in versions:
print(f"Validated {pkg.name}@{version}")
return True

monkeypatch.setattr(spack.cmd.ci, "validate_standard_versions", validate_standard_versions)


@pytest.fixture
def verify_git_versions_valid(monkeypatch):
def validate_git_versions(pkg, versions):
for version in versions:
print(f"Validated {pkg.name}@{version}")
return True

monkeypatch.setattr(spack.cmd.ci, "validate_git_versions", validate_git_versions)


@pytest.fixture
def verify_standard_versions_invalid(monkeypatch):
def validate_standard_versions(pkg, versions):
for version in versions:
print(f"Invalid checksum found {pkg.name}@{version}")
return False

monkeypatch.setattr(spack.cmd.ci, "validate_standard_versions", validate_standard_versions)


@pytest.fixture
def verify_git_versions_invalid(monkeypatch):
def validate_git_versions(pkg, versions):
for version in versions:
print(f"Invalid commit for {pkg.name}@{version}")
return False

monkeypatch.setattr(spack.cmd.ci, "validate_git_versions", validate_git_versions)


def test_ci_verify_versions_valid(
monkeypatch,
mock_packages,
mock_git_package_changes,
verify_standard_versions_valid,
verify_git_versions_valid,
tmpdir,
):
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)

monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))

out = ci_cmd("verify-versions", commits[-1], commits[-3])
assert "Validated diff-test@2.1.5" in out
assert "Validated diff-test@2.1.6" in out


def test_ci_verify_versions_standard_invalid(
monkeypatch,
mock_packages,
mock_git_package_changes,
verify_standard_versions_invalid,
verify_git_versions_invalid,
):
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)

monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))

out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
assert "Invalid checksum found diff-test@2.1.5" in out
assert "Invalid commit for diff-test@2.1.6" in out


def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes):
repo, _, commits = mock_git_package_changes
spack.repo.PATH.put_first(repo)

monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))

pkg_class = spack.spec.Spec("diff-test").package_class
monkeypatch.setattr(pkg_class, "manual_download", True)

out = ci_cmd("verify-versions", commits[-1], commits[-2])
assert "Skipping manual download package: diff-test" in out
@@ -5,6 +5,7 @@
import filecmp
import os
import shutil
import textwrap

import pytest

@@ -259,15 +260,25 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):
def test_updated_completion_scripts(shell, tmpdir):
"""Make sure our shell tab completion scripts remain up-to-date."""

msg = (
width = 72
lines = textwrap.wrap(
"It looks like Spack's command-line interface has been modified. "
"Please update Spack's shell tab completion scripts by running:\n\n"
" spack commands --update-completion\n\n"
"and adding the changed files to your pull request."
"If differences are more than your global 'include:' scopes, please "
"update Spack's shell tab completion scripts by running:",
width,
)
lines.append("\n spack commands --update-completion\n")
lines.extend(
textwrap.wrap(
"and adding the changed files (minus your global 'include:' scopes) "
"to your pull request.",
width,
)
)
msg = "\n".join(lines)

header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
script = "spack-completion.{0}".format(shell)
script = f"spack-completion.{shell}"
old_script = os.path.join(spack.paths.share_path, script)
new_script = str(tmpdir.join(script))
@@ -213,7 +213,7 @@ def test_config_add_update_dict(mutable_empty_config):

def test_config_with_c_argument(mutable_empty_config):
# I don't know how to add a spack argument to a Spack Command, so we test this way
config_file = "config:install_root:root:/path/to/config.yaml"
config_file = "config:install_tree:root:/path/to/config.yaml"
parser = spack.main.make_argument_parser()
args = parser.parse_args(["-c", config_file])
assert config_file in args.config_vars
@@ -221,7 +221,7 @@ def test_config_with_c_argument(mutable_empty_config):
# Add the path to the config
config("add", args.config_vars[0], scope="command_line")
output = config("get", "config")
assert "config:\n  install_root:\n    root: /path/to/config.yaml" in output
assert "config:\n  install_tree:\n    root: /path/to/config.yaml" in output


def test_config_add_ordered_dict(mutable_empty_config):
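What the corrected test drives, as a standalone sketch (assumes a Spack checkout on `sys.path`; `install_tree` is the real config key, which the old test misspelled as `install_root`):

    import spack.main

    parser = spack.main.make_argument_parser()
    args = parser.parse_args(["-c", "config:install_tree:root:/path/to/config.yaml"])
    assert "config:install_tree:root:/path/to/config.yaml" in args.config_vars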
@@ -62,7 +62,7 @@
(
["-t", "intel", "/test-intel"],
"test-intel",
[r"TestIntel(IntelPackage)", r"setup_environment"],
[r"TestIntel(IntelOneApiPackage)", r"setup_environment"],
),
(
["-t", "makefile", "/test-makefile"],
@@ -38,10 +38,9 @@
(["--transitive", "--deptype=link", "dtbuild1"], {"dtlink2"}),
],
)
def test_direct_dependencies(cli_args, expected, mock_runtimes):
def test_direct_dependencies(cli_args, expected, mock_packages):
out = dependencies(*cli_args)
result = set(re.split(r"\s+", out.strip()))
expected.update(mock_runtimes)
assert expected == result

@@ -15,6 +15,9 @@
deprecate = SpackCommand("deprecate")
find = SpackCommand("find")

# Unit tests should not be affected by the user's managed environments
pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")


def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
install("--fake", "libelf@0.8.13")
@@ -16,6 +16,7 @@
import spack.stage
import spack.util.git
import spack.util.path
from spack.error import SpackError
from spack.main import SpackCommand

add = SpackCommand("add")
@@ -159,6 +160,7 @@ def check_path(stage, dest):
# Create path to allow develop to modify env
fs.mkdirp(abspath)
develop("--no-clone", "-p", path, "mpich@1.0")
self.check_develop(e, spack.spec.Spec("mpich@=1.0"), path)

# Remove path to ensure develop with no args runs staging code
os.rmdir(abspath)
@@ -218,6 +220,40 @@ def test_develop_full_git_repo(
assert len(commits) > 1


def test_recursive(mutable_mock_env_path, install_mockery, mock_fetch):
env("create", "test")

with ev.read("test") as e:
add("indirect-mpich@1.0")
e.concretize()
specs = e.all_specs()

assert len(specs) > 1
develop("--recursive", "mpich")

expected_dev_specs = ["mpich", "direct-mpich", "indirect-mpich"]
for spec in expected_dev_specs:
assert spec in e.dev_specs
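A hypothetical interactive equivalent of `test_recursive` above, using the same command wrappers (requires an environment named "test" whose concretized graph contains mpich, as the test builds):

    import spack.environment as ev
    from spack.main import SpackCommand

    develop = SpackCommand("develop")
    with ev.read("test") as e:
        develop("--recursive", "mpich")
        # the whole chain that reaches mpich becomes develop specs:
        assert {"mpich", "direct-mpich", "indirect-mpich"} <= set(e.dev_specs)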


def test_develop_fails_with_multiple_concrete_versions(
mutable_mock_env_path, install_mockery, mock_fetch
):
env("create", "test")

with ev.read("test") as e:
add("indirect-mpich@1.0")
add("indirect-mpich@0.9")
e.unify = False
e.concretize()

with pytest.raises(SpackError) as develop_error:
develop("indirect-mpich", fail_on_error=True)

error_str = "has multiple concrete instances in the graph"
assert error_str in str(develop_error.value)


def test_concretize_dev_path_with_at_symbol_in_env(mutable_mock_env_path, tmpdir, mock_packages):
spec_like = "develop-test@develop"
@@ -1067,13 +1067,17 @@ def test_init_from_yaml_relative_includes(tmp_path):
assert os.path.exists(os.path.join(e2.path, f))


# TODO: Should we be supporting relative path rewrites when creating new env from existing?
# TODO: If so, then this should confirm that the absolute include paths in the new env exist.
def test_init_from_yaml_relative_includes_outside_env(tmp_path):
files = ["../outside_env_not_copied/repos.yaml"]
"""Ensure relative includes to files outside the environment fail."""
files = ["../outside_env/repos.yaml"]

manifest = f"""
spack:
specs: []
include: {files}
include:
- path: {files[0]}
"""

# subdir to ensure parent of environment dir is not shared
@@ -1086,7 +1090,7 @@ def test_init_from_yaml_relative_includes_outside_env(tmp_path):
for f in files:
fs.touchp(e1_path / f)

with pytest.raises(spack.config.ConfigFileError, match="Detected 1 missing include"):
with pytest.raises(ValueError, match="does not exist"):
_ = _env_create("test2", init_file=e1_manifest)


@@ -1186,14 +1190,14 @@ def test_env_with_config(environment_from_manifest):


def test_with_config_bad_include_create(environment_from_manifest):
"""Confirm missing include paths raise expected exception and error."""
with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
"""Confirm missing required include raises expected exception."""
err = "does not exist"
with pytest.raises(ValueError, match=err):
environment_from_manifest(
"""
spack:
include:
- /no/such/directory
- no/such/file.yaml
"""
)

@@ -1203,34 +1207,25 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
include1 = env_root / "include1.yaml"
include1.touch()

abs_include_path = os.path.abspath(tmpdir.join("subdir").ensure("include2.yaml"))

spack_yaml = env_root / ev.manifest_name
spack_yaml.write_text(
f"""
"""
spack:
include:
- ./include1.yaml
- {abs_include_path}
"""
)

with ev.Environment(env_root) as e:
e.concretize()

# we've created an environment with some included config files (which do
# in fact exist): now we remove them and check that we get a sensible
# error message
# We've created an environment with included config file (which does
# exist). Now we remove it and check that we get a sensible error.

os.remove(abs_include_path)
os.remove(include1)
with pytest.raises(spack.config.ConfigFileError) as exc:
with pytest.raises(ValueError, match="does not exist"):
ev.activate(ev.Environment(env_root))

err = exc.value.message
assert "missing include" in err
assert abs_include_path in err
assert "include1.yaml" in err
assert ev.active_environment() is None


@@ -1338,8 +1333,10 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
included file scope.
"""

env_path = tmp_path / "test_config"
fs.mkdirp(env_path)
included_file = "included-packages.yaml"
included_path = tmp_path / included_file
included_path = env_path / included_file
with open(included_path, "w", encoding="utf-8") as f:
f.write(
"""\
@@ -1355,7 +1352,7 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
"""
)

spack_yaml = tmp_path / ev.manifest_name
spack_yaml = env_path / ev.manifest_name
spack_yaml.write_text(
f"""\
spack:
@@ -1369,7 +1366,8 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
"""
)

e = ev.Environment(tmp_path)
mutable_config.set("config:misc_cache", str(tmp_path / "cache"))
e = ev.Environment(env_path)
with e:
# List of requirements, flip a variant
config("change", "packages:mpich:require:~debug")
@@ -1459,19 +1457,6 @@ def test_env_with_included_config_file_url(tmpdir, mutable_empty_config, package
assert cfg["mpileaks"]["version"] == ["2.2"]


def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config):
"""Test inclusion of a missing configuration file raises FetchError
noting missing file."""

spack_yaml = tmpdir.join("spack.yaml")
missing_file = tmpdir.join("packages.yaml")
with spack_yaml.open("w") as f:
f.write("spack:\n  include:\n    - {0}\n".format(missing_file.strpath))

with pytest.raises(spack.error.ConfigError, match="missing include path"):
ev.Environment(tmpdir.strpath)


def test_env_with_included_config_scope(mutable_mock_env_path, packages_file):
"""Test inclusion of a package file from the environment's configuration
stage directory. This test is intended to represent a case where a remote
@@ -1566,7 +1551,7 @@ def test_env_with_included_config_precedence(tmp_path):


def test_env_with_included_configs_precedence(tmp_path):
"""Test precendence of multiple included configuration files."""
"""Test precedence of multiple included configuration files."""
file1 = "high-config.yaml"
file2 = "low-config.yaml"

@@ -1794,7 +1779,7 @@ def test_roots_display_with_variants():
with ev.read("test"):
out = find(output=str)

assert "boost +shared" in out
assert "boost+shared" in out


def test_uninstall_keeps_in_env(mock_stage, mock_fetch, install_mockery):
@@ -3080,14 +3065,26 @@ def test_stack_view_activate_from_default(

def test_envvar_set_in_activate(tmp_path, mock_packages, install_mockery):
spack_yaml = tmp_path / "spack.yaml"
env_vars_yaml = tmp_path / "env_vars.yaml"

env_vars_yaml.write_text(
"""
env_vars:
set:
CONFIG_ENVAR_SET_IN_ENV_LOAD: "True"
"""
)

spack_yaml.write_text(
"""
spack:
include:
- env_vars.yaml
specs:
- cmake%gcc
env_vars:
set:
ENVAR_SET_IN_ENV_LOAD: "True"
SPACK_ENVAR_SET_IN_ENV_LOAD: "True"
"""
)

@@ -3098,12 +3095,16 @@ def test_envvar_set_in_activate(tmp_path, mock_packages, install_mockery):
test_env = ev.read("test")
output = env("activate", "--sh", "test")

assert "ENVAR_SET_IN_ENV_LOAD=True" in output
assert "SPACK_ENVAR_SET_IN_ENV_LOAD=True" in output
assert "CONFIG_ENVAR_SET_IN_ENV_LOAD=True" in output

with test_env:
with spack.util.environment.set_env(ENVAR_SET_IN_ENV_LOAD="True"):
with spack.util.environment.set_env(
SPACK_ENVAR_SET_IN_ENV_LOAD="True", CONFIG_ENVAR_SET_IN_ENV_LOAD="True"
):
output = env("deactivate", "--sh")
assert "unset ENVAR_SET_IN_ENV_LOAD" in output
assert "unset SPACK_ENVAR_SET_IN_ENV_LOAD" in output
assert "unset CONFIG_ENVAR_SET_IN_ENV_LOAD" in output


def test_stack_view_no_activate_without_default(
@@ -4277,21 +4278,31 @@ def test_unify_when_possible_works_around_conflicts():
assert len([x for x in e.all_specs() if x.satisfies("mpich")]) == 1


# Using mock_include_cache to ensure the "remote" file is cached in a temporary
# location and not polluting the user cache.
def test_env_include_packages_url(
tmpdir, mutable_empty_config, mock_spider_configs, mock_curl_configs
tmpdir, mutable_empty_config, mock_fetch_url_text, mock_curl_configs, mock_include_cache
):
"""Test inclusion of a (GitHub) URL."""
develop_url = "https://github.com/fake/fake/blob/develop/"
default_packages = develop_url + "etc/fake/defaults/packages.yaml"
sha256 = "8b69d9c6e983dfb8bac2ddc3910a86265cffdd9c85f905c716d426ec5b0d9847"
spack_yaml = tmpdir.join("spack.yaml")
with spack_yaml.open("w") as f:
f.write("spack:\n  include:\n    - {0}\n".format(default_packages))
assert os.path.isfile(spack_yaml.strpath)
f.write(
f"""\
spack:
include:
- path: {default_packages}
sha256: {sha256}
"""
)

with spack.config.override("config:url_fetch_method", "curl"):
env = ev.Environment(tmpdir.strpath)
ev.activate(env)

# Make sure a setting from test/data/config/packages.yaml is present
cfg = spack.config.get("packages")
assert "mpich" in cfg["all"]["providers"]["mpi"]
@@ -4360,7 +4371,7 @@ def test_env_view_disabled(tmp_path, mutable_mock_env_path):


@pytest.mark.parametrize("first", ["false", "true", "custom"])
def test_env_include_mixed_views(tmp_path, mutable_mock_env_path, mutable_config, first):
def test_env_include_mixed_views(tmp_path, mutable_config, mutable_mock_env_path, first):
"""Ensure including path and boolean views in different combinations result
in the creation of only the first view if it is not disabled."""
false_yaml = tmp_path / "false-view.yaml"
@@ -712,10 +712,11 @@ def test_install_deps_then_package(tmpdir, mock_fetch, install_mockery):
assert os.path.exists(root.prefix)


# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
@pytest.mark.regression("12002")
def test_install_only_dependencies_in_env(
tmpdir, mock_fetch, install_mockery, mutable_mock_env_path
tmpdir, mutable_mock_env_path, mock_fetch, install_mockery
):
env("create", "test")

@@ -729,9 +730,10 @@ def test_install_only_dependencies_in_env(
assert not os.path.exists(root.prefix)


# Unit tests should not be affected by the user's managed environments
@pytest.mark.regression("12002")
def test_install_only_dependencies_of_all_in_env(
tmpdir, mock_fetch, install_mockery, mutable_mock_env_path
tmpdir, mutable_mock_env_path, mock_fetch, install_mockery
):
env("create", "--without-view", "test")

@@ -751,7 +753,8 @@ def test_install_only_dependencies_of_all_in_env(
assert os.path.exists(dep.prefix)


def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock_env_path):
# Unit tests should not be affected by the user's managed environments
def test_install_no_add_in_env(tmpdir, mutable_mock_env_path, mock_fetch, install_mockery):
# To test behavior of --add option, we create the following environment:
#
# mpileaks
@@ -892,7 +895,6 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
specfile = "./spec.json"
with open(specfile, "w", encoding="utf-8") as f:
f.write(spec.to_json())
print(spec.to_json())
install("--log-file=cdash_reports", "--log-format=cdash", specfile)
# Verify Configure.xml exists with expected contents.
report_dir = tmpdir.join("cdash_reports")
@@ -927,9 +929,10 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
assert "using the `spack.yaml` in this directory" in output


# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
def test_install_env_with_tests_all(
tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
tmpdir, mutable_mock_env_path, mock_packages, mock_fetch, install_mockery
):
env("create", "test")
with ev.read("test"):
@@ -939,9 +942,10 @@ def test_install_env_with_tests_all(
assert os.path.exists(test_dep.prefix)


# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
def test_install_env_with_tests_root(
tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
tmpdir, mutable_mock_env_path, mock_packages, mock_fetch, install_mockery
):
env("create", "test")
with ev.read("test"):
@@ -951,9 +955,10 @@ def test_install_env_with_tests_root(
assert not os.path.exists(test_dep.prefix)


# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
def test_install_empty_env(
tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
tmpdir, mutable_mock_env_path, mock_packages, mock_fetch, install_mockery
):
env_name = "empty"
env("create", env_name)
@@ -989,9 +994,17 @@ def test_installation_fail_tests(install_mockery, mock_fetch, name, method):
assert "See test log for details" in output


# Unit tests should not be affected by the user's managed environments
@pytest.mark.not_on_windows("Buildcache not supported on windows")
def test_install_use_buildcache(
capsys, mock_packages, mock_fetch, mock_archive, mock_binary_index, tmpdir, install_mockery
capsys,
mutable_mock_env_path,
mock_packages,
mock_fetch,
mock_archive,
mock_binary_index,
tmpdir,
install_mockery,
):
"""
Make sure installing with use-buildcache behaves correctly.
@@ -12,6 +12,9 @@
install = SpackCommand("install")
uninstall = SpackCommand("uninstall")

# Unit tests should not be affected by the user's managed environments
pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")


@pytest.mark.db
def test_mark_mode_required(mutable_database):
Some files were not shown because too many files have changed in this diff.