Compare commits
566 Commits
packages/c ... fix/fewer-
| Author | SHA1 | Date |
|---|---|---|
|  | 8c563e3cf0 |  |
|  | 9c5a70ab6c |  |
|  | 5ef58144cb |  |
|  | 41ddbdfd90 |  |
|  | 66924c85a3 |  |
|  | a4c3fc138c |  |
|  | 62ed2a07a7 |  |
|  | e7aec9e872 |  |
|  | b065c3e11e |  |
|  | 88b357c453 |  |
|  | bb7299c04a |  |
|  | 7a5bddfd15 |  |
|  | 50fe769f82 |  |
|  | 29d39d1adf |  |
|  | 8dde7f3975 |  |
|  | 0cd038273e |  |
|  | 1f5bfe80ed |  |
|  | 4d2611ad8a |  |
|  | 21a97dad31 |  |
|  | 338a01ca6d |  |
|  | 392396ded4 |  |
|  | a336e0edb7 |  |
|  | 9426fefa00 |  |
|  | 812192eef5 |  |
|  | b8c8e80965 |  |
|  | 77fd5d8414 |  |
|  | 82050ed371 |  |
|  | a7381a9413 |  |
|  | b932783d4d |  |
|  | 0b51f25034 |  |
|  | d6a182fb5d |  |
|  | e8635adb21 |  |
|  | f242e0fd0c |  |
|  | 67b5f6b838 |  |
|  | 9d16f17463 |  |
|  | f44f5b0db0 |  |
|  | 39ace5fc45 |  |
|  | 0601d6a0c5 |  |
|  | 11869ff872 |  |
|  | 6753605807 |  |
|  | 918db85737 |  |
|  | 1184de8352 |  |
|  | 2470fde5d9 |  |
|  | abfff43976 |  |
|  | 230687a501 |  |
|  | 5ff8908ff3 |  |
|  | 882e09e50b |  |
|  | 6753f4a7cb |  |
|  | 1dc63dbea6 |  |
|  | b9dfae4722 |  |
|  | 70412612c7 |  |
|  | cd741c368c |  |
|  | 16a7bef456 |  |
|  | 85f62728c6 |  |
|  | 092dc96e6c |  |
|  | 2bb20caa5f |  |
|  | 00bcf935e8 |  |
|  | 3751372396 |  |
|  | e6afeca92f |  |
|  | 35b9307af6 |  |
|  | 567f728579 |  |
|  | 404c5c29a1 |  |
|  | 63712ba6c6 |  |
|  | ef62d47dc7 |  |
|  | a4594857fc |  |
|  | e77572b753 |  |
|  | 8c84c5ff66 |  |
|  | 5d8beaf0ed |  |
|  | ac405f3d79 |  |
|  | 2e30553310 |  |
|  | 85a61772d8 |  |
|  | 4007f8726d |  |
|  | a097f7791b |  |
|  | 3d4d89b2c0 |  |
|  | e461234865 |  |
|  | 2c1d5f9844 |  |
|  | c4b682b983 |  |
|  | de0b784d5a |  |
|  | 5f38afdfc7 |  |
|  | ac67c6e34b |  |
|  | 72deb53832 |  |
|  | 7c87253fd8 |  |
|  | 1136aedd08 |  |
|  | 24e1b56268 |  |
|  | eef6a79b35 |  |
|  | 556a36cbd7 |  |
|  | 8aa490d6b7 |  |
|  | d9d085da10 |  |
|  | d88d720577 |  |
|  | 1d670ae744 |  |
|  | 35ad6f52c1 |  |
|  | b61bae7640 |  |
|  | 8b7abace8b |  |
|  | 5cf98d9564 |  |
|  | 973a961cb5 |  |
|  | 868d0cb957 |  |
|  | 497f3a3832 |  |
|  | 9843f41bce |  |
|  | e54fefc2b7 |  |
|  | 90c0889533 |  |
|  | 6696e82ce7 |  |
|  | dcc55d53db |  |
|  | 92000e81b8 |  |
|  | 125175ae25 |  |
|  | f60e548a0d |  |
|  | 04dc16a6b1 |  |
|  | 27b90e38db |  |
|  | 7e5ce3ba48 |  |
|  | f5f7cfdc8f |  |
|  | 3e1a562312 |  |
|  | ce4d962faa |  |
|  | b9816a97fc |  |
|  | f7b9c30456 |  |
|  | 884620a38a |  |
|  | 7503a41773 |  |
|  | 9a5fc6b4a3 |  |
|  | a31aeed167 |  |
|  | 71f542a951 |  |
|  | 322bd48788 |  |
|  | b752fa59d4 |  |
|  | d53e4cc426 |  |
|  | ee4b7fa3a1 |  |
|  | d6f02c86d9 |  |
|  | 62efde8e3c |  |
|  | bda1d94d49 |  |
|  | 3f472039c5 |  |
|  | 912ef34206 |  |
|  | 9c88a48a73 |  |
|  | 4bf5cc9a9a |  |
|  | 08834e2b03 |  |
|  | 8020a111df |  |
|  | 86fb547f7c |  |
|  | b9556c7c44 |  |
|  | 7bdb106b1b |  |
|  | 2b191cd7f4 |  |
|  | 774f0a4e60 |  |
|  | faf11efa72 |  |
|  | 5a99142b41 |  |
|  | a3aca0242a |  |
|  | 72f276fab3 |  |
|  | 21139945df |  |
|  | 900bd2f477 |  |
|  | 29d4a5af44 |  |
|  | dd9b7ed6a7 |  |
|  | 09ff74be62 |  |
|  | a94ebfea11 |  |
|  | 8f5fe1d123 |  |
|  | d4fb58efa3 |  |
|  | ce900346cc |  |
|  | 7cb64e465f |  |
|  | eb70c9f5b9 |  |
|  | a28405700e |  |
|  | f8f4d94d7a |  |
|  | 32dfb522d6 |  |
|  | c61c707aa5 |  |
|  | 60d10848c8 |  |
|  | dcd6b530f9 |  |
|  | 419f0742a0 |  |
|  | c99174798b |  |
|  | 8df2a4b511 |  |
|  | c174cf6830 |  |
|  | 5eebd65366 |  |
|  | 625f5323c0 |  |
|  | e05a32cead |  |
|  | c69af5d1e5 |  |
|  | 1ac2ee8043 |  |
|  | 36af1c1c73 |  |
|  | e2fa087002 |  |
|  | df02bfbad2 |  |
|  | fecb63843e |  |
|  | b33e2d09d3 |  |
|  | f8054aa21a |  |
|  | 8f3a2acc54 |  |
|  | d1a20908b8 |  |
|  | dd781f7368 |  |
|  | 9bcc43c4c1 |  |
|  | 77c83af17d |  |
|  | 574bd2db99 |  |
|  | a76f37da96 |  |
|  | 9e75f3ec0a |  |
|  | 4d42d45897 |  |
|  | a4b4bfda73 |  |
|  | 1bcdd3a57e |  |
|  | 297a3a1bc9 |  |
|  | 8d01e8c978 |  |
|  | 6be28aa303 |  |
|  | 5e38310515 |  |
|  | ddfed65485 |  |
|  | 2a16d8bfa8 |  |
|  | 6a40a50a29 |  |
|  | b2924f68c0 |  |
|  | 41ffe36636 |  |
|  | 24edc72252 |  |
|  | 83b38a26a0 |  |
|  | 914d785e3b |  |
|  | f99f642fa8 |  |
|  | e0bf3667e3 |  |
|  | a24ca50fed |  |
|  | 51e9f37252 |  |
|  | 453900c884 |  |
|  | 4696459d2d |  |
|  | ad1e3231e5 |  |
|  | 2ef7eb1826 |  |
|  | fe86019f9a |  |
|  | 9dbb18219f |  |
|  | 451a977de0 |  |
|  | e604929a4c |  |
|  | 9d591f9f7c |  |
|  | f8ad915100 |  |
|  | cbbabe6920 |  |
|  | 81fe460194 |  |
|  | b894f996c0 |  |
|  | 1ce09847d9 |  |
|  | 722d401394 |  |
|  | e6f04d5ef9 |  |
|  | b8e3ecbf00 |  |
|  | d189387c24 |  |
|  | 9e96ddc5ae |  |
|  | 543bd189af |  |
|  | 43291aa723 |  |
|  | d0589285f7 |  |
|  | d079aaa083 |  |
|  | 6c65977e0d |  |
|  | 1b5d786cf5 |  |
|  | 4cf00645bd |  |
|  | e9149cfc3c |  |
|  | a5c8111076 |  |
|  | c3576f712d |  |
|  | 410e6a59b7 |  |
|  | bd2b2fb75a |  |
|  | 7ae318efd0 |  |
|  | 73e9d56647 |  |
|  | f87a752b63 |  |
|  | ae2fec30c3 |  |
|  | 1af5564cbe |  |
|  | a8f057a701 |  |
|  | 7f3dd38ccc |  |
|  | 8e9adefcd5 |  |
|  | d276f9700f |  |
|  | 4f111659ec |  |
|  | eaf330f2a8 |  |
|  | cdaeb74dc7 |  |
|  | fbaac46604 |  |
|  | 7f6210ee90 |  |
|  | 63f6e6079a |  |
|  | d4fd6caae0 |  |
|  | fd3c18b6fd |  |
|  | 725f427f25 |  |
|  | 32b3e91ef7 |  |
|  | b7e4602268 |  |
|  | 4a98d4db93 |  |
|  | 9d6bf373be |  |
|  | cff35c4987 |  |
|  | d594f84b8f |  |
|  | f8f01c336c |  |
|  | 12e3665df3 |  |
|  | fa4778b9fc |  |
|  | 66d297d420 |  |
|  | 56251c11f3 |  |
|  | 40bf9a179b |  |
|  | 095aba0b9f |  |
|  | 4270136598 |  |
|  | f73d7d2dce |  |
|  | 567566da08 |  |
|  | 30a9ab749d |  |
|  | 8160a96b27 |  |
|  | 10414d3e6c |  |
|  | 1d96c09094 |  |
|  | e7112fbc6a |  |
|  | b79761b7eb |  |
|  | 3381899c69 |  |
|  | c7cf5eabc1 |  |
|  | d88fa5cf8e |  |
|  | 2ed0e3d737 |  |
|  | 506a40cac1 |  |
|  | 447739fcef |  |
|  | e60f6f4a6e |  |
|  | 7df35d0da0 |  |
|  | 71b035ece1 |  |
|  | 86a134235e |  |
|  | 24cd0da7fb |  |
|  | 762833a663 |  |
|  | 636d479e5f |  |
|  | f2184f26fa |  |
|  | e1686eef7c |  |
|  | 314893982e |  |
|  | 9ab6c30a3d |  |
|  | ddf94291d4 |  |
|  | 5d1038c512 |  |
|  | 2e40c88d50 |  |
|  | 2bcba57757 |  |
|  | 37330e5e2b |  |
|  | b4411cf2db |  |
|  | 65d1ae083c |  |
|  | 0b8faa3918 |  |
|  | f077c7e33b |  |
|  | 9d7410d22e |  |
|  | e295730d0e |  |
|  | 868327ee14 |  |
|  | f5430b16bc |  |
|  | 2446695113 |  |
|  | e0c6cca65c |  |
|  | 84ed4cd331 |  |
|  | f6d50f790e |  |
|  | d3c3d23d1e |  |
|  | 33752c2b55 |  |
|  | 26759249ca |  |
|  | 8b4cbbe7b3 |  |
|  | be71f9fdc4 |  |
|  | 05c1e7ecc2 |  |
|  | f7afd67a26 |  |
|  | d22bdc1c4e |  |
|  | 540f9eefb7 |  |
|  | 2db5bca778 |  |
|  | bcd05407b8 |  |
|  | b35ec605fe |  |
|  | 0a353abc42 |  |
|  | e178c58847 |  |
|  | d7297e67a5 |  |
|  | ee8addf04a |  |
|  | fd3cd3a1c6 |  |
|  | e585aeb883 |  |
|  | 1f43384db4 |  |
|  | 814b328fca |  |
|  | 125206d44d |  |
|  | a081b875b4 |  |
|  | a16ee3348b |  |
|  | d654d6b1f4 |  |
|  | 9b4ca0be40 |  |
|  | dc71dcfdc2 |  |
|  | 1f31c3374c |  |
|  | 27aeb6e293 |  |
|  | 715214c1a1 |  |
|  | b471d62dbd |  |
|  | a5f62889ca |  |
|  | 2a942d98e3 |  |
|  | 4a4077d4ef |  |
|  | c0fcccc232 |  |
|  | 0b2cbfefce |  |
|  | c499514322 |  |
|  | ae392b5935 |  |
|  | 62e9bb5d51 |  |
|  | 6cd948184e |  |
|  | 44ff24f558 |  |
|  | c657dfb768 |  |
|  | f2e410d95a |  |
|  | df443a38d6 |  |
|  | 74fe498cb8 |  |
|  | 5f13a48bf2 |  |
|  | c4824f7fd2 |  |
|  | 49a8634584 |  |
|  | eac5ea869f |  |
|  | f5946c4621 |  |
|  | 8564ab19c3 |  |
|  | aae7a22d39 |  |
|  | 09cea230b4 |  |
|  | a1f34ec58b |  |
|  | 4d7cd4c0bf |  |
|  | 4adbfa3835 |  |
|  | 8a1b69c1d3 |  |
|  | a1d69f8661 |  |
|  | e05dbc529e |  |
|  | 99d33bf1f2 |  |
|  | bd1918cd71 |  |
|  | 2a967c7df4 |  |
|  | 7596aac958 |  |
|  | c73ded8ed6 |  |
|  | df1d783035 |  |
|  | 47051c3690 |  |
|  | 3fd83c637d |  |
|  | ef5afb66da |  |
|  | ecc4336bf9 |  |
|  | d2ed217796 |  |
|  | 272c7c069a |  |
|  | 23f16041cd |  |
|  | e2329adac0 |  |
|  | 4ec788ca12 |  |
|  | c1cea9d304 |  |
|  | 5c96e67bb1 |  |
|  | 7008bb6335 |  |
|  | 14561fafff |  |
|  | 89bf1edb6e |  |
|  | cc85dc05b7 |  |
|  | ae171f8b83 |  |
|  | 578dd18b34 |  |
|  | a7a51ee5cf |  |
|  | 960cc90667 |  |
|  | dea44bad8b |  |
|  | e37870ff43 |  |
|  | 3751642a27 |  |
|  | 0f386697c6 |  |
|  | 67ce103b2c |  |
|  | a8c9fa0e45 |  |
|  | b56a133fce |  |
|  | f0b3d33145 |  |
|  | 32564da9d0 |  |
|  | 8f2faf65dc |  |
|  | 1d59637051 |  |
|  | 97dc353cb0 |  |
|  | beebe2c9d3 |  |
|  | 2eb7add8c4 |  |
|  | a9fea9f611 |  |
|  | 9b62a9c238 |  |
|  | f7eb0ccfc9 |  |
|  | a0aa35667c |  |
|  | b1d4fd14bc |  |
|  | 7e8415a3a6 |  |
|  | 7f4f42894d |  |
|  | 4e876b4014 |  |
|  | 77a8a4fe08 |  |
|  | 597e5a4e5e |  |
|  | 3c31c32f62 |  |
|  | 3a93a716e4 |  |
|  | 82229a0784 |  |
|  | 5d846a69d1 |  |
|  | d21aa1cc12 |  |
|  | 7896ff51f6 |  |
|  | 5849a24a74 |  |
|  | 38c49d6b82 |  |
|  | 0d8900986d |  |
|  | 62554cebc4 |  |
|  | 067155cff5 |  |
|  | 08e68d779f |  |
|  | 05b04cd4c3 |  |
|  | be48f762a9 |  |
|  | de5b4840e9 |  |
|  | 20f9884445 |  |
|  | deb78bcd93 |  |
|  | 06239de0e9 |  |
|  | 1f904c38b3 |  |
|  | f2d0ba8fcc |  |
|  | 49d3eb1723 |  |
|  | 7c5439f48a |  |
|  | 7f2cedd31f |  |
|  | d47951a1e3 |  |
|  | f2bd0c5cf1 |  |
|  | 4362382223 |  |
|  | ba4859b33d |  |
|  | e8472714ef |  |
|  | ee6960e53e |  |
|  | dad266c955 |  |
|  | 7a234ce00a |  |
|  | a0c2ed97c8 |  |
|  | a3aa5b59cd |  |
|  | f7dbb59d13 |  |
|  | 0df27bc0f7 |  |
|  | 877e09dcc1 |  |
|  | c4439e86a2 |  |
|  | aa00dcac96 |  |
|  | 4c9a946b3b |  |
|  | 0c6e6ad226 |  |
|  | bf8f32443f |  |
|  | c2eef8bab2 |  |
|  | 2df4b307d7 |  |
|  | 3c57440c10 |  |
|  | 3e6e9829da |  |
|  | 859745f1a9 |  |
|  | ddabb8b12c |  |
|  | 16bba32124 |  |
|  | 7d87369ead |  |
|  | 7723bd28ed |  |
|  | 43f3a35150 |  |
|  | ae9f2d4d40 |  |
|  | 5a3814ff15 |  |
|  | 946c539dbd |  |
|  | 0037462f9e |  |
|  | e5edac4d0c |  |
|  | 3e1474dbbb |  |
|  | 0f502bb6c3 |  |
|  | 1eecbd3208 |  |
|  | 6e92b9180c |  |
|  | ac9012da0c |  |
|  | e3cb4f09f0 |  |
|  | 2e8600bb71 |  |
|  | d946c37cbb |  |
|  | 47a9f0bdf7 |  |
|  | 2bf900a893 |  |
|  | 99bba0b1ce |  |
|  | a8506f9022 |  |
|  | 4a40a76291 |  |
|  | fe9ddf22fc |  |
|  | 1cae1299eb |  |
|  | 8b106416c0 |  |
|  | e2088b599e |  |
|  | 56446685ca |  |
|  | 47a8d875c8 |  |
|  | 56b2d250c1 |  |
|  | abbd09b4b2 |  |
|  | 9e5fdc6614 |  |
|  | 1224a3e8cf |  |
|  | 6c3218920f |  |
|  | 02cc3ea005 |  |
|  | 641ab95a31 |  |
|  | e8b76c27e4 |  |
|  | 0dbe4d54b6 |  |
|  | 1eb6977049 |  |
|  | 3f1cfdb7d7 |  |
|  | d438d7993d |  |
|  | aa0825d642 |  |
|  | 978c20f35a |  |
|  | d535124500 |  |
|  | 01f61a2eba |  |
|  | 7d5e27d5e8 |  |
|  | d210425eef |  |
|  | 6be07da201 |  |
|  | 02b38716bf |  |
|  | d7bc624c61 |  |
|  | b7cecc9726 |  |
|  | 393a2f562b |  |
|  | 682fcec0b2 |  |
|  | d6baae525f |  |
|  | e1f2612581 |  |
|  | 080fc875eb |  |
|  | 69f417b26a |  |
|  | 80b5106611 |  |
|  | 34146c197a |  |
|  | 209a3bf302 |  |
|  | e8c41cdbcb |  |
|  | a450dd31fa |  |
|  | 7c1a309453 |  |
|  | 78b6fa96e5 |  |
|  | 1b315a9ede |  |
|  | 82df0e549d |  |
|  | f5591f9068 |  |
|  | 98c08d277d |  |
|  | facca4e2c8 |  |
|  | 764029bcd1 |  |
|  | 44cb4eca93 |  |
|  | 39888d4df6 |  |
|  | f68ea49e54 |  |
|  | 78b5e4cdfa |  |
|  | 26515b8871 |  |
|  | 74640987c7 |  |
|  | d6154645c7 |  |
|  | faed43704b |  |
|  | 6fba31ce34 |  |
|  | 112cead00b |  |
|  | 9e2558bd56 |  |
|  | 019058226f |  |
|  | ac0040f67d |  |
|  | 38f341f12d |  |
|  | 26ad22743f |  |
|  | 46c2b8a565 |  |
|  | 5cbb59f2b8 |  |
|  | f29fa1cfdf |  |
|  | c69951d6e1 |  |
|  | f406f27d9c |  |
|  | 36ea208e12 |  |
|  | 17e0774189 |  |
|  | 3162c2459d |  |
|  | 7cad6c62a3 |  |
|  | eb2ddf6fa2 |  |
|  | 2bc2902fed |  |
|  | b362362291 |  |
|  | 32bb5c7523 |  |
|  | a2b76c68a0 |  |
|  | 62132919e1 |  |
|  | b06929f6df |  |
|  | 0f33de157b |  |
|  | 03a074ebe7 |  |
|  | 4d12b6a4fd |  |
|  | 26bb15e1fb |  |
|  | 1bf92c7881 |  |
|  | eefe0b2eec |  |
|  | de6c6f0cd9 |  |
|  | 309d3aa1ec |  |
.github/workflows/audit.yaml (36 changes, vendored)

@@ -17,33 +17,51 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
+    runs-on: ${{ matrix.system.os }}
     strategy:
       matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+        system:
+          - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
+          - { os: ubuntu-latest, shell: bash }
+          - { os: macos-latest, shell: bash }
+    defaults:
+      run:
+        shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest coverage[toml]
+      - name: Setup for Windows run
+        if: runner.os == 'Windows'
+        run: |
+          python -m pip install --upgrade pywin32
       - name: Package audits (with coverage)
-        if: ${{ inputs.with_coverage == 'true' }}
+        if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
         run: |
           . share/spack/setup-env.sh
           coverage run $(which spack) audit packages
-          coverage run $(which spack) audit externals
+          coverage run $(which spack) -d audit externals
           coverage combine
           coverage xml
       - name: Package audits (without coverage)
-        if: ${{ inputs.with_coverage == 'false' }}
+        if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
         run: |
           . share/spack/setup-env.sh
-          $(which spack) audit packages
+          spack -d audit packages
-          $(which spack) audit externals
+          spack -d audit externals
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - name: Package audits (without coverage)
+        if: ${{ runner.os == 'Windows' }}
+        run: |
+          . share/spack/setup-env.sh
+          spack -d audit packages
+          ./share/spack/qa/validate_last_exit.ps1
+          spack -d audit externals
+          ./share/spack/qa/validate_last_exit.ps1
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         if: ${{ inputs.with_coverage == 'true' }}
         with:
           flags: unittests,audits
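The audit change above swaps a flat OS list for a matrix of objects, so each entry carries its own default shell that is applied through `defaults.run.shell`. The sketch below illustrates that pattern in isolation; the job and step names are hypothetical, not part of Spack's workflow.

```yaml
# Hypothetical minimal job showing a per-entry shell in a matrix.
jobs:
  demo:
    runs-on: ${{ matrix.system.os }}
    strategy:
      matrix:
        system:
          - { os: ubuntu-latest, shell: bash }
          - { os: windows-latest, shell: 'powershell {0}' }  # custom shell template
    defaults:
      run:
        shell: ${{ matrix.system.shell }}  # every `run:` step inherits this shell
    steps:
      - run: echo "runs under the matrix-selected shell"
```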
.github/workflows/bootstrap-test.sh (5 changes, vendored)

@@ -1,7 +1,8 @@
 #!/bin/bash
-set -ex
+set -e
 source share/spack/setup-env.sh
+$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack -d solve zlib
+$PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
 exit 0
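Replacing the hard-coded `-d` with `$SPACK_FLAGS` lets each caller decide the flags. A hedged usage sketch, assuming the script is invoked directly with the environment it expects:

```bash
# Hypothetical invocation; PYTHON and SPACK_FLAGS are supplied by the caller.
export PYTHON=python3
export SPACK_FLAGS="-d"        # debug run; set to "" for a quiet run
./.github/workflows/bootstrap-test.sh
```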
.github/workflows/bootstrap.yml (346 changes, vendored)

@@ -13,118 +13,22 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  fedora-clingo-sources:
+  distros-clingo-sources:
     runs-on: ubuntu-latest
-    container: "fedora:latest"
+    container: ${{ matrix.image }}
+    strategy:
+      matrix:
+        image: ["fedora:latest", "opensuse/leap:latest"]
     steps:
-      - name: Install dependencies
+      - name: Setup Fedora
+        if: ${{ matrix.image == 'fedora:latest' }}
         run: |
           dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
+            bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Setup OpenSUSE
+        if: ${{ matrix.image == 'opensuse/leap:latest' }}
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-            make patch unzip xz-utils python3 python3-dev tree \
-            cmake bison
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-binaries-and-patchelf:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-            make patch unzip xz-utils python3 python3-dev tree
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  opensuse-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "opensuse/leap:latest"
-    steps:
-      - name: Install dependencies
         run: |
           # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
           zypper update -y || zypper update -y
@@ -133,15 +37,9 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - name: Setup repo
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          git --version
-          . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -151,77 +49,100 @@ jobs:
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/

-  macos-clingo-sources:
+  clingo-sources:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
-      - name: Install dependencies
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
-          brew install cmake bison@2.7 tree
+          brew install cmake bison tree
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
-          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/

-  macos-clingo-binaries:
+  gnupg-sources:
-    runs-on: ${{ matrix.macos-version }}
+    runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        macos-version: ['macos-11', 'macos-12']
+        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
     steps:
-      - name: Install dependencies
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
-          brew install tree
+          brew install tree gawk
+          sudo rm -rf $(command -v gpg gpg2)
+      - name: Setup Ubuntu
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
+        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-      - name: Bootstrap clingo
-        run: |
-          set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
-            not_found=1
-            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            echo "Testing $ver_dir"
-            if [[ -d "$ver_dir" ]] ; then
-              if $ver_dir/python --version ; then
-                export PYTHON="$ver_dir/python"
-                not_found=0
-                old_path="$PATH"
-                export PATH="$ver_dir:$PATH"
-                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
-                export PATH="$old_path"
-              fi
-            fi
-            # NOTE: test all pythons that exist, not all do on 12
-          done
-
-  ubuntu-clingo-binaries:
-    runs-on: ubuntu-20.04
-    steps:
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
         with:
           fetch-depth: 0
-      - name: Setup repo
+      - name: Bootstrap GnuPG
         run: |
-          git --version
-          . .github/workflows/setup_git.sh
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/

+  from-binaries:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+    steps:
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
+        run: |
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Setup Ubuntu
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
+        run: |
+          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+      - name: Checkout
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: |
+            3.8
+            3.9
+            3.10
+            3.11
+            3.12
+      - name: Set bootstrap sources
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
       - name: Bootstrap clingo
         run: |
-          set -ex
+          set -e
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
             not_found=1
             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            echo "Testing $ver_dir"
             if [[ -d "$ver_dir" ]] ; then
+              echo "Testing $ver_dir"
               if $ver_dir/python --version ; then
                 export PYTHON="$ver_dir/python"
                 not_found=0
@@ -236,122 +157,9 @@ jobs:
             exit 1
           fi
         done

-  ubuntu-gnupg-binaries:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc patchelf gfortran git gzip \
-            make patch unzip xz-utils python3 python3-dev tree
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap GnuPG
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-gnupg-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc patchelf gfortran git gzip \
-            make patch unzip xz-utils python3 python3-dev tree \
-            gawk
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap GnuPG
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-binaries:
-    runs-on: macos-latest
-    steps:
-      - name: Install dependencies
-        run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
           spack -d gpg list
           tree ~/.spack/bootstrap/store/

-  macos-gnupg-sources:
-    runs-on: macos-latest
-    steps:
-      - name: Install dependencies
-        run: |
-          brew install gawk tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-
-# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
-# introduce breaking behaviorso we have to set `safe.directory` in gitconfig ourselves.
-# See:
-#  - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-#  - https://github.com/actions/checkout/issues/760
-#  - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
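The rewrite above collapses several near-duplicate per-distro jobs into single matrix jobs whose setup steps are gated with `if:` on the matrix value. A minimal hedged sketch of that consolidation pattern — the job name and installed packages are illustrative, not taken from Spack's workflow:

```yaml
# Hypothetical job: one definition, several container images.
jobs:
  multi-distro:
    runs-on: ubuntu-latest
    container: ${{ matrix.image }}
    strategy:
      matrix:
        image: ["fedora:latest", "opensuse/leap:latest"]
    steps:
      - name: Setup Fedora
        if: ${{ matrix.image == 'fedora:latest' }}
        run: dnf install -y git
      - name: Setup OpenSUSE
        if: ${{ matrix.image == 'opensuse/leap:latest' }}
        run: zypper install -y git
      - run: git --version  # distro-independent steps are written only once
```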
.github/workflows/build-containers.yml (22 changes, vendored)

@@ -45,17 +45,18 @@ jobs:
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+          [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
+          [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
+          [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -87,9 +88,9 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
         with:
-          name: dockerfiles
+          name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles

       - name: Set up QEMU
@@ -120,3 +121,14 @@ jobs:
         push: ${{ github.event_name != 'pull_request' }}
         tags: ${{ steps.docker_meta.outputs.tags }}
         labels: ${{ steps.docker_meta.outputs.labels }}

+  merge-dockerfiles:
+    runs-on: ubuntu-latest
+    needs: deploy-images
+    steps:
+      - name: Merge Artifacts
+        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
+        with:
+          name: dockerfiles
+          pattern: dockerfiles_*
+          delete-merged: true
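Since v4 of upload-artifact, two jobs can no longer write into the same artifact name, which is why each matrix job above uploads `dockerfiles_<name>` and a follow-up job merges the pieces back into a single `dockerfiles` artifact. A hedged sketch of the general pattern — job and artifact names here are illustrative:

```yaml
# Hypothetical two-job pattern: unique per-matrix artifact names, merged after.
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        variant: [a, b]
    steps:
      - uses: actions/upload-artifact@v4
        with:
          name: out_${{ matrix.variant }}  # must be unique per job in v4
          path: out/
  merge:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/upload-artifact/merge@v4
        with:
          name: out            # the single combined artifact
          pattern: out_*       # which artifacts to combine
          delete-merged: true  # drop the per-job pieces afterwards
```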
.github/workflows/ci.yaml (2 changes, vendored)

@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
.github/workflows/style/requirements.txt (4 changes, vendored)

@@ -1,7 +1,7 @@
-black==24.4.0
+black==24.4.2
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.9
+types-six==1.16.21.20240513
 vermin==1.6.0
.github/workflows/unit_tests.yaml (20 changes, vendored)

@@ -51,7 +51,7 @@ jobs:
           on_develop: false

     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@ jobs:
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: shelltests,linux
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -160,7 +160,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,linux,clingo
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,10 +195,10 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [macos-latest, macos-14]
+        os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,macos
           token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/valid-style.yml (6 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - name: Setup repo and non-root user
         run: |
           git --version
.github/workflows/windows_python.yml (10 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -33,7 +33,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,windows
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -41,7 +41,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -59,7 +59,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,windows
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -67,7 +67,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
CHANGELOG.md (45 changes)

@@ -1,3 +1,48 @@
+# v0.21.2 (2024-03-01)
+
+## Bugfixes
+
+- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
+- Fix setup-env script, when going back and forth between instances (#40924)
+- Fix using fully-qualified namespaces from root specs (#41957)
+- Fix a bug when a required provider is requested for multiple virtuals (#42088)
+- OCI buildcaches:
+  - only push in parallel when forking (#42143)
+  - use pickleable errors (#42160)
+- Fix using sticky variants in externals (#42253)
+- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
+
+## Package updates
+- rust: add v1.75, rework a few variants (#41161,#41903)
+- py-transformers: add v4.35.2 (#41266)
+- mgard: fix OpenMP on AppleClang (#42933)
+
+# v0.21.1 (2024-01-11)
+
+## New features
+- Add support for reading buildcaches created by Spack v0.22 (#41773)
+
+## Bugfixes
+
+- spack graph: fix coloring with environments (#41240)
+- spack info: sort variants in --variants-by-name (#41389)
+- Spec.format: error on old style format strings (#41934)
+- ASP-based solver:
+  - fix infinite recursion when computing concretization errors (#41061)
+  - don't error for type mismatch on preferences (#41138)
+  - don't emit spurious debug output (#41218)
+- Improve the error message for deprecated preferences (#41075)
+- Fix MSVC preview version breaking clingo build on Windows (#41185)
+- Fix multi-word aliases (#41126)
+- Add a warning for unconfigured compiler (#41213)
+- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
+- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
+- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
+
+## Package updates
+- root: add a webgui patch to address security issue (#41404)
+- BerkeleyGW: update source urls (#38218)
+
 # v0.21.0 (2023-11-11)

 `v0.21.0` is a major feature release.
README.md

@@ -32,7 +32,7 @@

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, and many supercomputers. Spack is non-destructive: installing a
+macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.

@@ -88,7 +88,7 @@ Resources:
   [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
 * [**Github Discussions**](https://github.com/spack/spack/discussions):
   for Q&A and discussions. Note the pinned discussions for announcements.
-* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
+* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
   only for announcements. Please use other venues for discussions.
@@ -144,3 +144,5 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
+
+exit $LASTEXITCODE
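The added `exit $LASTEXITCODE` makes the PowerShell wrapper report the wrapped command's status: a script otherwise finishes with status 0 even when the command it launched failed. A minimal hedged illustration (the command is hypothetical):

```powershell
# Hypothetical wrapper: run an external command, then propagate its status.
python -c "import sys; sys.exit(3)"   # sets $LASTEXITCODE to 3
exit $LASTEXITCODE                    # without this line the script exits 0
```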
@@ -15,7 +15,7 @@ concretizer:
   # as possible, rather than building. If `false`, we'll always give you a fresh
   # concretization. If `dependencies`, we'll only reuse dependencies but
   # give you a fresh concretization for your root specs.
-  reuse: dependencies
+  reuse: true
   # Options that tune which targets are considered for concretization. The
   # concretization process is very sensitive to the number targets, and the time
   # needed to reach a solution increases noticeably with the number of targets
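With the default flipped to `reuse: true`, users who prefer the previous behavior can still override it per configuration scope. A hedged example using Spack's config CLI — the scope choice is illustrative:

```console
$ spack config --scope user add concretizer:reuse:dependencies
$ spack config get concretizer   # confirm the effective setting
```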
@@ -1,16 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default configuration for Spack's module file generation.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/modules.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/modules.yaml
-# -------------------------------------------------------------------------
-modules: {}
@@ -19,7 +19,6 @@ packages:
     - apple-clang
     - clang
    - gcc
-    - intel
     providers:
       elf: [libelf]
       fuse: [macfuse]
@@ -15,9 +15,10 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
+    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
+      armci: [armcimpi]
       blas: [openblas, amdblis]
       D: [ldc]
       daal: [intel-oneapi-daal]
@@ -35,11 +36,11 @@ packages:
       java: [openjdk, jdk, ibm-java]
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
+      libc: [glibc, musl]
       libgfortran: [ gcc-runtime ]
-      libglx: [mesa+glx, mesa18+glx]
+      libglx: [mesa+glx]
       libifcore: [ intel-oneapi-runtime ]
       libllvm: [llvm]
-      libosmesa: [mesa+osmesa, mesa18+osmesa]
       lua-lang: [lua, lua-luajit-openresty, lua-luajit]
       luajit: [lua-luajit-openresty, lua-luajit]
       mariadb-client: [mariadb-c-client, mariadb]
lib/spack/docs/_templates/layout.html (new file, 12 lines, vendored)

@@ -0,0 +1,12 @@
+{% extends "!layout.html" %}
+
+{%- block extrahead %}
+  <!-- Google tag (gtag.js) -->
+  <script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
+  <script>
+    window.dataLayer = window.dataLayer || [];
+    function gtag(){dataLayer.push(arguments);}
+    gtag('js', new Date());
+    gtag('config', 'G-S0PQ7WV75K');
+  </script>
+{% endblock %}
@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
 installed them. As you've seen, spack packages are installed into long
 paths with hashes, and you need a way to get them into your path. The
 easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
-described in the next section.
+described in this section.
 
 Some more advanced ways to use Spack packages include:
@@ -959,7 +959,86 @@ use ``spack find --loaded``.
 You can also use ``spack load --list`` to get the same output, but it
 does not have the full set of query options that ``spack find`` offers.
 
-We'll learn more about Spack's spec syntax in the next section.
+We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
+
+
+.. _extensions:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python packages and virtual environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack can install a large number of Python packages. Their names are
+typically prefixed with ``py-``. Installing and using them is no
+different from any other package:
+
+.. code-block:: console
+
+   $ spack install py-numpy
+   $ spack load py-numpy
+   $ python3
+   >>> import numpy
+
+The ``spack load`` command sets the ``PATH`` variable so that the right Python
+executable is used, and makes sure that ``numpy`` and its dependencies can be
+located in the ``PYTHONPATH``.
+
+Spack is different from other Python package managers in that it installs
+every package into its *own* prefix. This is in contrast to ``pip``, which
+installs all packages into the same prefix, be it in a virtual environment
+or not.
+
+For many users, **virtual environments** are more convenient than repeated
+``spack load`` commands, particularly when working with multiple Python
+packages. Fortunately Spack supports environments itself, which together
+with a view are no different from Python virtual environments.
+
+The recommended way of working with Python extensions such as ``py-numpy``
+is through :ref:`Environments <environments>`. The following example creates
+a Spack environment with ``numpy`` in the current working directory. It also
+puts a filesystem view in ``./view``, which is a more traditional combined
+prefix for all packages in the environment.
+
+.. code-block:: console
+
+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install
+
+Now you can activate the environment and start using the packages:
+
+.. code-block:: console
+
+   $ spack env activate .
+   $ python3
+   >>> import numpy
+
+The environment view is also a virtual environment, which is useful if you are
+sharing the environment with others who are unfamiliar with Spack. They can
+either use the Python executable directly:
+
+.. code-block:: console
+
+   $ ./view/bin/python3
+   >>> import numpy
+
+or use the activation script:
+
+.. code-block:: console
+
+   $ source ./view/bin/activate
+   $ python3
+   >>> import numpy
+
+In general, there should not be much difference between ``spack env activate``
+and using the virtual environment. The main advantage of ``spack env activate``
+is that it knows about more packages than just Python packages, and it may set
+additional runtime variables that are not covered by the virtual environment
+activation script.
+
+See :ref:`environments` for a more in-depth description of Spack
+environments and customizations to views.
+
 
 .. _sec-specs:
@@ -1354,22 +1433,12 @@ the reserved keywords ``platform``, ``os`` and ``target``:
    $ spack install libelf os=ubuntu18.04
    $ spack install libelf target=broadwell
 
-or together by using the reserved keyword ``arch``:
-
-.. code-block:: console
-
-   $ spack install libelf arch=cray-CNL10-haswell
-
 Normally users don't have to bother specifying the architecture if they
 are installing software for their current host, as in that case the
 values will be detected automatically. If you need fine-grained control
 over which packages use which targets (or over *all* packages' default
 target), see :ref:`package-preferences`.
 
-.. admonition:: Cray machines
-
-   The situation is a little bit different for Cray machines and a detailed
-   explanation on how the architecture can be set on them can be found at :ref:`cray-support`
-
 .. _support-for-microarchitectures:
@@ -1705,165 +1774,6 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.
 
-
-.. _extensions:
-
----------------------------
-Extensions & Python support
----------------------------
-
-Spack's installation model assumes that each package will live in its
-own install prefix. However, certain packages are typically installed
-*within* the directory hierarchy of other packages. For example,
-`Python <https://www.python.org>`_ packages are typically installed in the
-``$prefix/lib/python-2.7/site-packages`` directory.
-
-In Spack, installation prefixes are immutable, so this type of installation
-is not directly supported. However, it is possible to create views that
-allow you to merge install prefixes of multiple packages into a single new prefix.
-Views are a convenient way to get a more traditional filesystem structure.
-Using *extensions*, you can ensure that Python packages always share the
-same prefix in the view as Python itself. Suppose you have
-Python installed like so:
-
-.. code-block:: console
-
-   $ spack find python
-   ==> 1 installed packages.
-   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
-   python@2.7.8
-
-.. _cmd-spack-extensions:
-
-^^^^^^^^^^^^^^^^^^^^
-``spack extensions``
-^^^^^^^^^^^^^^^^^^^^
-
-You can find extensions for your Python installation like this:
-
-.. code-block:: console
-
-   $ spack extensions python
-   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
-   ==> 36 extensions:
-   geos          py-ipython     py-pexpect    py-pyside            py-sip
-   py-basemap    py-libxml2     py-pil        py-pytz              py-six
-   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
-   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
-   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
-   py-epydoc     py-mx          py-pylint     py-scipy
-   py-gnuplot    py-nose        py-pyparsing  py-setuptools
-   py-h5py       py-numpy       py-pyqt       py-shiboken
-
-   ==> 12 installed:
-   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
-   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
-   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
-   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
-   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0
-
-The extensions are a subset of what's returned by ``spack list``, and
-they are packages like any other. They are installed into their own
-prefixes, and you can see this with ``spack find --paths``:
-
-.. code-block:: console
-
-   $ spack find --paths py-numpy
-   ==> 1 installed packages.
-   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
-   py-numpy@1.9.1  ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
-
-However, even though this package is installed, you cannot use it
-directly when you run ``python``:
-
-.. code-block:: console
-
-   $ spack load python
-   $ python
-   Python 2.7.8 (default, Feb 17 2015, 01:35:25)
-   [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
-   Type "help", "copyright", "credits" or "license" for more information.
-   >>> import numpy
-   Traceback (most recent call last):
-     File "<stdin>", line 1, in <module>
-   ImportError: No module named numpy
-   >>>
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Using Extensions in Environments
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The recommended way of working with extensions such as ``py-numpy``
-above is through :ref:`Environments <environments>`. For example,
-the following creates an environment in the current working directory
-with a filesystem view in the ``./view`` directory:
-
-.. code-block:: console
-
-   $ spack env create --with-view view --dir .
-   $ spack -e . add py-numpy
-   $ spack -e . concretize
-   $ spack -e . install
-
-We recommend environments for two reasons. Firstly, environments
-can be activated (requires :ref:`shell-support`):
-
-.. code-block:: console
-
-   $ spack env activate .
-
-which sets all the right environment variables such as ``PATH`` and
-``PYTHONPATH``. This ensures that
-
-.. code-block:: console
-
-   $ python
-   >>> import numpy
-
-works. Secondly, even without shell support, the view ensures
-that Python can locate its extensions:
-
-.. code-block:: console
-
-   $ ./view/bin/python
-   >>> import numpy
-
-See :ref:`environments` for a more in-depth description of Spack
-environments and customizations to views.
-
-^^^^^^^^^^^^^^^^^^^^
-Using ``spack load``
-^^^^^^^^^^^^^^^^^^^^
-
-A more traditional way of using Spack and extensions is ``spack load``
-(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
-in your current shell, and Python itself will be available in the ``PATH``:
-
-.. code-block:: console
-
-   $ spack load py-numpy
-   $ python
-   >>> import numpy
-
-The loaded packages can be checked using ``spack find --loaded``.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Loading Extensions via Modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Apart from ``spack env activate`` and ``spack load``, you can load numpy
-through your environment modules (using ``environment-modules`` or
-``lmod``). This will also add the extension to the ``PYTHONPATH`` in
-your current shell.
-
-.. code-block:: console
-
-   $ module load <name of numpy module>
-
-If you do not know the name of the specific numpy module you wish to
-load, you can use the ``spack module tcl|lmod loads`` command to get
-the name of the module from the Spack spec.
-
 -----------------------
 Filesystem requirements
 -----------------------
@@ -21,23 +21,86 @@ is the following:
 --------------------------------
 Reuse already installed packages
 --------------------------------
 
-The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
-whether it will do a "fresh" installation and prefer the latest settings from
-``package.py`` files and ``packages.yaml`` (``false``).
-You can use:
+The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The
+attribute can either be a single value, or an object for more complex configurations.
+
+In the former case ("single value") it allows Spack to:
+
+1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
+2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
+3. Disregard reusing installed packages and buildcaches, when ``false``
+
+If finer control over which specs are reused is needed, the value of this attribute can be
+an object, with the following keys:
+
+1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
+2. ``from``: list of sources from which reused specs are taken
+
+Each source in ``from`` is itself an object:
+
+.. list-table:: Attributes for a source of reusable specs
+   :header-rows: 1
+
+   * - Attribute name
+     - Description
+   * - type (mandatory, string)
+     - Can be ``local``, ``buildcache``, or ``external``
+   * - include (optional, list of specs)
+     - If present, reusable specs must match at least one of the constraints in the list
+   * - exclude (optional, list of specs)
+     - If present, reusable specs must not match any of the constraints in the list
+
+For instance, the following configuration:
+
+.. code-block:: yaml
+
+   concretizer:
+     reuse:
+       roots: true
+       from:
+       - type: local
+         include:
+         - "%gcc"
+         - "%clang"
+
+tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang`` that are installed
+in the local store. Any spec from remote buildcaches is disregarded.
+
+To reduce the boilerplate in configuration files, default values for the ``include`` and
+``exclude`` options can be pushed up one level:
+
+.. code-block:: yaml
+
+   concretizer:
+     reuse:
+       roots: true
+       include:
+       - "%gcc"
+       from:
+       - type: local
+       - type: buildcache
+       - type: local
+         include:
+         - "foo %oneapi"
+
+In the example above we reuse all specs compiled with ``gcc`` from the local store
+and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
+specs overrides the default ``include`` attribute.
+
+For one-off concretizations, there are command line arguments for each of the simple "single value"
+configurations. This means a user can:
 
 .. code-block:: console
 
    % spack install --reuse <spec>
 
-to enable reuse for a single installation, and you can use:
+to enable reuse for a single installation, or:
 
 .. code-block:: console
 
    % spack install --fresh <spec>
 
 to do a fresh install if ``reuse`` is enabled by default.
-``reuse: dependencies`` is the default.
 
 .. seealso::
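For completeness, the simple "single value" form described above can be written directly in the concretizer configuration; a minimal sketch (``dependencies`` is one of the three documented values):

.. code-block:: yaml

   concretizer:
     reuse: dependencies   # reuse installed packages/buildcaches for dependencies only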
@@ -147,6 +147,15 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
    def autoreconf(self, spec, prefix):
        which("bash")("autogen.sh")
 
+If the ``package.py`` has build instructions in a separate
+:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
+
+.. code-block:: python
+
+   class AutotoolsBuilder(AutotoolsBuilder):
+       def autoreconf(self, pkg, spec, prefix):
+           which("bash")("autogen.sh")
+
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::
 
     spack list -d oneAPI
@@ -718,23 +718,45 @@ command-line tool, or C/C++/Fortran program with optional Python
 modules? The former should be prepended with ``py-``, while the
 latter should not.
 
-""""""""""""""""""""""
-extends vs. depends_on
-""""""""""""""""""""""
+""""""""""""""""""""""""""""""
+``extends`` vs. ``depends_on``
+""""""""""""""""""""""""""""""
 
-This is very similar to the naming dilemma above, with a slight twist.
 As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
 ``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
 that the extension and extendee share the same prefix in views.
 This allows the user to import a Python module without
 having to add that module to ``PYTHONPATH``.
 
-When deciding between ``extends`` and ``depends_on``, the best rule of
-thumb is to check the installation prefix. If Python libraries are
-installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
-should use ``extends``. If Python libraries are installed elsewhere
-or the only files that get installed reside in ``<prefix>/bin``, then
-don't use ``extends``.
+Additionally, ``extends("python")`` adds a dependency on the package
+``python-venv``. This improves isolation from the system, whether
+it's during the build or at runtime: user and system site packages
+cannot accidentally be used by any package that ``extends("python")``.
+
+As a rule of thumb: if a package does not install any Python modules
+of its own, and merely puts a Python script in the ``bin`` directory,
+then there is no need for ``extends``. If the package installs modules
+in the ``site-packages`` directory, it requires ``extends``.
+
+"""""""""""""""""""""""""""""""""""""
+Executing ``python`` during the build
+"""""""""""""""""""""""""""""""""""""
+
+Whenever you need to execute a Python command or pass the path of the
+Python interpreter to the build system, it is best to use the global
+variable ``python`` directly. For example:
+
+.. code-block:: python
+
+   @run_before("install")
+   def recythonize(self):
+       python("setup.py", "clean")  # use the `python` global
+
+As mentioned in the previous section, ``extends("python")`` adds an
+automatic dependency on ``python-venv``, which is a virtual environment
+that guarantees build isolation. The ``python`` global always refers to
+the correct Python interpreter, whether the package uses ``extends("python")``
+or ``depends_on("python")``.
 
 ^^^^^^^^^^^^^^^^^^^^^
 Alternatives to Spack
@@ -11,7 +11,8 @@ Chaining Spack Installations
 
 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
-you can add it as an entry to ``upstreams.yaml``:
+you can add it as an entry to ``upstreams.yaml`` at any of the
+:ref:`configuration-scopes`:
 
 .. code-block:: yaml
@@ -22,7 +23,8 @@ you can add it as an entry to ``upstreams.yaml``:
        install_tree: /path/to/another/spack/opt/spack
 
 ``install_tree`` must point to the ``opt/spack`` directory inside of the
-Spack base directory.
+Spack base directory, or the location of the ``install_tree`` defined
+in :ref:`config.yaml <config-yaml>`.
 
 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
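The diff shows only fragments of the referenced ``upstreams.yaml`` example; a complete entry, sketched here with a placeholder instance name and path, has this shape:

.. code-block:: yaml

   # upstreams.yaml -- instance name and path are placeholders
   upstreams:
     spack-instance-1:
       install_tree: /path/to/another/spack/opt/spack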
@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
 --------------------
 
 Path to custom certificates for SSL verification. The value can be a
-filesystem path, or an environment variable that expands to a file path.
+filesystem path, or an environment variable that expands to an absolute file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.
@@ -160,6 +160,9 @@ in the subprocess calling ``curl``.
 If ``url_fetch_method:urllib`` then files and directories are supported i.e.
 ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
 will work.
+In all cases the expanded path must be absolute for Spack to use the certificates.
+Certificates relative to an environment can be created by prepending the path variable
+with the Spack configuration variable ``$env``.
 
 --------------------
 ``checksum``
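Putting the pieces together, a hedged sketch of the two documented styles of ``ssl_certs`` values in ``config.yaml`` (the paths are placeholders):

.. code-block:: yaml

   config:
     ssl_certs: $SSL_CERT_FILE    # environment variable; must expand to an absolute path
     # or, relative to an active environment:
     # ssl_certs: $env/relative/path/to/cacert.pem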
@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
-   * - Ubuntu 18.04
-     - ``ubuntu:18.04``
-     - ``spack/ubuntu-bionic``
    * - Ubuntu 20.04
      - ``ubuntu:20.04``
      - ``spack/ubuntu-focal``
    * - Ubuntu 22.04
      - ``ubuntu:22.04``
      - ``spack/ubuntu-jammy``
+   * - Ubuntu 24.04
+     - ``ubuntu:24.04``
+     - ``spack/ubuntu-noble``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
@@ -227,12 +227,12 @@ The OS that are currently supported are summarized in the table below:
    * - Rocky Linux 9
      - ``rockylinux:9``
      - ``spack/rockylinux9``
-   * - Fedora Linux 37
-     - ``fedora:37``
-     - ``spack/fedora37``
-   * - Fedora Linux 38
-     - ``fedora:38``
-     - ``spack/fedora38``
+   * - Fedora Linux 39
+     - ``fedora:39``
+     - ``spack/fedora39``
+   * - Fedora Linux 40
+     - ``fedora:40``
+     - ``spack/fedora40``
@@ -552,11 +552,11 @@ With either interpreter you can run a single command:
 .. code-block:: console
 
-   $ spack python -c 'import distro; distro.linux_distribution()'
-   ('Ubuntu', '18.04', 'Bionic Beaver')
+   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
+   ...
 
-   $ spack python -i ipython -c 'import distro; distro.linux_distribution()'
-   Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
+   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
+   Out[1]: ...
 
 or a file:
@@ -1071,9 +1071,9 @@ Announcing a release
 We announce releases in all of the major Spack communication channels.
 Publishing the release takes care of GitHub. The remaining channels are
-Twitter, Slack, and the mailing list. Here are the steps:
+X, Slack, and the mailing list. Here are the steps:
 
-#. Announce the release on Twitter.
+#. Announce the release on X.
 
    * Compose the tweet on the ``@spackpm`` account per the
      ``spack-twitter`` slack channel.
@@ -142,12 +142,8 @@ user's prompt to begin with the environment name in brackets.
    $ spack env activate -p myenv
    [myenv] $ ...
 
-The ``activate`` command can also be used to create a new environment, if it is
-not already defined, by adding the ``--create`` flag. Managed and anonymous
-environments, anonymous environments are explained in the next section,
-can both be created using the same flags that `spack env create` accepts.
-If an environment already exists then spack will simply activate it and ignore the
-create specific flags.
+The ``activate`` command can also be used to create a new environment if it does not already
+exist.
 
 .. code-block:: console
@@ -176,21 +172,36 @@ environment will remove the view from the user environment.
 Anonymous Environments
 ^^^^^^^^^^^^^^^^^^^^^^
 
-Any directory can be treated as an environment if it contains a file
-``spack.yaml``. To load an anonymous environment, use:
+Apart from managed environments, Spack also supports anonymous environments.
+
+Anonymous environments can be placed in any directory of choice.
+
+.. note::
+
+   When uninstalling packages, Spack asks the user to confirm the removal of packages
+   that are still used in a managed environment. This is not the case for anonymous
+   environments.
+
+To create an anonymous environment, use one of the following commands:
 
 .. code-block:: console
 
-   $ spack env activate -d /path/to/directory
+   $ spack env create --dir my_env
+   $ spack env create ./my_env
 
-Anonymous specs can be created in place using the command:
+As a shorthand, you can also create an anonymous environment upon activation if it does not
+already exist:
 
 .. code-block:: console
 
-   $ spack env create -d .
+   $ spack env activate --create ./my_env
 
-In this case Spack simply creates a ``spack.yaml`` file in the requested
-directory.
+For convenience, Spack can also place an anonymous environment in a temporary directory for you:
+
+.. code-block:: console
+
+   $ spack env activate --temp
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Environment Sensitive Commands
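For reference, an anonymous environment is simply a directory containing a ``spack.yaml`` manifest, which can be as small as the following sketch (the spec list is illustrative):

.. code-block:: yaml

   # ./my_env/spack.yaml -- minimal anonymous environment
   spack:
     specs:
     - zlib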
@@ -449,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the
 user; and can be included in ``.bashrc`` files, etc. The ``loads``
 file may also be copied out of the environment, renamed, etc.
 
+
+.. _environment_include_concrete:
+
+------------------------------
+Included Concrete Environments
+------------------------------
+
+Spack can create an environment based on information from already
+established environments. You can think of it as a combination of existing
+environments. It will gather information from the existing environment's
+``spack.lock`` and use that during the creation of this included concrete
+environment. When an included concrete environment is created it will generate
+a ``spack.lock`` file for the newly created environment.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Creating included environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To create a combined concrete environment, you must have at least one existing
+concrete environment. You will use the command ``spack env create`` with the
+argument ``--include-concrete`` followed by the name or path of the environment
+you'd like to include. Here is an example of how to create a combined environment
+from the command line:
+
+.. code-block:: console
+
+   $ spack env create myenv
+   $ spack -e myenv add python
+   $ spack -e myenv concretize
+   $ spack env create --include-concrete myenv included_env
+
+You can also include an environment directly in the ``spack.yaml`` file. It
+involves adding the ``include_concrete`` heading in the yaml followed by the
+absolute path to the independent environments.
+
+.. code-block:: yaml
+
+   spack:
+     specs: []
+     concretizer:
+       unify: true
+     include_concrete:
+     - /absolute/path/to/environment1
+     - /absolute/path/to/environment2
+
+Once the ``spack.yaml`` has been updated you must concretize the environment to
+get the concrete specs from the included environments.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Updating an included environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If changes were made to the base environment and you want that reflected in the
+included environment, you will need to reconcretize both the base environment and the
+included environment for the change to take effect. For example:
+
+.. code-block:: console
+
+   $ spack env create myenv
+   $ spack -e myenv add python
+   $ spack -e myenv concretize
+   $ spack env create --include-concrete myenv included_env
+
+   $ spack -e myenv find
+   ==> In environment myenv
+   ==> Root specs
+   python
+
+   ==> 0 installed packages
+
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   python
+
+   ==> 0 installed packages
+
+Here we see that ``included_env`` has access to the python package through
+the ``myenv`` environment. But if we were to add another spec to ``myenv``,
+``included_env`` will not be able to access the new information.
+
+.. code-block:: console
+
+   $ spack -e myenv add perl
+   $ spack -e myenv concretize
+   $ spack -e myenv find
+   ==> In environment myenv
+   ==> Root specs
+   perl  python
+
+   ==> 0 installed packages
+
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   python
+
+   ==> 0 installed packages
+
+It isn't until you run the ``spack concretize`` command that the combined
+environment will get the updated information from the reconcretized base environment.
+
+.. code-block:: console
+
+   $ spack -e included_env concretize
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   perl  python
+
+   ==> 0 installed packages
+
 .. _environment-configuration:
 
 ------------------------
@@ -800,6 +930,7 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.
 
+
 ----------------
 Filesystem Views
 ----------------
@@ -1033,7 +1164,7 @@ other targets to depend on the environment installation.
 A typical workflow is as follows:
 
-.. code:: console
+.. code-block:: console
 
    spack env create -d .
    spack -e . add perl
@@ -1126,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to
 dependencies. Below we show a use case where a spec is installed with verbose
 output (``spack install --verbose``) while its dependencies are installed silently:
 
-.. code:: console
+.. code-block:: console
 
    $ spack env depfile -o Makefile
@@ -1148,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
 variable. Assuming we have an active and concrete environment, we generate the
 associated ``Makefile`` with a prefix ``example``:
 
-.. code:: console
+.. code-block:: console
 
    $ spack env depfile -o env.mk --make-prefix example
@@ -478,6 +478,13 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,
 all other fields from the compilers config can be added to the
 ``extra_attributes`` field for an external representing a compiler.
 
+Note that the format for the ``paths`` field in the
+``extra_attributes`` section is different than in the ``compilers``
+config. For compilers configured as external packages, the section is
+named ``compilers`` and the dictionary maps language names (``c``,
+``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
+``fc``, and ``f77``.
+
 .. code-block:: yaml
 
    packages:
@@ -493,11 +500,10 @@ all other fields from the compilers config can be added to the
      - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
        prefix: /usr
        extra_attributes:
-         paths:
-           cc: /usr/bin/clang-with-suffix
+         compilers:
+           c: /usr/bin/clang-with-suffix
            cxx: /usr/bin/clang++-with-extra-info
-           fc: /usr/bin/gfortran
-           f77: /usr/bin/gfortran
+           fortran: /usr/bin/gfortran
        extra_rpaths:
        - /usr/lib/llvm/
@@ -1358,187 +1364,6 @@ This will write the private key to the file `dinosaur.priv`.
 or for help on an issue or the Spack slack.
 
-
-.. _cray-support:
-
--------------
-Spack on Cray
--------------
-
-Spack differs slightly when used on a Cray system. The architecture spec
-can differentiate between the front-end and back-end processor and operating system.
-For example, on Edison at NERSC, the back-end target processor
-is "Ivy Bridge", so you can specify to use the back-end this way:
-
-.. code-block:: console
-
-   $ spack install zlib target=ivybridge
-
-You can also use the operating system to build against the back-end:
-
-.. code-block:: console
-
-   $ spack install zlib os=CNL10
-
-Notice that the name includes both the operating system name and the major
-version number concatenated together.
-
-Alternatively, if you want to build something for the front-end,
-you can specify the front-end target processor. The processor for a login node
-on Edison is "Sandy bridge" so we specify on the command line like so:
-
-.. code-block:: console
-
-   $ spack install zlib target=sandybridge
-
-And the front-end operating system is:
-
-.. code-block:: console
-
-   $ spack install zlib os=SuSE11
-
-^^^^^^^^^^^^^^^^^^^^^^^
-Cray compiler detection
-^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack can detect compilers using two methods. For the front-end, we treat
-everything the same. The difference lies in back-end compiler detection.
-Back-end compiler detection is made via the Tcl module avail command.
-Once it detects the compiler it writes the appropriate PrgEnv and compiler
-module name to compilers.yaml and sets the paths to each compiler with Cray's
-compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
-the correct PrgEnv and compiler module and will call the appropriate wrapper.
-
-The compilers.yaml config file will also differ. There is a
-modules section that is filled with the compiler's Programming Environment
-and module name. On other systems, this field is empty []:
-
-.. code-block:: yaml
-
-   - compiler:
-       modules:
-       - PrgEnv-intel
-       - intel/15.0.109
-
-As mentioned earlier, the compiler paths will look different on a Cray system.
-Since most compilers are invoked using cc, CC and ftn, the paths for each
-compiler are replaced with their respective Cray compiler wrapper names:
-
-.. code-block:: yaml
-
-   paths:
-     cc: cc
-     cxx: CC
-     f77: ftn
-     fc: ftn
-
-As opposed to an explicit path to the compiler executable. This allows Spack
-to call the Cray compiler wrappers during build time.
-
-For more on compiler configuration, check out :ref:`compiler-config`.
-
-Spack sets the default Cray link type to dynamic, to better match other
-platforms. Individual packages can enable static linking (which is the
-default outside of Spack on cray systems) using the ``-static`` flag.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting defaults and using Cray modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you want to use default compilers for each PrgEnv and also be able
-to load cray external modules, you will need to set up a ``packages.yaml``.
-
-Here's an example of an external configuration for cray modules:
-
-.. code-block:: yaml
-
-   packages:
-     mpich:
-       externals:
-       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
-         modules:
-         - cray-mpich
-       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
-         modules:
-         - cray-mpich
-     all:
-       providers:
-         mpi: [mpich]
-
-This tells Spack that for whatever package that depends on mpi, load the
-cray-mpich module into the environment. You can then be able to use whatever
-environment variables, libraries, etc, that are brought into the environment
-via module load.
-
-.. note::
-
-   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
-   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
-   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
-   compiler wrapper link line).
-
-You can set the default compiler that Spack can use for each compiler type.
-If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
-In the compiler field, set the compiler specs in your order of preference.
-Whenever you build with that compiler type, Spack will concretize to that version.
-
-Here is an example of a full packages.yaml used at NERSC
-
-.. code-block:: yaml
-
-   packages:
-     mpich:
-       externals:
-       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-mpich
-       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
-         modules:
-         - cray-mpich
-       buildable: False
-     netcdf:
-       externals:
-       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-netcdf
-       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-netcdf
-       buildable: False
-     hdf5:
-       externals:
-       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-hdf5
-       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-hdf5
-       buildable: False
-     all:
-       compiler: [gcc@5.2.0, intel@16.0.0.109]
-       providers:
-         mpi: [mpich]
-
-Here we tell spack that whenever we want to build with gcc use version 5.2.0 or
-if we want to build with intel compilers, use version 16.0.0.109. We add a spec
-for each compiler type for each of the cray modules. This ensures that for each
-compiler on our system we can use that external module.
-
-For more on external packages check out the section :ref:`sec-external-packages`.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Using Linux containers on Cray machines
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack uses environment variables particular to the Cray programming
-environment to determine which systems are Cray platforms. These
-environment variables may be propagated into containers that are not
-using the Cray programming environment.
-
-To ensure that Spack does not autodetect the Cray programming
-environment, unset the environment variable ``MODULEPATH``. This
-will cause Spack to treat a linux container on a Cray system as a base
-linux distro.
-
 .. _windows_support:
 
 ----------------
@@ -1572,6 +1397,8 @@ Microsoft Visual Studio
 """""""""""""""""""""""
 
 Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
+Spack additionally requires the Windows SDK (including WGL) to be installed as part of your
+Visual Studio installation, as it is required to build many packages from source.
 
 We require several specific components to be included in the Visual Studio installation.
 One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1579,6 +1406,7 @@ depending on installation type (Professional, Build Tools, etc.) The other requ
 "C++ CMake tools for Windows," which can be selected from among the optional packages.
 This provides CMake and Ninja for use during Spack configuration.
 
+
 If you already have Visual Studio installed, you can make sure these components are installed by
 rerunning the installer. Next to your installation, select "Modify" and look at the
 "Installation details" pane on the right.
@@ -6435,9 +6435,12 @@ the ``paths`` attribute:
           echo "Target: x86_64-pc-linux-gnu"
           echo "Thread model: posix"
           echo "InstalledDir: /usr/bin"
+      platforms: ["linux", "darwin"]
       results:
       - spec: 'llvm@3.9.1 +clang~lld~lldb'
 
+If the ``platforms`` attribute is present, tests are run only if the current host
+matches one of the listed platforms.
 Each test is performed by first creating a temporary directory structure as
 specified in the corresponding ``layout`` and by then running
 package detection and checking that the outcome matches the expected
@@ -6471,6 +6474,10 @@ package detection and checking that the outcome matches the expected
      - A spec that is expected from detection
      - Any valid spec
      - Yes
+   * - ``results:[0]:extra_attributes``
+     - Extra attributes expected on the associated Spec
+     - Nested dictionary with strings as keys, and regular expressions as leaf values
+     - No
 
 """""""""""""""""""""""""""""""
 Reuse tests from other packages
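To illustrate the new table row, a hedged sketch of what an ``extra_attributes`` expectation could look like in a detection test (the nested keys and the regular expression are hypothetical, shown only to demonstrate the shape of the schema):

.. code-block:: yaml

   results:
   - spec: 'llvm@3.9.1 +clang~lld~lldb'
     extra_attributes:        # nested dictionary; leaf values are regular expressions
       compilers:
         c: ".*/clang-3\\.9$"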
@@ -476,9 +476,3 @@ implemented using Python's built-in `sys.path
 :py:mod:`spack.repo` module implements a custom `Python importer
 <https://docs.python.org/2/library/imp.html>`_.
 
-.. warning::
-
-   The mechanism for extending packages is not yet extensively tested,
-   and extending packages across repositories imposes inter-repo
-   dependencies, which may be hard to manage. Use this feature at your
-   own risk, but let us know if you have a use case for it.
@@ -4,10 +4,10 @@ sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
 python-levenshtein==0.25.1
 docutils==0.20.1
-pygments==2.17.2
+pygments==2.18.0
 urllib3==2.2.1
-pytest==8.1.1
+pytest==8.2.1
 isort==5.13.2
-black==24.4.0
+black==24.4.2
 flake8==7.0.0
-mypy==1.9.0
+mypy==1.10.0
lib/spack/env/cc (vendored, 232 lines changed)

@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
 SPACK_FC_RPATH_ARG
 SPACK_LINKER_ARG
 SPACK_SHORT_SPEC
-SPACK_SYSTEM_DIRS"
+SPACK_SYSTEM_DIRS
+SPACK_MANAGED_DIRS"
 
 # Optional parameters that aren't required to be set
@@ -173,22 +174,6 @@ preextend() {
     unset IFS
 }
 
-# system_dir PATH
-# test whether a path is a system directory
-system_dir() {
-    IFS=':'  # SPACK_SYSTEM_DIRS is colon-separated
-    path="$1"
-    for sd in $SPACK_SYSTEM_DIRS; do
-        if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
-            # success if path starts with a system prefix
-            unset IFS
-            return 0
-        fi
-    done
-    unset IFS
-    return 1  # fail if path starts no system prefix
-}
-
 # Fail with a clear message if the input contains any bell characters.
 if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
     die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -201,6 +186,18 @@ for param in $params; do
     fi
 done
 
+# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
+# moving the eval inside the function would eval it every call.
+eval "\
+path_order() {
+case \"\$1\" in
+    $SPACK_MANAGED_DIRS) return 0 ;;
+    $SPACK_SYSTEM_DIRS) return 2 ;;
+    /*) return 1 ;;
+esac
+}
+"
+
 # Check if optional parameters are defined
 # If we aren't asking for debug flags, don't add them
 if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -420,11 +417,12 @@ input_command="$*"
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            if system_dir "$1"; then
-                append return_system_rpath_dirs_list "$1"
-            else
-                append return_rpath_dirs_list "$1"
-            fi
+            path_order "$1"
+            case $? in
+                0) append return_spack_store_rpath_dirs_list "$1" ;;
+                1) append return_rpath_dirs_list "$1" ;;
+                2) append return_system_rpath_dirs_list "$1" ;;
+            esac
             wl_expect_rpath=no
         else
             case "$1" in
@@ -432,21 +430,25 @@ parse_Wl() {
                 arg="${1#-rpath=}"
                 if [ -z "$arg" ]; then
                     shift; continue
-                elif system_dir "$arg"; then
-                    append return_system_rpath_dirs_list "$arg"
-                else
-                    append return_rpath_dirs_list "$arg"
                 fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                    1) append return_rpath_dirs_list "$arg" ;;
+                    2) append return_system_rpath_dirs_list "$arg" ;;
+                esac
                 ;;
             --rpath=*)
                 arg="${1#--rpath=}"
                 if [ -z "$arg" ]; then
                     shift; continue
-                elif system_dir "$arg"; then
-                    append return_system_rpath_dirs_list "$arg"
-                else
-                    append return_rpath_dirs_list "$arg"
                 fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                    1) append return_rpath_dirs_list "$arg" ;;
+                    2) append return_system_rpath_dirs_list "$arg" ;;
+                esac
                 ;;
             -rpath|--rpath)
                 wl_expect_rpath=yes
@@ -473,12 +475,20 @@ categorize_arguments() {
 
     return_other_args_list=""
     return_isystem_was_used=""
 
+    return_isystem_spack_store_include_dirs_list=""
     return_isystem_system_include_dirs_list=""
     return_isystem_include_dirs_list=""
 
+    return_spack_store_include_dirs_list=""
     return_system_include_dirs_list=""
     return_include_dirs_list=""
 
+    return_spack_store_lib_dirs_list=""
     return_system_lib_dirs_list=""
     return_lib_dirs_list=""
 
+    return_spack_store_rpath_dirs_list=""
     return_system_rpath_dirs_list=""
    return_rpath_dirs_list=""
@@ -546,29 +556,32 @@ categorize_arguments() {
                 arg="${1#-isystem}"
                 return_isystem_was_used=true
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_isystem_system_include_dirs_list "$arg"
-                else
-                    append return_isystem_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_isystem_include_dirs_list "$arg" ;;
+                    2) append return_isystem_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -I*)
                 arg="${1#-I}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_include_dirs_list "$arg"
-                else
-                    append return_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_include_dirs_list "$arg" ;;
+                    2) append return_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -L*)
                 arg="${1#-L}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_lib_dirs_list "$arg"
-                else
-                    append return_lib_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_lib_dirs_list "$arg" ;;
+                    1) append return_lib_dirs_list "$arg" ;;
+                    2) append return_system_lib_dirs_list "$arg" ;;
+                esac
                 ;;
             -l*)
                 # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -601,29 +614,32 @@ categorize_arguments() {
                 break
             elif [ "$xlinker_expect_rpath" = yes ]; then
                 # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                if system_dir "$1"; then
-                    append return_system_rpath_dirs_list "$1"
-                else
-                    append return_rpath_dirs_list "$1"
-                fi
+                path_order "$1"
+                case $? in
+                    0) append return_spack_store_rpath_dirs_list "$1" ;;
+                    1) append return_rpath_dirs_list "$1" ;;
+                    2) append return_system_rpath_dirs_list "$1" ;;
+                esac
                 xlinker_expect_rpath=no
             else
                 case "$1" in
                     -rpath=*)
                         arg="${1#-rpath=}"
-                        if system_dir "$arg"; then
-                            append return_system_rpath_dirs_list "$arg"
-                        else
-                            append return_rpath_dirs_list "$arg"
-                        fi
+                        path_order "$arg"
+                        case $? in
+                            0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                            1) append return_rpath_dirs_list "$arg" ;;
+                            2) append return_system_rpath_dirs_list "$arg" ;;
+                        esac
                         ;;
                     --rpath=*)
                         arg="${1#--rpath=}"
-                        if system_dir "$arg"; then
-                            append return_system_rpath_dirs_list "$arg"
-                        else
-                            append return_rpath_dirs_list "$arg"
-                        fi
+                        path_order "$arg"
+                        case $? in
+                            0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                            1) append return_rpath_dirs_list "$arg" ;;
+                            2) append return_system_rpath_dirs_list "$arg" ;;
+                        esac
                         ;;
                     -rpath|--rpath)
                         xlinker_expect_rpath=yes
@@ -661,15 +677,24 @@ categorize_arguments() {
 }
 
 categorize_arguments "$@"
-include_dirs_list="$return_include_dirs_list"
-lib_dirs_list="$return_lib_dirs_list"
-rpath_dirs_list="$return_rpath_dirs_list"
+
+spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
 system_include_dirs_list="$return_system_include_dirs_list"
+include_dirs_list="$return_include_dirs_list"
+
+spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
 system_lib_dirs_list="$return_system_lib_dirs_list"
+lib_dirs_list="$return_lib_dirs_list"
+
+spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
 system_rpath_dirs_list="$return_system_rpath_dirs_list"
-isystem_was_used="$return_isystem_was_used"
+rpath_dirs_list="$return_rpath_dirs_list"
+
+isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
 isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
 isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+isystem_was_used="$return_isystem_was_used"
 other_args_list="$return_other_args_list"
 
 #
@@ -730,7 +755,7 @@ esac
 
 # Linker flags
 case "$mode" in
-    ld|ccld)
+    ccld)
         extend spack_flags_list SPACK_LDFLAGS
         ;;
 esac
@@ -738,15 +763,24 @@ esac
 IFS="$lsep"
     categorize_arguments $spack_flags_list
 unset IFS
-spack_flags_include_dirs_list="$return_include_dirs_list"
-spack_flags_lib_dirs_list="$return_lib_dirs_list"
-spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
-spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
-spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
-spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
-spack_flags_isystem_was_used="$return_isystem_was_used"
+
+spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
 spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
 spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_include_dirs_list="$return_include_dirs_list"
+
+spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+
+spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+
+spack_flags_isystem_was_used="$return_isystem_was_used"
 spack_flags_other_args_list="$return_other_args_list"
 
 
@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
         # Append RPATH directories. Note that in the case of the
         # top-level package these directories may not exist yet. For dependencies
         # it is assumed that paths have already been confirmed.
+        extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
         extend rpath_dirs_list SPACK_RPATH_DIRS
     fi
 fi
 
 if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
+    extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
     extend lib_dirs_list SPACK_LINK_DIRS
 fi
 
@@ -798,38 +834,50 @@ case "$mode" in
         ;;
 esac
 
+case "$mode" in
+    cpp|cc|as|ccld)
+        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
+            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
+        else
+            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend include_dirs_list SPACK_INCLUDE_DIRS
+        fi
+        ;;
+esac
+
 #
 # Finally, reassemble the command line.
 #
 args_list="$flags_list"
 
-# Insert include directories just prior to any system include directories
+# Include search paths partitioned by (in store, non-sytem, system)
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
-extend args_list spack_flags_include_dirs_list "-I"
-extend args_list include_dirs_list "-I"
+extend args_list spack_flags_spack_store_include_dirs_list -I
+extend args_list spack_store_include_dirs_list -I
+
+extend args_list spack_flags_include_dirs_list -I
+extend args_list include_dirs_list -I
+
+extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
+
 extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"
 
-case "$mode" in
-    cpp|cc|as|ccld)
-        if [ "$spack_flags_isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        elif [ "$isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        else
-            extend args_list SPACK_INCLUDE_DIRS "-I"
-        fi
-        ;;
-esac
-
 extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I
 
 extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
 
-# Library search paths
+# Library search paths partitioned by (in store, non-sytem, system)
+extend args_list spack_flags_spack_store_lib_dirs_list "-L"
+extend args_list spack_store_lib_dirs_list "-L"
+
 extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"
 
 extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"
 
@@ -839,8 +887,12 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
+        extend args_list spack_store_rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_rpath_dirs_list "$rpath"
         extend args_list rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_system_rpath_dirs_list "$rpath"
         extend args_list system_rpath_dirs_list "$rpath"
         ;;
@@ -848,8 +900,12 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
         extend args_list rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
         extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;
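Taken together, the reassembly hunks make every flag family come out in the same partition order: spack store paths first, other build paths second, system paths last. A condensed illustration with invented directories, shown for -I (the -L and rpath families follow the same pattern):

# Sketch only: the emit order the wrapper now uses for include flags.
spack_store_include_dirs = ["/opt/spack/store/zlib/include"]  # hypothetical
include_dirs = ["/home/me/project/include"]                   # hypothetical
system_include_dirs = ["/usr/include"]                        # hypothetical

args = []
for d in spack_store_include_dirs:  # in-store paths first
    args.append("-I" + d)
for d in include_dirs:              # then other (non-system) paths
    args.append("-I" + d)
for d in system_include_dirs:       # system paths last
    args.append("-I" + d)

print(" ".join(args))
# -I/opt/spack/store/zlib/include -I/home/me/project/include -I/usr/include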
2 lib/spack/external/__init__.py vendored
@@ -18,7 +18,7 @@
 
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
+* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
 
 astunparse
 ----------------
lib/spack/external/_vendoring/ruamel/yaml/comments.py vendored
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                     Tag.attrib, merge_attrib]:
             if hasattr(self, a):
                 if memo is not None:
-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
                 else:
                     setattr(t, a, getattr(self, a))
     # fmt: on
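The one-character move matters: in the old form, memo is silently consumed as getattr's default value and the deep copy runs without a memo dict, while the fixed form hands memo to copy.deepcopy so shared objects are copied exactly once. A standalone repro of the difference:

# Demonstrates the semantics behind the fix; Node and the attribute names are invented.
import copy

class Node:
    pass

shared = Node()
holder = Node()
holder.a = shared
holder.b = shared

# Buggy form: memo becomes the getattr default and deepcopy never sees it.
memo = {}
x = copy.deepcopy(getattr(holder, "a", memo))
y = copy.deepcopy(getattr(holder, "b", memo))
assert x is not y  # the shared object is duplicated

# Fixed form: deepcopy receives the memo, so identity is preserved across copies.
memo = {}
x = copy.deepcopy(getattr(holder, "a"), memo)
y = copy.deepcopy(getattr(holder, "b"), memo)
assert x is y  # both copies resolve to the same new object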
2 lib/spack/external/archspec/__init__.py vendored
@@ -1,3 +1,3 @@
 """Init file to avoid namespace packages"""
 
-__version__ = "0.2.3"
+__version__ = "0.2.4"
9 lib/spack/external/archspec/cpu/__init__.py vendored
@@ -5,9 +5,10 @@
 """The "cpu" package permits to query and compare different
 CPU microarchitectures.
 """
-from .detect import host
+from .detect import brand_string, host
 from .microarchitecture import (
     TARGETS,
+    InvalidCompilerVersion,
     Microarchitecture,
     UnsupportedMicroarchitecture,
     generic_microarchitecture,
@@ -15,10 +16,12 @@
 )
 
 __all__ = [
+    "brand_string",
+    "host",
+    "TARGETS",
+    "InvalidCompilerVersion",
     "Microarchitecture",
     "UnsupportedMicroarchitecture",
-    "TARGETS",
     "generic_microarchitecture",
-    "host",
     "version_components",
 ]
42 lib/spack/external/archspec/cpu/detect.py vendored
@@ -155,6 +155,31 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
         mask = 1 << bit
         return register & mask > 0
 
+    def brand_string(self) -> Optional[str]:
+        """Returns the brand string, if available."""
+        if self.highest_extension_support < 0x80000004:
+            return None
+
+        r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
+        r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
+        r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
+        result = struct.pack(
+            "IIIIIIIIIIII",
+            r1.eax,
+            r1.ebx,
+            r1.ecx,
+            r1.edx,
+            r2.eax,
+            r2.ebx,
+            r2.ecx,
+            r2.edx,
+            r3.eax,
+            r3.ebx,
+            r3.ecx,
+            r3.edx,
+        ).decode("utf-8")
+        return result.strip("\x00")
+
 
 @detection(operating_system="Windows")
 def cpuid_info():
@@ -174,8 +199,8 @@ def _check_output(args, env):
 
 
 WINDOWS_MAPPING = {
-    "AMD64": "x86_64",
-    "ARM64": "aarch64",
+    "AMD64": X86_64,
+    "ARM64": AARCH64,
 }
 
 
@@ -409,3 +434,16 @@ def compatibility_check_for_riscv64(info, target):
     return (target == arch_root or arch_root in target.ancestors) and (
         target.name == info.name or target.vendor == "generic"
     )
+
+
+def brand_string() -> Optional[str]:
+    """Returns the brand string of the host, if detected, or None."""
+    if platform.system() == "Darwin":
+        return _check_output(
+            ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
+        ).strip()
+
+    if host().family == X86_64:
+        return CpuidInfoCollector().brand_string()
+
+    return None
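With the vendored archspec bumped to 0.2.4, the CPU brand string becomes directly queryable: sysctl on macOS, CPUID leaves 0x80000002-0x80000004 on x86_64, and None where neither path applies. A minimal check, assuming a standalone archspec >= 0.2.4 install:

# Requires archspec >= 0.2.4; output below is illustrative, not guaranteed.
import archspec.cpu

print(archspec.cpu.host())          # best-matching microarchitecture, e.g. "icelake"
print(archspec.cpu.brand_string())  # e.g. "Intel(R) Core(TM) i7-1065G7 CPU @ 1.30GHz", or None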
lib/spack/external/archspec/cpu/microarchitecture.py vendored
@@ -208,6 +208,8 @@ def optimization_flags(self, compiler, version):
         """Returns a string containing the optimization flags that needs
         to be used to produce code optimized for this micro-architecture.
 
+        The version is expected to be a string of dot separated digits.
+
         If there is no information on the compiler passed as argument the
         function returns an empty string. If it is known that the compiler
         version we want to use does not support this architecture the function
@@ -216,6 +218,11 @@ def optimization_flags(self, compiler, version):
         Args:
             compiler (str): name of the compiler to be used
             version (str): version of the compiler to be used
+
+        Raises:
+            UnsupportedMicroarchitecture: if the requested compiler does not support
+                this micro-architecture.
+            ValueError: if the version doesn't match the expected format
         """
         # If we don't have information on compiler at all return an empty string
         if compiler not in self.family.compilers:
@@ -232,6 +239,14 @@ def optimization_flags(self, compiler, version):
             msg = msg.format(compiler, best_target, best_target.family)
             raise UnsupportedMicroarchitecture(msg)
 
+        # Check that the version matches the expected format
+        if not re.match(r"^(?:\d+\.)*\d+$", version):
+            msg = (
+                "invalid format for the compiler version argument. "
+                "Only dot separated digits are allowed."
+            )
+            raise InvalidCompilerVersion(msg)
+
         # If we have information on this compiler we need to check the
         # version being used
         compiler_info = self.compilers[compiler]
@@ -292,7 +307,7 @@ def generic_microarchitecture(name):
     Args:
         name (str): name of the micro-architecture
     """
-    return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
+    return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
 
 
 def version_components(version):
@@ -367,7 +382,15 @@ def fill_target_from_dict(name, data, targets):
 TARGETS = LazyDictionary(_known_microarchitectures)
 
 
-class UnsupportedMicroarchitecture(ValueError):
+class ArchspecError(Exception):
+    """Base class for errors within archspec"""
+
+
+class UnsupportedMicroarchitecture(ArchspecError, ValueError):
     """Raised if a compiler version does not support optimization for a given
     micro-architecture.
     """
+
+
+class InvalidCompilerVersion(ArchspecError, ValueError):
    """Raised when an invalid format is used for compiler versions in archspec."""
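The new guard accepts only dot-separated digits, so vendor suffixes like "13.2.0-gentoo" now raise InvalidCompilerVersion (a ValueError, via the new ArchspecError base) up front instead of failing deeper in the comparison logic. The regex in isolation:

# Same pattern the diff adds; candidates are made-up examples.
import re

VERSION_RE = re.compile(r"^(?:\d+\.)*\d+$")

for candidate in ["13", "13.2", "13.2.0", "13.2.0-gentoo", "v13", ""]:
    print(candidate or "<empty>", bool(VERSION_RE.match(candidate)))
# 13 True / 13.2 True / 13.2.0 True / 13.2.0-gentoo False / v13 False / <empty> False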
lib/spack/external/archspec/json/cpu/microarchitectures.json vendored
@@ -2937,8 +2937,6 @@
       "ilrcpc",
       "flagm",
       "ssbs",
-      "paca",
-      "pacg",
       "dcpodp",
       "svei8mm",
       "svebf16",
@@ -3066,8 +3064,6 @@
       "flagm",
       "ssbs",
       "sb",
-      "paca",
-      "pacg",
       "dcpodp",
       "sve2",
       "sveaes",
@@ -3081,8 +3077,7 @@
       "svebf16",
       "i8mm",
       "bf16",
-      "dgh",
-      "bti"
+      "dgh"
     ],
     "compilers" : {
       "gcc": [
13 lib/spack/external/patches/ruamelyaml.patch vendored Normal file
@@ -0,0 +1,13 @@
+diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+index 1badeda585..892c868af3 100644
+--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
++++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
+                     Tag.attrib, merge_attrib]:
+             if hasattr(self, a):
+                 if memo is not None:
+-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
++                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
+                 else:
+                     setattr(t, a, getattr(self, a))
+ # fmt: on
lib/spack/llnl/path.py
@@ -98,3 +98,10 @@ def path_filter_caller(*args, **kwargs):
     if _func:
         return holder_func(_func)
     return holder_func
+
+
+def sanitize_win_longpath(path: str) -> str:
+    """Strip Windows extended path prefix from strings
+    Returns sanitized string.
+    no-op if extended path prefix is not present"""
+    return path.lstrip("\\\\?\\")
lib/spack/llnl/util/filesystem.py
@@ -187,12 +187,18 @@ def polite_filename(filename: str) -> str:
     return _polite_antipattern().sub("_", filename)
 
 
-def getuid():
+def getuid() -> Union[str, int]:
+    """Returns os getuid on non Windows
+    On Windows returns 0 for admin users, login string otherwise
+    This is in line with behavior from get_owner_uid which
+    always returns the login string on Windows
+    """
     if sys.platform == "win32":
         import ctypes
+
+        # If not admin, use the string name of the login as a unique ID
         if ctypes.windll.shell32.IsUserAnAdmin() == 0:
-            return 1
+            return os.getlogin()
         return 0
     else:
         return os.getuid()
@@ -213,6 +219,15 @@ def _win_rename(src, dst):
     os.replace(src, dst)
 
 
+@system_path_filter
+def msdos_escape_parens(path):
+    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
+    if sys.platform == "win32":
+        return path.replace("(", "^(").replace(")", "^)")
+    else:
+        return path
+
+
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
@@ -553,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):
 
 
 @system_path_filter(arg_slice=slice(1))
-def get_owner_uid(path, err_msg=None):
+def get_owner_uid(path, err_msg=None) -> Union[str, int]:
+    """Returns owner UID of path destination
+    On non Windows this is the value of st_uid
+    On Windows this is the login string associated with the
+    owning user.
+    """
     if not os.path.exists(path):
         mkdirp(path, mode=stat.S_IRWXU)
 
@@ -745,7 +766,6 @@ def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
-    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
@@ -768,8 +788,6 @@ def copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only
 
@@ -777,8 +795,6 @@ def copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    if allow_broken_symlinks and sys.platform == "win32":
-        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -822,7 +838,7 @@ def copy_tree(
             if islink(s):
                 link_target = resolve_link_target_relative_to_the_link(s)
                 if symlinks:
-                    target = os.readlink(s)
+                    target = readlink(s)
                     if os.path.isabs(target):

                         def escaped_path(path):
@@ -851,16 +867,14 @@ def escaped_path(path):
             copy_mode(s, d)
 
     for target, d, s in links:
-        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
+        symlink(target, d)
         if _permissions:
             set_install_permissions(d)
             copy_mode(s, d)
 
 
 @system_path_filter
-def install_tree(
-    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
-):
+def install_tree(src, dest, symlinks=True, ignore=None):
     """Recursively install an entire directory tree rooted at *src*.
 
     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -871,21 +885,12 @@ def install_tree(
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception.
 
     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(
-        src,
-        dest,
-        symlinks=symlinks,
-        allow_broken_symlinks=allow_broken_symlinks,
-        ignore=ignore,
-        _permissions=True,
-    )
+    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
 
 
 @system_path_filter
@@ -2429,9 +2434,10 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                self._additional_library_dependents.add(pathlib.Path(pth).parent)
+                new_pth = pathlib.Path(pth).parent
             else:
-                self._additional_library_dependents.add(pathlib.Path(pth))
+                new_pth = pathlib.Path(pth)
+            self._additional_library_dependents.add(new_pth)
 
     @property
     def rpaths(self):
@@ -2509,6 +2515,12 @@ def establish_link(self):
 
         # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
         # install a symlink to each dependent library
+
+        # do not rpath for system libraries included in the dag
+        # we should not be modifying libraries managed by the Windows system
+        # as this will negatively impact linker behavior and can result in permission
+        # errors if those system libs are not modifiable by Spack
+        if "windows-system" not in getattr(self.pkg, "tags", []):
             for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
                 self._link(library, lib_dir)
lib/spack/llnl/util/symlink.py
@@ -8,69 +8,44 @@
 import subprocess
 import sys
 import tempfile
+from typing import Union
 
 from llnl.util import lang, tty
 
-from ..path import system_path_filter
+from ..path import sanitize_win_longpath, system_path_filter
 
 if sys.platform == "win32":
     from win32file import CreateHardLink
 
-is_windows = sys.platform == "win32"
-
-
-def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
-    """
-    Create a link.
-
-    On non-Windows and Windows with System Administrator
-    privleges this will be a normal symbolic link via
-    os.symlink.
-
-    On Windows without privledges the link will be a
-    junction for a directory and a hardlink for a file.
-    On Windows the various link types are:
-
-    Symbolic Link: A link to a file or directory on the
-    same or different volume (drive letter) or even to
-    a remote file or directory (using UNC in its path).
-    Need System Administrator privileges to make these.
-
-    Hard Link: A link to a file on the same volume (drive
-    letter) only. Every file (file's data) has at least 1
-    hard link (file's name). But when this method creates
-    a new hard link there will be 2. Deleting all hard
-    links effectively deletes the file. Don't need System
-    Administrator privileges.
-
-    Junction: A link to a directory on the same or different
-    volume (drive letter) but not to a remote directory. Don't
-    need System Administrator privileges.
-
-    Parameters:
-        source_path (str): The real file or directory that the link points to.
-            Must be absolute OR relative to the link.
-        link_path (str): The path where the link will exist.
-        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
-            doesn't exist. This will still raise an exception on Windows.
-    """
-    source_path = os.path.normpath(source_path)
+
+def _windows_symlink(
+    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
+):
+    """On Windows with System Administrator privileges this will be a normal symbolic link via
+    os.symlink. On Windows without privledges the link will be a junction for a directory and a
+    hardlink for a file. On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
+    even to a remote file or directory (using UNC in its path). Need System Administrator
+    privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
+    has at least 1 hard link (file's name). But when this method creates a new hard link there will
+    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
+    privileges.
+
+    Junction: A link to a directory on the same or different volume (drive letter) but not to a
+    remote directory. Don't need System Administrator privileges."""
+    source_path = os.path.normpath(src)
     win_source_path = source_path
-    link_path = os.path.normpath(link_path)
-
-    # Never allow broken links on Windows.
-    if sys.platform == "win32" and allow_broken_symlinks:
-        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
-
-    if not allow_broken_symlinks:
+    link_path = os.path.normpath(dst)
+
     # Perform basic checks to make sure symlinking will succeed
     if os.path.lexists(link_path):
-        raise AlreadyExistsError(
-            f"Link path ({link_path}) already exists. Cannot create link."
-        )
+        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
 
     if not os.path.exists(source_path):
-        if os.path.isabs(source_path) and not allow_broken_symlinks:
+        if os.path.isabs(source_path):
             # An absolute source path that does not exist will result in a broken link.
             raise SymlinkError(
                 f"Source path ({source_path}) is absolute but does not exist. Resulting "
@@ -88,20 +63,20 @@ def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not
         # relative because hardlink/junction dont resolve relative paths the same
         # way as os.symlink. This is ignored on other operating systems.
         win_source_path = relative_path
-    elif not allow_broken_symlinks:
+    else:
         raise SymlinkError(
             f"The source path ({source_path}) is not relative to the link path "
             f"({link_path}). Resulting link would be broken so not making link."
         )
 
     # Create the symlink
-    if sys.platform == "win32" and not _windows_can_symlink():
+    if not _windows_can_symlink():
         _windows_create_link(win_source_path, link_path)
     else:
         os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
 
 
-def islink(path: str) -> bool:
+def _windows_islink(path: str) -> bool:
     """Override os.islink to give correct answer for spack logic.
 
     For Non-Windows: a link can be determined with the os.path.islink method.
@@ -247,9 +222,9 @@ def _windows_create_junction(source: str, link: str):
     out, err = proc.communicate()
     tty.debug(out.decode())
     if proc.returncode != 0:
-        err = err.decode()
-        tty.error(err)
-        raise SymlinkError("Make junction command returned a non-zero return code.", err)
+        err_str = err.decode()
+        tty.error(err_str)
+        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)
 
 
 def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +244,14 @@ def _windows_create_hard_link(path: str, link: str):
     CreateHardLink(link, path)
 
 
-def readlink(path: str):
+def _windows_readlink(path: str, *, dir_fd=None):
     """Spack utility to override of os.readlink method to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
     elif _windows_is_junction(path):
         return _windows_read_junction(path)
     else:
-        return os.readlink(path)
+        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))
 
 
 def _windows_read_hard_link(link: str) -> str:
@@ -338,6 +313,16 @@ def resolve_link_target_relative_to_the_link(link):
     return os.path.join(link_dir, target)
 
 
+if sys.platform == "win32":
+    symlink = _windows_symlink
+    readlink = _windows_readlink
+    islink = _windows_islink
+else:
+    symlink = os.symlink
+    readlink = os.readlink
+    islink = os.path.islink
+
+
 class SymlinkError(RuntimeError):
     """Exception class for errors raised while creating symlinks,
     junctions and hard links
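The module now picks an implementation once at import time instead of branching on sys.platform inside every call, and symlink/readlink/islink keep os-compatible signatures so call sites are unchanged. The shape of the pattern, reduced to a sketch:

# Sketch only: the one-time dispatch idiom, with a simplified Windows stand-in.
import os
import sys

def _windows_islink(path: str) -> bool:
    """Stand-in for the Windows-aware check (the real one also detects
    junctions and hard links via win32 queries)."""
    return os.path.islink(path)

# Bind the implementation once at import time; callers just use islink()
# with the same signature on every platform.
islink = _windows_islink if sys.platform == "win32" else os.path.islink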
lib/spack/llnl/util/tty/__init__.py
@@ -12,7 +12,7 @@
 import traceback
 from datetime import datetime
 from sys import platform as _platform
-from typing import NoReturn
+from typing import Any, NoReturn
 
 if _platform != "win32":
     import fcntl
@@ -158,21 +158,22 @@ def get_timestamp(force=False):
         return ""
 
 
-def msg(message, *args, **kwargs):
+def msg(message: Any, *args: Any, newline: bool = True) -> None:
     if not msg_enabled():
         return
 
     if isinstance(message, Exception):
-        message = "%s: %s" % (message.__class__.__name__, str(message))
+        message = f"{message.__class__.__name__}: {message}"
+    else:
+        message = str(message)
 
-    newline = kwargs.get("newline", True)
     st_text = ""
     if _stacktrace:
         st_text = process_stacktrace(2)
-    if newline:
-        cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
-    else:
-        cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
+    nl = "\n" if newline else ""
+    cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")
     for arg in args:
         print(indent + _output_filter(str(arg)))
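Because newline now follows *args it becomes keyword-only, the **kwargs grab-bag disappears, and the cprint/cwrite branch pair collapses into a single write with an optional trailing newline. A simplified analogue of the new control flow:

# Sketch only: mirrors the new msg() shape, not Spack's actual output filtering.
def msg(message, *args, newline: bool = True) -> None:
    nl = "\n" if newline else ""
    print(f"==> {message}{nl}", end="")
    for arg in args:
        print(f"    {arg}")

msg("building zlib")                  # prints "==> building zlib" plus newline
msg("config flags:", "-O2", "-fPIC")  # extra positional args become indented lines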
lib/spack/llnl/util/tty/colify.py
@@ -237,7 +237,6 @@ def transpose():
 def colified(
     elts: List[Any],
     cols: int = 0,
-    output: Optional[IO] = None,
     indent: int = 0,
     padding: int = 2,
     tty: Optional[bool] = None,
lib/spack/llnl/util/tty/color.py
@@ -59,9 +59,11 @@
 
 To output an @, use '@@'. To output a } inside braces, use '}}'.
 """
+import os
 import re
 import sys
 from contextlib import contextmanager
+from typing import Optional
 
 
 class ColorParseError(Exception):
@@ -95,14 +97,34 @@ def __init__(self, message):
 }  # white
 
 # Regex to be used for color formatting
-color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
+COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")
 
 # Mapping from color arguments to values for tty.set_color
 color_when_values = {"always": True, "auto": None, "never": False}
 
-# Force color; None: Only color if stdout is a tty
-# True: Always colorize output, False: Never colorize output
-_force_color = None
+
+def _color_when_value(when):
+    """Raise a ValueError for an invalid color setting.
+
+    Valid values are 'always', 'never', and 'auto', or equivalently,
+    True, False, and None.
+    """
+    if when in color_when_values:
+        return color_when_values[when]
+    elif when not in color_when_values.values():
+        raise ValueError("Invalid color setting: %s" % when)
+    return when
+
+
+def _color_from_environ() -> Optional[bool]:
+    try:
+        return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
+    except ValueError:
+        return None
+
+
+#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
+_force_color = _color_from_environ()
 
 
 def try_enable_terminal_color_on_windows():
@@ -163,19 +185,6 @@ def _err_check(result, func, args):
         debug("Unable to support color on Windows terminal")
 
 
-def _color_when_value(when):
-    """Raise a ValueError for an invalid color setting.
-
-    Valid values are 'always', 'never', and 'auto', or equivalently,
-    True, False, and None.
-    """
-    if when in color_when_values:
-        return color_when_values[when]
-    elif when not in color_when_values.values():
-        raise ValueError("Invalid color setting: %s" % when)
-    return when
-
-
 def get_color_when():
     """Return whether commands should print color or not."""
     if _force_color is not None:
@@ -203,77 +212,64 @@ def color_when(value):
     set_color_when(old_value)
 
 
-class match_to_ansi:
-    def __init__(self, color=True, enclose=False, zsh=False):
-        self.color = _color_when_value(color)
-        self.enclose = enclose
-        self.zsh = zsh
-
-    def escape(self, s):
-        """Returns a TTY escape sequence for a color"""
-        if self.color:
-            if self.zsh:
-                result = rf"\e[0;{s}m"
-            else:
-                result = f"\033[{s}m"
-
-            if self.enclose:
-                result = rf"\[{result}\]"
-
-            return result
-        else:
-            return ""
-
-    def __call__(self, match):
-        """Convert a match object generated by ``color_re`` into an ansi
-        color code. This can be used as a handler in ``re.sub``.
-        """
-        style, color, text = match.groups()
-        m = match.group(0)
-
-        if m == "@@":
-            return "@"
-        elif m == "@.":
-            return self.escape(0)
-        elif m == "@":
-            raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
-
-        string = styles[style]
-        if color:
-            if color not in colors:
-                raise ColorParseError(
-                    "Invalid color specifier: '%s' in '%s'" % (color, match.string)
-                )
-            string += ";" + str(colors[color])
-
-        colored_text = ""
-        if text:
-            colored_text = text + self.escape(0)
-
-        return self.escape(string) + colored_text
-
-
-def colorize(string, **kwargs):
+def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
+    """Returns a TTY escape sequence for a color"""
+    if color:
+        if zsh:
+            result = rf"\e[0;{s}m"
+        else:
+            result = f"\033[{s}m"
+
+        if enclose:
+            result = rf"\[{result}\]"
+
+        return result
+    else:
+        return ""
+
+
+def colorize(
+    string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
+) -> str:
     """Replace all color expressions in a string with ANSI control codes.
 
     Args:
-        string (str): The string to replace
+        string: The string to replace
 
     Returns:
-        str: The filtered string
+        The filtered string
 
     Keyword Arguments:
-        color (bool): If False, output will be plain text without control
-            codes, for output to non-console devices.
-        enclose (bool): If True, enclose ansi color sequences with
+        color: If False, output will be plain text without control codes, for output to
+            non-console devices (default: automatically choose color or not)
+        enclose: If True, enclose ansi color sequences with
             square brackets to prevent misestimation of terminal width.
-        zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
+        zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
     """
-    color = _color_when_value(kwargs.get("color", get_color_when()))
-    zsh = kwargs.get("zsh", False)
-    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
-    string = string.replace("}}", "}")
-    return string
+    color = color if color is not None else get_color_when()
+
+    def match_to_ansi(match):
+        """Convert a match object generated by ``COLOR_RE`` into an ansi
+        color code. This can be used as a handler in ``re.sub``.
+        """
+        escaped_at, dot, style, color_code, text = match.groups()
+
+        if escaped_at:
+            return "@"
+        elif dot:
+            return _escape(0, color, enclose, zsh)
+        elif not (style or color_code):
+            raise ColorParseError(
+                f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
+            )
+
+        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
+        if text:
+            return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
+        else:
+            return ansi_code
+
+    return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")
 
 
 def clen(string):
@@ -305,7 +301,7 @@ def cprint(string, stream=None, color=None):
     cwrite(string + "\n", stream, color)
 
 
-def cescape(string):
+def cescape(string: str) -> str:
     """Escapes special characters needed for color codes.
 
     Replaces the following symbols with their equivalent literal forms:
@@ -321,10 +317,7 @@ def cescape(string):
     Returns:
         (str): the string with color codes escaped
     """
-    string = str(string)
-    string = string.replace("@", "@@")
-    string = string.replace("}", "}}")
-    return string
+    return string.replace("@", "@@").replace("}", "}}")
 
 
 class ColorStream:
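The markup language itself is unchanged by the color.py rework; only the machinery behind it is (a precompiled COLOR_RE, a closure in place of the match_to_ansi class, and explicit keyword parameters instead of **kwargs). Typical usage for reference, assuming Spack's lib/spack directory is importable, as it is inside a Spack process:

from llnl.util.tty.color import colorize

print(colorize("@*rError:@. build failed"))  # bold red "Error:", then reset
print(colorize("@gOK@.", color=False))       # plain "OK": markup stripped, no ANSI codes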
lib/spack/spack/__init__.py
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.0.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
 
 
lib/spack/spack/audit.py
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
 
 @config_packages
 def _deprecated_preferences(error_cls):
-    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
-    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
+    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
+    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
     errors = []
     packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -421,6 +421,10 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
+    github_pull_commits_re = (
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
+    )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -436,14 +440,24 @@ def _check_patch_urls(pkgs, error_cls):
             if not isinstance(patch, spack.patch.UrlPatch):
                 continue
 
-            if re.match(github_patch_url_re, patch.url):
+            if re.match(github_pull_commits_re, patch.url):
+                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
+                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
+                errors.append(
+                    error_cls(
+                        f"patch URL in package {pkg_cls.name} "
+                        + "must not be a pull request commit; "
+                        + f"instead use {url}",
+                        [patch.url],
+                    )
+                )
+            elif re.match(github_patch_url_re, patch.url):
                 full_index_arg = "?full_index=1"
                 if not patch.url.endswith(full_index_arg):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with {1}".format(
-                                pkg_cls.name, full_index_arg
-                            ),
+                            f"patch URL in package {pkg_cls.name} "
+                            + f"must end with {full_index_arg}",
                             [patch.url],
                         )
                     )
@@ -451,9 +465,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with .diff".format(
-                                pkg_cls.name
-                            ),
+                            f"patch URL in package {pkg_cls.name} must end with .diff",
                             [patch.url],
                         )
                    )
@@ -1046,7 +1058,7 @@ def _extracts_errors(triggers, summary):
|
|||||||
group="externals",
|
group="externals",
|
||||||
tag="PKG-EXTERNALS",
|
tag="PKG-EXTERNALS",
|
||||||
description="Sanity checks for external software detection",
|
description="Sanity checks for external software detection",
|
||||||
kwargs=("pkgs",),
|
kwargs=("pkgs", "debug_log"),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -1069,7 +1081,7 @@ def packages_with_detection_tests():
|
|||||||
|
|
||||||
|
|
||||||
@external_detection
|
@external_detection
|
||||||
def _test_detection_by_executable(pkgs, error_cls):
|
def _test_detection_by_executable(pkgs, debug_log, error_cls):
|
||||||
"""Test drive external detection for packages"""
|
"""Test drive external detection for packages"""
|
||||||
import spack.detection
|
import spack.detection
|
||||||
|
|
||||||
@@ -1095,6 +1107,7 @@ def _test_detection_by_executable(pkgs, error_cls):
|
|||||||
for idx, test_runner in enumerate(
|
for idx, test_runner in enumerate(
|
||||||
spack.detection.detection_tests(pkg_name, spack.repo.PATH)
|
spack.detection.detection_tests(pkg_name, spack.repo.PATH)
|
||||||
):
|
):
|
||||||
|
debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
|
||||||
specs = test_runner.execute()
|
specs = test_runner.execute()
|
||||||
expected_specs = test_runner.expected_specs
|
expected_specs = test_runner.expected_specs
|
||||||
|
|
||||||
@@ -1111,4 +1124,75 @@ def _test_detection_by_executable(pkgs, error_cls):
|
|||||||
details = [msg.format(s, idx) for s in sorted(not_expected)]
|
details = [msg.format(s, idx) for s in sorted(not_expected)]
|
||||||
errors.append(error_cls(summary=summary, details=details))
|
errors.append(error_cls(summary=summary, details=details))
|
||||||
|
|
||||||
|
matched_detection = []
|
||||||
|
for candidate in expected_specs:
|
||||||
|
try:
|
||||||
|
idx = specs.index(candidate)
|
||||||
|
matched_detection.append((candidate, specs[idx]))
|
||||||
|
except (AttributeError, ValueError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||||
|
result = []
|
||||||
|
# Check items are of the same type
|
||||||
|
if not isinstance(_detected, type(_expected)):
|
||||||
|
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
|
||||||
|
_details = [f"{_detected} was detected instead"]
|
||||||
|
return [error_cls(summary=_summary, details=_details)]
|
||||||
|
|
||||||
|
# If they are string expected is a regex
|
||||||
|
if isinstance(_expected, str):
|
||||||
|
try:
|
||||||
|
_regex = re.compile(_expected)
|
||||||
|
except re.error:
|
||||||
|
_summary = f'{pkg_name}: illegal regex in "{_spec}" extra attributes'
|
||||||
|
_details = [f"{_expected} is not a valid regex"]
|
||||||
|
return [error_cls(summary=_summary, details=_details)]
|
||||||
|
|
||||||
|
if not _regex.match(_detected):
|
||||||
|
_summary = (
|
||||||
|
f'{pkg_name}: error when trying to match "{_expected}" '
|
||||||
|
f"in extra attributes"
|
||||||
|
)
|
||||||
|
_details = [f"{_detected} does not match the regex"]
|
||||||
|
return [error_cls(summary=_summary, details=_details)]
|
||||||
|
|
||||||
|
if isinstance(_expected, dict):
|
||||||
|
_not_detected = set(_expected.keys()) - set(_detected.keys())
|
||||||
|
if _not_detected:
|
||||||
|
_summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
|
||||||
|
_details = [
|
||||||
|
f'"{_expected}" was expected',
|
||||||
|
f'"{_detected}" was detected',
|
||||||
|
] + [f'attribute "{s}" was not detected' for s in sorted(_not_detected)]
|
||||||
|
result.append(error_cls(summary=_summary, details=_details))
|
||||||
|
|
||||||
|
_common = set(_expected.keys()) & set(_detected.keys())
|
||||||
|
for _key in _common:
|
||||||
|
result.extend(
|
||||||
|
_compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
for expected, detected in matched_detection:
|
||||||
|
# We might not want to test all attributes, so avoid not_expected
|
||||||
|
not_detected = set(expected.extra_attributes) - set(detected.extra_attributes)
|
||||||
|
if not_detected:
|
||||||
|
summary = f"{pkg_name}: cannot detect some attributes for spec {expected}"
|
||||||
|
details = [
|
||||||
|
f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)
|
||||||
|
]
|
||||||
|
errors.append(error_cls(summary=summary, details=details))
|
||||||
|
|
||||||
|
common = set(expected.extra_attributes) & set(detected.extra_attributes)
|
||||||
|
for key in common:
|
||||||
|
errors.extend(
|
||||||
|
_compare_extra_attribute(
|
||||||
|
expected.extra_attributes[key],
|
||||||
|
detected.extra_attributes[key],
|
||||||
|
_spec=expected,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
return errors
|
return errors
|
||||||
|
|||||||
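Note on the audit hunks above: the new `github_pull_commits_re` branch rewrites pull-request-commit patch URLs to their canonical `/commit/` form and forces the stable `?full_index=1` query. A standalone sketch of that rewrite (hypothetical URL, regexes copied from the hunk):

```python
import re

github_pull_commits_re = (
    r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
    r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
)

url = "https://github.com/owner/repo/pull/123/commits/abc123.patch"  # hypothetical
assert re.match(github_pull_commits_re, url)

# Same two-step rewrite as the audit: point at the commit, then force a full index.
# The lookbehind is fixed-width, so it is valid in Python's re module.
fixed = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
fixed = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", fixed)
print(fixed)  # https://github.com/owner/repo/commit/abc123.patch?full_index=1
```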
@@ -29,6 +29,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+from llnl.util.symlink import readlink
 
 import spack.caches
 import spack.cmd
@@ -658,7 +659,7 @@ def get_buildfile_manifest(spec):
     # 2. paths are used as strings.
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
-        link = os.readlink(abs_path)
+        link = readlink(abs_path)
         if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)
 
@@ -2001,6 +2002,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
+        spec.package.windows_establish_runtime_linkage()
         spack.hooks.post_install(spec, False)
         spack.store.STORE.db.add(spec, spack.store.STORE.layout)
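Note on the hunk above: the `readlink` swap routes symlink resolution through `llnl.util.symlink` (presumably for Windows link handling) without changing the relocation test itself, which reduces to:

```python
import os

def needs_relocation(abs_path: str, store_root: str) -> bool:
    # A symlink needs relocating when it points absolutely into the store.
    link = os.readlink(abs_path)  # the hunk swaps this for llnl.util.symlink.readlink
    return os.path.isabs(link) and link.startswith(store_root)
```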
@@ -5,7 +5,13 @@
 """Function and classes needed to bootstrap Spack itself."""
 
 from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
-from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
+from .core import (
+    all_core_root_specs,
+    ensure_clingo_importable_or_raise,
+    ensure_core_dependencies,
+    ensure_gpg_in_path_or_raise,
+    ensure_patchelf_in_path_or_raise,
+)
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message
 
@@ -13,6 +19,8 @@
     "is_bootstrapping",
     "ensure_bootstrap_configuration",
     "ensure_core_dependencies",
+    "ensure_gpg_in_path_or_raise",
+    "ensure_clingo_importable_or_raise",
     "ensure_patchelf_in_path_or_raise",
     "all_core_root_specs",
     "ensure_environment_dependencies",
@@ -54,10 +54,14 @@ def _try_import_from_store(
     installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
 
     for candidate_spec in installed_specs:
-        pkg = candidate_spec["python"].package
+        # previously bootstrapped specs may not have a python-venv dependency.
+        if candidate_spec.dependencies("python-venv"):
+            python, *_ = candidate_spec.dependencies("python-venv")
+        else:
+            python, *_ = candidate_spec.dependencies("python")
         module_paths = [
-            os.path.join(candidate_spec.prefix, pkg.purelib),
-            os.path.join(candidate_spec.prefix, pkg.platlib),
+            os.path.join(candidate_spec.prefix, python.package.purelib),
+            os.path.join(candidate_spec.prefix, python.package.platlib),
         ]
         path_before = list(sys.path)
 
@@ -209,15 +213,18 @@ def _root_spec(spec_str: str) -> str:
     Args:
         spec_str: spec to be bootstrapped. Must be without compiler and target.
     """
-    # Add a compiler requirement to the root spec.
+    # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())
 
     if platform == "darwin":
        spec_str += " %apple-clang"
+    elif platform == "windows":
+        spec_str += " %msvc"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
+    spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"
 
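Note on the `_root_spec` hunk above: the root spec now pins the platform as well as the compiler and target. A standalone sketch of the resulting string assembly (spec and platform values hypothetical):

```python
def root_spec(spec_str: str, platform: str, target: str) -> str:
    # Same per-platform compiler choice as _root_spec above.
    compiler = {
        "darwin": " %apple-clang",
        "windows": " %msvc",
        "linux": " %gcc",
        "freebsd": " %clang",
    }.get(platform, "")
    return f"{spec_str}{compiler} platform={platform} target={target}"

print(root_spec("clingo-bootstrap@spack", "linux", "x86_64"))
# clingo-bootstrap@spack %gcc platform=linux target=x86_64
```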
@@ -173,32 +173,11 @@ def _read_metadata(self, package_name: str) -> Any:
         return data
 
     def _install_by_hash(
-        self,
-        pkg_hash: str,
-        pkg_sha256: str,
-        index: List[spack.spec.Spec],
-        bincache_platform: spack.platforms.Platform,
+        self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
     ) -> None:
-        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
-        # Reconstruct the compiler that we need to use for bootstrapping
-        compiler_entry = {
-            "modules": [],
-            "operating_system": str(index_spec.os),
-            "paths": {
-                "cc": "/dev/null",
-                "cxx": "/dev/null",
-                "f77": "/dev/null",
-                "fc": "/dev/null",
-            },
-            "spec": str(index_spec.compiler),
-            "target": str(index_spec.target.family),
-        }
         with spack.platforms.use_platform(bincache_platform):
-            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
-                spec_str = "/" + pkg_hash
-                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
-                for match in matches:
             query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
+            for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
                 spack.binary_distribution.install_root_node(
                     match, unsigned=True, force=True, sha256=pkg_sha256
                 )
@@ -232,7 +211,7 @@ def _install_and_test(
                 continue
 
             for _, pkg_hash, pkg_sha256 in item["binaries"]:
-                self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
+                self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)
 
             info: ConfigDictionary = {}
             if test_fn(query_spec=abstract_spec, query_info=info):
@@ -291,10 +270,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
         with spack_python_interpreter():
             # Add hint to use frontend operating system on Cray
             concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
-            # This is needed to help the old concretizer taking the `setuptools` dependency
-            # only when bootstrapping from sources on Python 3.12
-            if spec_for_current_python() == "python@3.12":
-                concrete_spec.constrain("+force_setuptools")
 
             if module == "clingo":
                 # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
@@ -559,6 +534,41 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
     )
 
 
+def ensure_winsdk_external_or_raise() -> None:
+    """Ensure the Windows SDK + WGL are available on system
+    If both of these packages are found, the Spack user or bootstrap
+    configuration (depending on where Spack is running)
+    will be updated to include all versions and variants detected.
+    If either the WDK or WSDK are not found, this method will raise
+    a RuntimeError.
+
+    **NOTE:** This modifies the Spack config in the current scope,
+    either user or environment depending on the calling context.
+    This is different from all other current bootstrap dependency
+    checks.
+    """
+    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
+        return
+    externals = spack.detection.by_path(["win-sdk", "wgl"])
+    if not set(["win-sdk", "wgl"]) == externals.keys():
+        missing_packages_lst = []
+        if "wgl" not in externals:
+            missing_packages_lst.append("wgl")
+        if "win-sdk" not in externals:
+            missing_packages_lst.append("win-sdk")
+        missing_packages = " & ".join(missing_packages_lst)
+        raise RuntimeError(
+            f"Unable to find the {missing_packages}, please install these packages \
+via the Visual Studio installer \
+before proceeding with Spack or provide the path to a non standard install with \
+'spack external find --path'"
+        )
+    # wgl/sdk are not required for bootstrapping Spack, but
+    # are required for building anything non trivial
+    # add to user config so they can be used by subsequent Spack ops
+    spack.detection.update_configuration(externals, buildable=False)
+
+
 def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
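Note on `ensure_winsdk_external_or_raise` above: the detection logic is plain set arithmetic over package names. A toy version with dictionaries standing in for the Spack config and detection results:

```python
required = {"win-sdk", "wgl"}

def check(configured_packages: dict, detected_externals: dict) -> None:
    if required.issubset(configured_packages.keys()):
        return  # already configured, nothing to do
    missing = required - detected_externals.keys()
    if missing:
        raise RuntimeError(f"Unable to find {' & '.join(sorted(missing))}")
    # otherwise: persist detected_externals to the config (update_configuration)

check({"win-sdk": {}, "wgl": {}}, {})  # returns early
check({}, {"win-sdk": {}, "wgl": {}})  # falls through to the update step
```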
@@ -3,13 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Bootstrap non-core Spack dependencies from an environment."""
-import glob
 import hashlib
 import os
 import pathlib
 import sys
-import warnings
-from typing import List
+from typing import Iterable, List
 
 import archspec.cpu
 
@@ -28,6 +26,16 @@
 class BootstrapEnvironment(spack.environment.Environment):
     """Environment to install dependencies of Spack for a given interpreter and architecture"""
 
+    def __init__(self) -> None:
+        if not self.spack_yaml().exists():
+            self._write_spack_yaml_file()
+        super().__init__(self.environment_root())
+
+        # Remove python package roots created before python-venv was introduced
+        for s in self.concrete_roots():
+            if "python" in s.package.extendees and not s.dependencies("python-venv"):
+                self.deconcretize(s)
+
     @classmethod
     def spack_dev_requirements(cls) -> List[str]:
         """Spack development requirements"""
@@ -59,31 +67,19 @@ def view_root(cls) -> pathlib.Path:
         return cls.environment_root().joinpath("view")
 
     @classmethod
-    def pythonpaths(cls) -> List[str]:
-        """Paths to be added to sys.path or PYTHONPATH"""
-        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
-        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
-        result = glob.glob(glob_expr)
-        if not result:
-            msg = f"Cannot find any Python path in {cls.view_root()}"
-            warnings.warn(msg)
-        return result
-
-    @classmethod
-    def bin_dirs(cls) -> List[pathlib.Path]:
+    def bin_dir(cls) -> pathlib.Path:
         """Paths to be added to PATH"""
-        return [cls.view_root().joinpath("bin")]
+        return cls.view_root().joinpath("bin")
+
+    def python_dirs(self) -> Iterable[pathlib.Path]:
+        python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
+        return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}
 
     @classmethod
     def spack_yaml(cls) -> pathlib.Path:
         """Environment spack.yaml file"""
         return cls.environment_root().joinpath("spack.yaml")
 
-    def __init__(self) -> None:
-        if not self.spack_yaml().exists():
-            self._write_spack_yaml_file()
-        super().__init__(self.environment_root())
-
     def update_installations(self) -> None:
         """Update the installations of this environment."""
         log_enabled = tty.is_debug() or tty.is_verbose()
@@ -100,21 +96,13 @@ def update_installations(self) -> None:
             self.install_all()
             self.write(regenerate=True)
 
-    def update_syspath_and_environ(self) -> None:
-        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
-        the environment view.
-        """
-        # Do minimal modifications to sys.path and environment variables. In particular, pay
-        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
-        # the performance of the current interpreter
-        sys.path.extend(self.pythonpaths())
-        os.environ["PATH"] = os.pathsep.join(
-            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
-        )
-        os.environ["PYTHONPATH"] = os.pathsep.join(
-            os.environ.get("PYTHONPATH", "").split(os.pathsep)
-            + [str(x) for x in self.pythonpaths()]
-        )
+    def load(self) -> None:
+        """Update PATH and sys.path."""
+        # Make executables available (shouldn't need PYTHONPATH)
+        os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
+
+        # Spack itself imports pytest
+        sys.path.extend(str(p) for p in self.python_dirs())
 
     def _write_spack_yaml_file(self) -> None:
         tty.msg(
@@ -164,4 +152,4 @@ def ensure_environment_dependencies() -> None:
     _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
-        env.update_syspath_and_environ()
+        env.load()
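Note on the hunk above: `load()` replaces the old PYTHONPATH juggling with two minimal mutations — prepend the view's `bin/` to PATH and extend `sys.path` with the `python-venv` site directories. In isolation:

```python
import os
import sys

def load(bin_dir: str, python_dirs: list) -> None:
    # Executables first on PATH; no PYTHONPATH needed for them.
    os.environ["PATH"] = f"{bin_dir}{os.pathsep}{os.environ.get('PATH', '')}"
    # Make the environment's pure/platform site dirs importable (e.g. pytest).
    sys.path.extend(str(p) for p in python_dirs)
```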
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from typing import List, Tuple
+from typing import Dict, List, Set, Tuple
 
 import llnl.util.tty as tty
 from llnl.string import plural
@@ -68,6 +68,7 @@
 import spack.repo
 import spack.schema.environment
 import spack.spec
+import spack.stage
 import spack.store
 import spack.subprocess_context
 import spack.user_environment
@@ -80,7 +81,7 @@
 from spack.installer import InstallError
 from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
-    SYSTEM_DIRS,
+    SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
     env_flag,
     filter_system_paths,
@@ -90,7 +91,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module, module, path_from_modules
+from spack.util.module_cmd import load_module, path_from_modules
 
 #
 # This can be set by the user to globally disable parallel builds.
@@ -103,9 +104,13 @@
 # Spack's compiler wrappers.
 #
 SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
 SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
+SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
+SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
+SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
 SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
 SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
 SPACK_PREFIX = "SPACK_PREFIX"
@@ -185,14 +190,6 @@ def __call__(self, *args, **kwargs):
         return super().__call__(*args, **kwargs)
 
 
-def _on_cray():
-    host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("default_os")
-    on_cray = str(host_platform) == "cray"
-    using_cnl = re.match(r"cnl\d+", str(host_os))
-    return on_cray, using_cnl
-
-
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
@@ -236,17 +233,6 @@ def clean_environment():
         if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
             env.unset(varname)
 
-    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
-    # interference with Spack dependencies.
-    # CNL requires these variables to be set (or at least some of them,
-    # depending on the CNL version).
-    on_cray, using_cnl = _on_cray()
-    if on_cray and not using_cnl:
-        env.unset("CRAY_LD_LIBRARY_PATH")
-        for varname in os.environ.keys():
-            if "PKGCONF" in varname:
-                env.unset(varname)
-
     # Unset the following variables because they can affect installation of
     # Autotools and CMake packages.
     build_system_vars = [
@@ -376,10 +362,6 @@ def set_compiler_environment_variables(pkg, env):
         _add_werror_handling(keep_werror, env)
 
     # Set the target parameters that the compiler will add
-    # Don't set on cray platform because the targeting module handles this
-    if spec.satisfies("platform=cray"):
-        isa_arg = ""
-    else:
-        isa_arg = spec.architecture.target.optimization_flags(compiler)
+    isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)
 
@@ -418,7 +400,7 @@ def set_compiler_environment_variables(pkg, env):
 
     env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
 
-    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
+    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
 
     compiler.setup_custom_environment(pkg, env)
 
@@ -546,9 +528,26 @@ def update_compiler_args_for_dep(dep):
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
 
-    env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
-    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
-    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
+    # Spack managed directories include the stage, store and upstream stores. We extend this with
+    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
+    spack_managed_dirs: Set[str] = {
+        spack.stage.get_stage_root(),
+        spack.store.STORE.db.root,
+        *(db.root for db in spack.store.STORE.db.upstream_dbs),
+    }
+    spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])
+
+    env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
+    is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
+    link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
+    include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
+    rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
+
+    env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
+    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
+    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
+    env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
+    env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
+    env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))
 
 
 def set_package_py_globals(pkg, context: Context = Context.BUILD):
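Note on the hunk above: the split into system vs. store directories relies on `stable_partition` (from `llnl.util.lang`), which divides a sequence by a predicate while preserving order — roughly (a sketch, not the library code):

```python
from typing import Callable, Iterable, List, Tuple, TypeVar

T = TypeVar("T")

def stable_partition(xs: Iterable[T], pred: Callable[[T], bool]) -> Tuple[List[T], List[T]]:
    true_items: List[T] = []
    false_items: List[T] = []
    for x in xs:
        (true_items if pred(x) else false_items).append(x)
    return true_items, false_items

store = "/opt/spack/store"  # hypothetical managed prefix
dirs = [f"{store}/zlib/lib", "/usr/lib", f"{store}/openssl/lib"]
managed, system = stable_partition(dirs, lambda p: p.startswith(store))
print(managed)  # ['/opt/spack/store/zlib/lib', '/opt/spack/store/openssl/lib']
print(system)   # ['/usr/lib']
```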
@@ -708,12 +707,28 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
     return compiler(*compiler_args, output=compiler_output)
 
 
-def get_rpath_deps(pkg):
-    """Return immediate or transitive RPATHs depending on the package."""
-    if pkg.transitive_rpaths:
-        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
-    else:
-        return pkg.spec.dependencies(deptype="link")
+def _get_rpath_deps_from_spec(
+    spec: spack.spec.Spec, transitive_rpaths: bool
+) -> List[spack.spec.Spec]:
+    if not transitive_rpaths:
+        return spec.dependencies(deptype=dt.LINK)
+
+    by_name: Dict[str, spack.spec.Spec] = {}
+
+    for dep in spec.traverse(root=False, deptype=dt.LINK):
+        lookup = by_name.get(dep.name)
+        if lookup is None:
+            by_name[dep.name] = dep
+        elif lookup.version < dep.version:
+            by_name[dep.name] = dep
+
+    return list(by_name.values())
+
+
+def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
+    """Return immediate or transitive dependencies (depending on the package) that need to be
+    rpath'ed. If a package occurs multiple times, the newest version is kept."""
+    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
 
 
 def get_rpaths(pkg):
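Note on the hunk above: for transitive rpaths, `_get_rpath_deps_from_spec` keeps a single spec per package name, preferring the highest version. The same rule on toy data:

```python
# (name, version) stand-ins for link-type dependency specs.
deps = [("zlib", (1, 2, 13)), ("openssl", (3, 1)), ("zlib", (1, 3))]

by_name = {}
for name, version in deps:
    current = by_name.get(name)
    if current is None or current < version:
        by_name[name] = version

print(by_name)  # {'zlib': (1, 3), 'openssl': (3, 1)}
```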
@@ -795,14 +810,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)
 
-    # kludge to handle cray mpich and libsci being automatically loaded by
-    # PrgEnv modules on cray platform. Module unload does no damage when
-    # unnecessary
-    on_cray, _ = _on_cray()
-    if on_cray and not dirty:
-        for mod in ["cray-mpich", "cray-libsci"]:
-            module("unload", mod)
-
     if target and target.module_name:
         load_module(target.module_name)
 
@@ -821,7 +828,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     return env_base
 
 
-class EnvironmentVisitor:
+class EnvironmentVisitor(traverse.AbstractVisitor):
     def __init__(self, *roots: spack.spec.Spec, context: Context):
         # For the roots (well, marked specs) we follow different edges
         # than for their deps, depending on the context.
@@ -839,7 +846,7 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
             self.root_depflag = dt.RUN | dt.LINK
 
     def neighbors(self, item):
-        spec = item.edge.spec
+        spec = item[0].spec
         if spec.dag_hash() in self.root_hashes:
             depflag = self.root_depflag
         else:
@@ -39,16 +39,11 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
     """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
     if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
     ``find_python_hints`` for context."""
-    if not getattr(pkg, "find_python_hints", False):
+    if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
+        "python", dt.BUILD | dt.LINK
+    ):
         return
-    pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
-    if len(pythons) != 1:
-        return
-    try:
-        python_executable = pythons[0].package.command.path
-    except RuntimeError:
-        return
+    python_executable = pkg.spec["python"].command.path
 
     args.extend(
         [
             CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
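Note on the hunk above: after the guard, the builder trusts `pkg.spec["python"]` to resolve the interpreter and emits the CMake hint variables. The generated flags look roughly like the following (the exact `-D` form emitted by `CMakeBuilder.define` is an assumption, and the path is hypothetical):

```python
def python_hint_args(python_executable: str) -> list:
    # Roughly what the CMakeBuilder.define(...) calls above expand to.
    return [
        f"-DPYTHON_EXECUTABLE={python_executable}",
        f"-DPython_EXECUTABLE={python_executable}",
        f"-DPython3_EXECUTABLE={python_executable}",
    ]

print(python_hint_args("/opt/spack/view/bin/python3"))  # hypothetical path
```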
lib/spack/spack/build_systems/compiler.py (new file, 144 lines)
@@ -0,0 +1,144 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import itertools
+import os
+import pathlib
+import re
+import sys
+from typing import Dict, List, Sequence, Tuple, Union
+
+import llnl.util.tty as tty
+from llnl.util.lang import classproperty
+
+import spack.compiler
+import spack.package_base
+
+# Local "type" for type hints
+Path = Union[str, pathlib.Path]
+
+
+class CompilerPackage(spack.package_base.PackageBase):
+    """A Package mixin for all common logic for packages that implement compilers"""
+
+    # TODO: how do these play nicely with other tags
+    tags: Sequence[str] = ["compiler"]
+
+    #: Optional suffix regexes for searching for this type of compiler.
+    #: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
+    #: version suffix for gcc.
+    compiler_suffixes: List[str] = [r"-.*"]
+
+    #: Optional prefix regexes for searching for this compiler
+    compiler_prefixes: List[str] = []
+
+    #: Compiler argument(s) that produces version information
+    #: If multiple arguments, the earlier arguments must produce errors when invalid
+    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
+
+    #: Regex used to extract version from compiler's output
+    compiler_version_regex: str = "(.*)"
+
+    #: Static definition of languages supported by this class
+    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
+
+    def __init__(self, spec: "spack.spec.Spec"):
+        super().__init__(spec)
+        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
+        msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
+        assert set(self.supported_languages) <= set(self.compiler_languages), msg
+
+    @property
+    def supported_languages(self) -> Sequence[str]:
+        """Dynamic definition of languages supported by this package"""
+        return self.compiler_languages
+
+    @classproperty
+    def compiler_names(cls) -> Sequence[str]:
+        """Construct list of compiler names from per-language names"""
+        names = []
+        for language in cls.compiler_languages:
+            names.extend(getattr(cls, f"{language}_names"))
+        return names
+
+    @classproperty
+    def executables(cls) -> Sequence[str]:
+        """Construct executables for external detection from names, prefixes, and suffixes."""
+        regexp_fmt = r"^({0}){1}({2})$"
+        prefixes = [""] + cls.compiler_prefixes
+        suffixes = [""] + cls.compiler_suffixes
+        if sys.platform == "win32":
+            ext = r"\.(?:exe|bat)"
+            suffixes += [suf + ext for suf in suffixes]
+        return [
+            regexp_fmt.format(prefix, re.escape(name), suffix)
+            for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
+        ]
+
+    @classmethod
+    def determine_version(cls, exe: Path):
+        version_argument = cls.compiler_version_argument
+        if isinstance(version_argument, str):
+            version_argument = (version_argument,)
+
+        for va in version_argument:
+            try:
+                output = spack.compiler.get_compiler_version_output(exe, va)
+                match = re.search(cls.compiler_version_regex, output)
+                if match:
+                    return ".".join(match.groups())
+            except spack.util.executable.ProcessError:
+                pass
+            except Exception as e:
+                tty.debug(
+                    f"[{__file__}] Cannot detect a valid version for the executable "
+                    f"{str(exe)}, for package '{cls.name}': {e}"
+                )
+
+    @classmethod
+    def compiler_bindir(cls, prefix: Path) -> Path:
+        """Overridable method for the location of the compiler bindir within the prefix"""
+        return os.path.join(prefix, "bin")
+
+    @classmethod
+    def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
+        """Compute the paths to compiler executables associated with this package
+
+        This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
+        to include with each spec object."""
+        # There are often at least two copies (not symlinks) of each compiler executable in the
+        # same directory: one with a canonical name, e.g. "gfortran", and another one with the
+        # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
+        # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
+        # of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
+        # with canonical names if possible), we iterate over the executables in the reversed sorted
+        # order:
+        # First pass over languages identifies exes that are perfect matches for canonical names
+        # Second pass checks for names with prefix/suffix
+        # Second pass is sorted by language name length because longer named languages
+        # e.g. cxx can often contain the names of shorter named languages
+        # e.g. c (e.g. clang/clang++)
+        paths = {}
+        exes = sorted(exes, reverse=True)
+        languages = {
+            lang: getattr(cls, f"{lang}_names")
+            for lang in sorted(cls.compiler_languages, key=len, reverse=True)
+        }
+        for exe in exes:
+            for lang, names in languages.items():
+                if os.path.basename(exe) in names:
+                    paths[lang] = exe
+                    break
+            else:
+                for lang, names in languages.items():
+                    if any(name in os.path.basename(exe) for name in names):
+                        paths[lang] = exe
+                        break
+
+        return paths
+
+    @classmethod
+    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
+        # path determination is separated so it can be reused in subclasses
+        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
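Note on the new file above: `executables` builds detection regexes from per-language name lists that subclasses are expected to define (the `*_names` attributes are assumptions inferred from the `getattr(cls, f"{language}_names")` lookups). A standalone sketch of that regex generation:

```python
import itertools
import re

# Per-language names as a hypothetical compiler package might define them.
compiler_names = ["gcc", "g++", "gfortran"]
prefixes = [""]          # [""] + compiler_prefixes
suffixes = ["", "-.*"]   # [""] + compiler_suffixes
regexp_fmt = r"^({0}){1}({2})$"

patterns = [
    regexp_fmt.format(p, re.escape(n), s)
    for p, n, s in itertools.product(prefixes, compiler_names, suffixes)
]
assert any(re.match(rx, "gcc-12") for rx in patterns)    # suffix form matches
assert not any(re.match(rx, "mygcc") for rx in patterns)  # no prefix regexes defined
```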
@@ -110,9 +110,8 @@ def cuda_flags(arch_list):
     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
     # (gcc, clang). We don't restrict %gcc and %clang conflicts to
-    # platform=linux, since they should also apply to platform=cray, and may
-    # apply to platform=darwin. We currently do not provide conflicts for
-    # platform=darwin with %apple-clang.
+    # platform=linux, since they may apply to platform=darwin. We currently
+    # do not provide conflicts for platform=darwin with %apple-clang.
 
     # Linux x86_64 compiler conflicts from here:
     # https://gist.github.com/ax3l/9489132
@@ -137,11 +136,14 @@ def cuda_flags(arch_list):
     conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
+    conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
     conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
-    conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
+    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@18:", when="+cuda ^cuda@:12.4")
 
     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -846,6 +846,7 @@ def scalapack_libs(self):
             "^mpich@2:" in spec_root
             or "^cray-mpich" in spec_root
             or "^mvapich2" in spec_root
+            or "^mvapich" in spec_root
             or "^intel-mpi" in spec_root
             or "^intel-oneapi-mpi" in spec_root
             or "^intel-parallel-studio" in spec_root
@@ -936,23 +937,6 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_o
             "I_MPI_ROOT": self.normalize_path("mpi"),
         }
 
-        # CAUTION - SIMILAR code in:
-        #     var/spack/repos/builtin/packages/mpich/package.py
-        #     var/spack/repos/builtin/packages/openmpi/package.py
-        #     var/spack/repos/builtin/packages/mvapich2/package.py
-        #
-        # On Cray, the regular compiler wrappers *are* the MPI wrappers.
-        if "platform=cray" in self.spec:
-            # TODO: Confirm
-            wrapper_vars.update(
-                {
-                    "MPICC": compilers_of_client["CC"],
-                    "MPICXX": compilers_of_client["CXX"],
-                    "MPIF77": compilers_of_client["F77"],
-                    "MPIF90": compilers_of_client["F90"],
-                }
-            )
-        else:
         compiler_wrapper_commands = self.mpi_compiler_wrappers
         wrapper_vars.update(
             {
@@ -24,7 +24,6 @@ class MSBuildPackage(spack.package_base.PackageBase):
     build_system("msbuild")
     conflicts("platform=linux", when="build_system=msbuild")
     conflicts("platform=darwin", when="build_system=msbuild")
-    conflicts("platform=cray", when="build_system=msbuild")
 
 
 @spack.builder.builder("msbuild")
@@ -24,7 +24,6 @@ class NMakePackage(spack.package_base.PackageBase):
     build_system("nmake")
     conflicts("platform=linux", when="build_system=nmake")
     conflicts("platform=darwin", when="build_system=nmake")
-    conflicts("platform=cray", when="build_system=nmake")
 
 
 @spack.builder.builder("nmake")
@@ -145,7 +144,7 @@ def install(self, pkg, spec, prefix):
         opts += self.nmake_install_args()
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
-        opts.append(self.define("PREFIX", prefix))
+        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -14,7 +14,7 @@
 from llnl.util.link_tree import LinkTree
 
 from spack.build_environment import dso_suffix
-from spack.directives import conflicts, license, variant
+from spack.directives import conflicts, license, redistribute, variant
 from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -30,15 +30,14 @@ class IntelOneApiPackage(Package):
 
     # oneAPI license does not allow mirroring outside of the
     # organization (e.g. University/Company).
-    redistribute_source = False
+    redistribute(source=False, binary=False)
 
     for c in [
         "target=ppc64:",
         "target=ppc64le:",
         "target=aarch64:",
-        "platform=darwin:",
-        "platform=cray:",
-        "platform=windows:",
+        "platform=darwin",
+        "platform=windows",
     ]:
         conflicts(c, msg="This package is only available for x86_64 and Linux")
 
@@ -138,16 +138,21 @@ def view_file_conflicts(self, view, merge_map):
|
|||||||
return conflicts
|
return conflicts
|
||||||
|
|
||||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||||
# Patch up shebangs to the python linked in the view only if python is built by Spack.
|
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
|
||||||
if not self.extendee_spec or self.extendee_spec.external:
|
# view.
|
||||||
|
if not self.extendee_spec:
|
||||||
|
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||||
|
|
||||||
|
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||||
|
|
||||||
|
if python.external:
|
||||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||||
|
|
||||||
# We only patch shebangs in the bin directory.
|
# We only patch shebangs in the bin directory.
|
||||||
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
|
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
|
||||||
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
|
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
|
||||||
|
|
||||||
bin_dir = self.spec.prefix.bin
|
bin_dir = self.spec.prefix.bin
|
||||||
python_prefix = self.extendee_spec.prefix
|
|
||||||
for src, dst in merge_map.items():
|
for src, dst in merge_map.items():
|
||||||
if skip_if_exists and os.path.lexists(dst):
|
if skip_if_exists and os.path.lexists(dst):
|
||||||
continue
|
continue
|
||||||
@@ -168,7 +173,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
|||||||
copied_files[(s.st_dev, s.st_ino)] = dst
|
copied_files[(s.st_dev, s.st_ino)] = dst
|
||||||
shutil.copy2(src, dst)
|
shutil.copy2(src, dst)
|
||||||
fs.filter_file(
|
fs.filter_file(
|
||||||
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
|
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
view.link(src, dst)
|
view.link(src, dst)
|
||||||
@@ -199,14 +204,13 @@ def remove_files_from_view(self, view, merge_map):
|
|||||||
ignore_namespace = True
|
ignore_namespace = True
|
||||||
|
|
||||||
bin_dir = self.spec.prefix.bin
|
bin_dir = self.spec.prefix.bin
|
||||||
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
|
|
||||||
|
|
||||||
to_remove = []
|
to_remove = []
|
||||||
for src, dst in merge_map.items():
|
for src, dst in merge_map.items():
|
||||||
if ignore_namespace and namespace_init(dst):
|
if ignore_namespace and namespace_init(dst):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
-                if global_view or not fs.path_contains_subdirectory(src, bin_dir):
+                if not fs.path_contains_subdirectory(src, bin_dir):
                     to_remove.append(dst)
                 else:
                     os.remove(dst)

@@ -362,6 +366,12 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
             return f"https://pypi.org/simple/{name}/"
         return None

+    @property
+    def python_spec(self):
+        """Get python-venv if it exists or python otherwise."""
+        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+        return python
+
     @property
     def headers(self) -> HeaderList:
         """Discover header files in platlib."""

@@ -371,8 +381,9 @@ def headers(self) -> HeaderList:

         # Headers should only be in include or platlib, but no harm in checking purelib too
         include = self.prefix.join(self.spec["python"].package.include).join(name)
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
-        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
+        python = self.python_spec
+        platlib = self.prefix.join(python.package.platlib).join(name)
+        purelib = self.prefix.join(python.package.purelib).join(name)

         headers_list = map(fs.find_all_headers, [include, platlib, purelib])
         headers = functools.reduce(operator.add, headers_list)

@@ -391,8 +402,9 @@ def libs(self) -> LibraryList:
         name = self.spec.name[3:]

         # Libraries should only be in platlib, but no harm in checking purelib too
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
-        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
+        python = self.python_spec
+        platlib = self.prefix.join(python.package.platlib).join(name)
+        purelib = self.prefix.join(python.package.purelib).join(name)

         find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
         libs_list = map(find_all_libraries, [platlib, purelib])

@@ -504,6 +516,8 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:

     def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         """Install everything from build directory."""
+        pip = spec["python"].command
+        pip.add_default_arg("-m", "pip")
+
         args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

@@ -519,14 +533,6 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         else:
             args.append(".")

-        pip = spec["python"].command
-        # Hide user packages, since we don't have build isolation. This is
-        # necessary because pip / setuptools may run hooks from arbitrary
-        # packages during the build. There is no equivalent variable to hide
-        # system packages, so this is not reliable for external Python.
-        pip.add_default_env("PYTHONNOUSERSITE", "1")
-        pip.add_default_arg("-m")
-        pip.add_default_arg("pip")
         with fs.working_dir(self.build_directory):
             pip(*args)
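A quick note on the `python_spec` property introduced above: `Spec.dependencies()` returns a list, and an empty list is falsy, so chaining the two lookups with `or` makes `python-venv` the preferred provider with `python` as the fallback. A standalone sketch of the pattern, with plain lists standing in for the dependency lookups:

```python
# Minimal sketch of the fallback pattern in python_spec (illustrative only;
# plain lists stand in for the two Spec.dependencies() calls).
def first_dep(venv_deps: list, python_deps: list):
    # `or` picks the first non-empty list; star-unpacking takes its first element.
    dep, *_ = venv_deps or python_deps
    return dep

assert first_dep(["python-venv"], ["python"]) == "python-venv"
assert first_dep([], ["python"]) == "python"
```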
@@ -80,6 +80,7 @@
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.package_base import PackageBase
+from spack.util.environment import EnvironmentModifications


 class ROCmPackage(PackageBase):

@@ -156,30 +157,23 @@ def hip_flags(amdgpu_target):
         archs = ",".join(amdgpu_target)
         return "--amdgpu-target={0}".format(archs)

-    # ASAN
-    @staticmethod
-    def asan_on(env, llvm_path):
+    def asan_on(self, env: EnvironmentModifications):
+        llvm_path = self.spec["llvm-amdgpu"].prefix
         env.set("CC", llvm_path + "/bin/clang")
         env.set("CXX", llvm_path + "/bin/clang++")
         env.set("ASAN_OPTIONS", "detect_leaks=0")

-        for root, dirs, files in os.walk(llvm_path):
+        for root, _, files in os.walk(llvm_path):
             if "libclang_rt.asan-x86_64.so" in files:
                 asan_lib_path = root
         env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
-        SET_DWARF_VERSION_4 = ""
-        try:
-            # This will throw an error if imported on a non-Linux platform.
-            import distro
-
-            distname = distro.id()
-        except ImportError:
-            distname = "unknown"
-        if "rhel" in distname or "sles" in distname:
+        if "rhel" in self.spec.os or "sles" in self.spec.os:
             SET_DWARF_VERSION_4 = "-gdwarf-5"
+        else:
+            SET_DWARF_VERSION_4 = ""

-        env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
-        env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
+        env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
         env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")

     # HIP version vs Architecture
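The reworked `asan_on` is now an instance method: it derives `llvm_path` from the package's own `llvm-amdgpu` dependency and keys the DWARF flag off `self.spec.os` instead of probing the host with `distro`, so the decision follows the spec rather than the build machine. A hedged sketch of how a package might invoke it (the `+asan` variant name is an assumption for illustration, not part of this diff):

```python
# Hypothetical caller, assuming a ROCmPackage subclass with an "asan" variant.
def setup_build_environment(self, env):
    if self.spec.satisfies("+asan"):  # variant name is illustrative
        self.asan_on(env)  # env is an EnvironmentModifications instance
```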
@@ -16,8 +16,8 @@
 import tempfile
 import time
 import zipfile
-from collections import namedtuple
-from typing import List, Optional
+from collections import defaultdict, namedtuple
+from typing import Dict, List, Optional, Set, Tuple
 from urllib.error import HTTPError, URLError
 from urllib.parse import urlencode
 from urllib.request import HTTPHandler, Request, build_opener

@@ -44,6 +44,7 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters import CDash, CDashConfiguration
+from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions

@@ -113,54 +114,24 @@ def _remove_reserved_tags(tags):
     return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


-def _spec_deps_key(s):
+def _spec_ci_label(s):
     return f"{s.name}/{s.dag_hash(7)}"


-def _add_dependency(spec_label, dep_label, deps):
-    if spec_label == dep_label:
-        return
-    if spec_label not in deps:
-        deps[spec_label] = set()
-    deps[spec_label].add(dep_label)
-
-
-def _get_spec_dependencies(specs, deps, spec_labels):
-    spec_deps_obj = _compute_spec_deps(specs)
-
-    if spec_deps_obj:
-        dependencies = spec_deps_obj["dependencies"]
-        specs = spec_deps_obj["specs"]
-
-        for entry in specs:
-            spec_labels[entry["label"]] = entry["spec"]
-
-        for entry in dependencies:
-            _add_dependency(entry["spec"], entry["depends"], deps)
-
-
-def stage_spec_jobs(specs):
-    """Take a set of release specs and generate a list of "stages", where the
-    jobs in any stage are dependent only on jobs in previous stages. This
-    allows us to maximize build parallelism within the gitlab-ci framework.
+PlainNodes = Dict[str, spack.spec.Spec]
+PlainEdges = Dict[str, Set[str]]
+
+
+def stage_spec_jobs(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges, List[Set[str]]]:
+    """Turn a DAG into a list of stages (set of nodes), the list is ordered topologically, so that
+    each node in a stage has dependencies only in previous stages.

     Arguments:
-        specs (Iterable): Specs to build
-
-    Returns: A tuple of information objects describing the specs, dependencies
-        and stages:
-
-        spec_labels: A dictionary mapping the spec labels (which are formatted
-            as pkg-name/hash-prefix) to concrete specs.
-
-        deps: A dictionary where the keys should also have appeared as keys in
-            the spec_labels dictionary, and the values are the set of
-            dependencies for that spec.
-
-        stages: An ordered list of sets, each of which contains all the jobs to
-            built in that stage. The jobs are expressed in the same format as
-            the keys in the spec_labels and deps objects.
+        specs: Specs to build
+
+    Returns: A tuple (nodes, edges, stages) where ``nodes`` maps labels to specs, ``edges`` maps
+        labels to a set of labels of dependencies, and ``stages`` is a topologically ordered list
+        of sets of labels.
     """

     # The convenience method below, "_remove_satisfied_deps()", does not modify

@@ -177,17 +148,12 @@ def _remove_satisfied_deps(deps, satisfied_list):

         return new_deps

-    deps = {}
-    spec_labels = {}
-
-    _get_spec_dependencies(specs, deps, spec_labels)
-
-    # Save the original deps, as we need to return them at the end of the
-    # function. In the while loop below, the "dependencies" variable is
-    # overwritten rather than being modified each time through the loop,
-    # thus preserving the original value of "deps" saved here.
-    dependencies = deps
-    unstaged = set(spec_labels.keys())
+    nodes, edges = _extract_dag(specs)
+
+    # Save the original edges, as we need to return them at the end of the function. In the loop
+    # below, the "dependencies" variable is rebound rather than mutated, so "edges" is not mutated.
+    dependencies = edges
+    unstaged = set(nodes.keys())
     stages = []

     while dependencies:

@@ -203,7 +169,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
     if unstaged:
         stages.append(unstaged.copy())

-    return spec_labels, deps, stages
+    return nodes, edges, stages


 def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
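For readers tracing the staging loop: `stage_spec_jobs` repeatedly peels off every label whose dependencies are already satisfied, which is a Kahn-style topological layering. A self-contained sketch of the idea, assuming `edges` maps each label to the set of labels it depends on:

```python
from typing import Dict, List, Set

def stage(edges: Dict[str, Set[str]], labels: Set[str]) -> List[Set[str]]:
    """Each stage holds labels whose dependencies all sit in earlier stages."""
    stages: List[Set[str]] = []
    satisfied: Set[str] = set()
    unstaged = set(labels)
    while unstaged:
        # Ready = labels with no dependency outside the satisfied set.
        ready = {label for label in unstaged if edges.get(label, set()) <= satisfied}
        if not ready:
            raise ValueError("dependency cycle")  # would otherwise loop forever
        stages.append(ready)
        satisfied |= ready
        unstaged -= ready
    return stages

# Toy DAG: c depends on b, b depends on a -> three single-node stages.
print(stage({"b": {"a"}, "c": {"b"}}, {"a", "b", "c"}))  # [{'a'}, {'b'}, {'c'}]
```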
@@ -235,87 +201,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
         tty.msg(msg)


-def _compute_spec_deps(spec_list):
-    """
-    Computes all the dependencies for the spec(s) and generates a JSON
-    object which provides both a list of unique spec names as well as a
-    comprehensive list of all the edges in the dependency graph. For
-    example, given a single spec like 'readline@7.0', this function
-    generates the following JSON object:
-
-    .. code-block:: JSON
-
-       {
-           "dependencies": [
-               {
-                   "depends": "readline/ip6aiun",
-                   "spec": "readline/ip6aiun"
-               },
-               {
-                   "depends": "ncurses/y43rifz",
-                   "spec": "readline/ip6aiun"
-               },
-               {
-                   "depends": "ncurses/y43rifz",
-                   "spec": "readline/ip6aiun"
-               },
-               {
-                   "depends": "pkgconf/eg355zb",
-                   "spec": "ncurses/y43rifz"
-               },
-               {
-                   "depends": "pkgconf/eg355zb",
-                   "spec": "readline/ip6aiun"
-               }
-           ],
-           "specs": [
-               {
-                   "spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
-                   "label": "readline/ip6aiun"
-               },
-               {
-                   "spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
-                   "label": "ncurses/y43rifz"
-               },
-               {
-                   "spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
-                   "label": "pkgconf/eg355zb"
-               }
-           ]
-       }
-
-    """
-    spec_labels = {}
-
-    specs = []
-    dependencies = []
-
-    def append_dep(s, d):
-        dependencies.append({"spec": s, "depends": d})
-
-    for spec in spec_list:
-        for s in spec.traverse(deptype="all"):
-            if s.external:
-                tty.msg(f"Will not stage external pkg: {s}")
-                continue
-
-            skey = _spec_deps_key(s)
-            spec_labels[skey] = s
-
-            for d in s.dependencies(deptype="all"):
-                dkey = _spec_deps_key(d)
-                if d.external:
-                    tty.msg(f"Will not stage external dep: {d}")
-                    continue
-
-                append_dep(skey, dkey)
-
-    for spec_label, concrete_spec in spec_labels.items():
-        specs.append({"label": spec_label, "spec": concrete_spec})
-
-    deps_json_obj = {"specs": specs, "dependencies": dependencies}
-
-    return deps_json_obj
+def _extract_dag(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges]:
+    """Extract a sub-DAG as plain old Python objects with external nodes removed."""
+    nodes: PlainNodes = {}
+    edges: PlainEdges = defaultdict(set)
+
+    for edge in traverse.traverse_edges(specs, cover="edges"):
+        if (edge.parent and edge.parent.external) or edge.spec.external:
+            continue
+        child_id = _spec_ci_label(edge.spec)
+        nodes[child_id] = edge.spec
+        if edge.parent:
+            parent_id = _spec_ci_label(edge.parent)
+            nodes[parent_id] = edge.parent
+            edges[parent_id].add(child_id)
+
+    return nodes, edges


 def _spec_matches(spec, match_string):
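The plain-object DAG that replaces the old JSON blob is just two dictionaries; a toy example of the shape `_extract_dag` returns (the labels are made up):

```python
from collections import defaultdict

# nodes: label -> spec; edges: parent label -> set of child labels (hypothetical labels).
nodes = {"readline/ip6aiun": "<Spec readline>", "ncurses/y43rifz": "<Spec ncurses>"}
edges = defaultdict(set)
edges["readline/ip6aiun"].add("ncurses/y43rifz")

# defaultdict(set) lets _extract_dag call edges[parent_id].add(child_id)
# without first checking whether the parent already has an entry.
```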
@@ -327,7 +228,7 @@ def _format_job_needs(
 ):
     needs_list = []
     for dep_job in dep_jobs:
-        dep_spec_key = _spec_deps_key(dep_job)
+        dep_spec_key = _spec_ci_label(dep_job)
         rebuild = rebuild_decisions[dep_spec_key].rebuild

         if not prune_dag or rebuild:

@@ -783,6 +684,22 @@ def generate_gitlab_ci_yaml(
             "instead.",
         )

+    def ensure_expected_target_path(path):
+        """Returns passed paths with all Windows path separators exchanged
+        for posix separators only if copy_only_pipeline is enabled
+
+        This is required as copy_only_pipelines are a unique scenario where
+        the generate job and child pipelines are run on different platforms.
+        To make this compatible w/ Windows, we cannot write Windows style path separators
+        that will be consumed on by the Posix copy job runner.
+
+        TODO (johnwparent): Refactor config + cli read/write to deal only in posix
+        style paths
+        """
+        if copy_only_pipeline and path:
+            path = path.replace("\\", "/")
+        return path
+
     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
     deprecated_mirror_config = False
     buildcache_destination = None

@@ -906,7 +823,7 @@ def generate_gitlab_ci_yaml(
             if scope not in include_scopes and scope not in env_includes:
                 include_scopes.insert(0, scope)
         env_includes.extend(include_scopes)
-        env_yaml_root["spack"]["include"] = env_includes
+        env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

         if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
             env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")

@@ -1327,6 +1244,9 @@ def main_script_replacements(cmd):
         "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
         "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
     }
+    output_vars = output_object["variables"]
+    for item, val in output_vars.items():
+        output_vars[item] = ensure_expected_target_path(val)

     # TODO: Remove this block in Spack 0.23
     if deprecated_mirror_config and remote_mirror_override:

@@ -1383,7 +1303,6 @@ def main_script_replacements(cmd):
     sorted_output = {}
     for output_key, output_value in sorted(output_object.items()):
         sorted_output[output_key] = output_value
-
     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
         display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)

@@ -1578,6 +1497,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


+def win_quote(quote_str: str) -> str:
+    if IS_WINDOWS:
+        quote_str = f'"{quote_str}"'
+    return quote_str
+
+
 def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir
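`win_quote`, added just above, is a no-op on POSIX and wraps its argument in double quotes on Windows, so the same argument lists can be rendered for either shell. A quick illustration, toggling the flag for demonstration:

```python
IS_WINDOWS = False  # module-level flag in the real code; forced here for illustration

def win_quote(quote_str: str) -> str:
    if IS_WINDOWS:
        quote_str = f'"{quote_str}"'
    return quote_str

print(win_quote("/abc1234"))  # /abc1234 on POSIX
IS_WINDOWS = True
print(win_quote("/abc1234"))  # "/abc1234" on Windows
```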
@@ -1600,7 +1525,7 @@ def download_and_extract_artifacts(url, work_dir):
     request = Request(url, headers=headers)
     request.get_method = lambda: "GET"

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
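Without an explicit `timeout`, `OpenerDirector.open` inherits the global socket default, which is usually unbounded, so a stalled endpoint could hang a CI job indefinitely. A minimal sketch of the pattern (the URL and the 45-second value are placeholders; the real constant is imported from `spack.reporters.cdash`):

```python
from urllib.request import HTTPHandler, Request, build_opener

SPACK_CDASH_TIMEOUT = 45  # placeholder value for this sketch

opener = build_opener(HTTPHandler)
request = Request("https://cdash.example.org/api")  # placeholder URL
# Raises a timeout error instead of blocking forever on a hung server.
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
```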
@@ -2042,9 +1967,9 @@ def compose_command_err_handling(args):
     # but we need to handle EXEs (git, etc) ourselves
     catch_exe_failure = (
         """
-if ($LASTEXITCODE -ne 0){
-    throw "Command {} has failed"
-}
+if ($LASTEXITCODE -ne 0){{
+    throw 'Command {} has failed'
+}}
 """
         if IS_WINDOWS
         else ""
|
|||||||
def args(self):
|
def args(self):
|
||||||
return [
|
return [
|
||||||
"--cdash-upload-url",
|
"--cdash-upload-url",
|
||||||
self.upload_url,
|
win_quote(self.upload_url),
|
||||||
"--cdash-build",
|
"--cdash-build",
|
||||||
self.build_name,
|
win_quote(self.build_name),
|
||||||
"--cdash-site",
|
"--cdash-site",
|
||||||
self.site,
|
win_quote(self.site),
|
||||||
"--cdash-buildstamp",
|
"--cdash-buildstamp",
|
||||||
self.build_stamp,
|
win_quote(self.build_stamp),
|
||||||
]
|
]
|
||||||
|
|
||||||
@property # type: ignore
|
@property # type: ignore
|
||||||
@@ -2348,7 +2273,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
|||||||
|
|
||||||
request = Request(url, data=enc_data, headers=headers)
|
request = Request(url, data=enc_data, headers=headers)
|
||||||
|
|
||||||
response = opener.open(request)
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
response_code = response.getcode()
|
response_code = response.getcode()
|
||||||
|
|
||||||
if response_code not in [200, 201]:
|
if response_code not in [200, 201]:
|
||||||
@@ -2394,7 +2319,7 @@ def populate_buildgroup(self, job_names):
|
|||||||
request = Request(url, data=enc_data, headers=headers)
|
request = Request(url, data=enc_data, headers=headers)
|
||||||
request.get_method = lambda: "PUT"
|
request.get_method = lambda: "PUT"
|
||||||
|
|
||||||
response = opener.open(request)
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
response_code = response.getcode()
|
response_code = response.getcode()
|
||||||
|
|
||||||
if response_code != 200:
|
if response_code != 200:
|
||||||
|
|||||||
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):
         variants (bool): Show variants with specs
         indent (int): indent each line this much
         groups (bool): display specs grouped by arch/compiler (default True)
-        decorators (dict): dictionary mappng specs to decorators
-        header_callback (typing.Callable): called at start of arch/compiler groups
+        decorator (typing.Callable): function to call to decorate specs
         all_headers (bool): show headers even when arch/compiler aren't defined
         output (typing.IO): A file object to write to. Default is ``sys.stdout``

@@ -384,15 +383,13 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + ffmt + vfmt

-    transform = {"package": decorator, "fullpackage": decorator}
-
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
         if hashes:
             string += gray_hash(s, hlen) + " "
         string += depth * "    "
-        string += s.cformat(format_string, transform=transform)
+        string += decorator(s, s.cformat(format_string))
         return string

     def format_list(specs):

@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):
     return [x for x in specs if x.dag_hash() in hashes]


-def print_how_many_pkgs(specs, pkg_type=""):
+def print_how_many_pkgs(specs, pkg_type="", suffix=""):
     """Given a list of specs, this will print a message about how many
     specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
         category, e.g. if pkg_type is "installed" then the message
         would be "3 installed packages"
     """
-    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


 def spack_is_git_repo():
@@ -84,7 +84,7 @@ def externals(parser, args):
         return

     pkgs = args.name or spack.repo.PATH.all_package_names()
-    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
     _process_reports(reports)
@@ -13,7 +13,6 @@
 import shutil
 import sys
 import tempfile
-import urllib.request
 from typing import Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty

@@ -54,6 +53,7 @@
 from spack.oci.oci import (
     copy_missing_layers_with_retry,
     get_manifest_and_config_with_retry,
+    list_tags,
     upload_blob_with_retry,
     upload_manifest_with_retry,
 )

@@ -133,6 +133,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
         help="when pushing to an OCI registry, tag an image containing all root specs and their "
         "runtime dependencies",
     )
+    push.add_argument(
+        "--private",
+        action="store_true",
+        help="for a private mirror, include non-redistributable packages",
+    )
     arguments.add_common_arguments(push, ["specs", "jobs"])
     push.set_defaults(func=push_fn)
@@ -367,6 +372,25 @@ def _make_pool() -> MaybePool:
         return NoPool()


+def _skip_no_redistribute_for_public(specs):
+    remaining_specs = list()
+    removed_specs = list()
+    for spec in specs:
+        if spec.package.redistribute_binary:
+            remaining_specs.append(spec)
+        else:
+            removed_specs.append(spec)
+    if removed_specs:
+        colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
+        tty.debug(
+            "The following specs will not be added to the binary cache"
+            " because they cannot be redistributed:\n"
+            f"{colified_output}\n"
+            "You can use `--private` to include them."
+        )
+    return remaining_specs
+
+
 def push_fn(args):
     """create a binary package and push it to a mirror"""
     if args.spec_file:
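`_skip_no_redistribute_for_public` keys off the package's `redistribute_binary` attribute. A hedged sketch of what the package side might look like (the `redistribute` directive usage and the package name are assumptions for illustration):

```python
# Hypothetical package opting out of binary redistribution; with this in place,
# a public `spack buildcache push` (without --private) would skip it.
from spack.package import *

class ProprietaryBlob(Package):
    """Example package whose license forbids shipping binaries."""
    redistribute(binary=False)  # assumed to make spec.package.redistribute_binary False
```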
@@ -417,6 +441,8 @@ def push_fn(args):
         root="package" in args.things_to_install,
         dependencies="dependencies" in args.things_to_install,
     )
+    if not args.private:
+        specs = _skip_no_redistribute_for_public(specs)

     # When pushing multiple specs, print the url once ahead of time, as well as how
     # many specs are being pushed.

@@ -830,10 +856,7 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:


 def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
-    request = urllib.request.Request(url=image_ref.tags_url())
-    response = spack.oci.opener.urlopen(request)
-    spack.oci.opener.ensure_status(request, response, 200)
-    tags = json.load(response)["tags"]
+    tags = list_tags(image_ref)

     # Fetch all image config files in parallel
     spec_dicts = pool.starmap(
@@ -31,7 +31,6 @@
 level = "long"

 SPACK_COMMAND = "spack"
-MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100

@@ -40,6 +39,12 @@ def deindent(desc):
     return desc.replace("    ", "")


+def unicode_escape(path: str) -> str:
+    """Returns transformed path with any unicode
+    characters replaced with their corresponding escapes"""
+    return path.encode("unicode-escape").decode("utf-8")
+
+
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")
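`unicode_escape` keeps non-ASCII environment paths representable in the generated shell commands: non-ASCII characters become backslash escapes while plain ASCII passes through untouched. A quick check (paths are illustrative):

```python
def unicode_escape(path: str) -> str:
    return path.encode("unicode-escape").decode("utf-8")

print(unicode_escape("/builds/café"))  # /builds/caf\xe9
print(unicode_escape("/builds/env"))   # /builds/env
```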
@@ -551,75 +556,35 @@ def ci_rebuild(args):
     # No hash match anywhere means we need to rebuild spec

     # Start with spack arguments
-    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
+    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

     config = cfg.get("config")
     if not config["verify_ssl"]:
         spack_cmd.append("-k")

-    install_args = []
+    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

     can_verify = spack_ci.can_verify_binaries()
     verify_binaries = can_verify and spack_is_pr_pipeline is False
     if not verify_binaries:
         install_args.append("--no-check-signature")

-    slash_hash = "/{}".format(job_spec.dag_hash())
+    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

-    # Arguments when installing dependencies from cache
-    deps_install_args = install_args
-
     # Arguments when installing the root from sources
-    root_install_args = install_args + [
-        "--keep-stage",
-        "--only=package",
-        "--use-buildcache=package:never,dependencies:only",
-    ]
+    deps_install_args = install_args + ["--only=dependencies"]
+    root_install_args = install_args + ["--keep-stage", "--only=package"]
     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
         root_install_args.extend(cdash_handler.args())
-    root_install_args.append(slash_hash)
-
-    # ["x", "y"] -> "'x' 'y'"
-    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
-        [
-            SPACK_COMMAND,
-            "-e",
-            env.path,
-            "env",
-            "depfile",
-            "-o",
-            "Makefile",
-            "--use-buildcache=package:never,dependencies:only",
-            slash_hash,  # limit to spec we're building
-        ],
-        [
-            # --output-sync requires GNU make 4.x.
-            # Old make errors when you pass it a flag it doesn't recognize,
-            # but it doesn't error or warn when you set unrecognized flags in
-            # this variable.
-            "export",
-            "GNUMAKEFLAGS=--output-sync=recurse",
-        ],
-        [
-            MAKE_COMMAND,
-            "SPACK={}".format(args_to_string(spack_cmd)),
-            "SPACK_COLOR=always",
-            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
-            "-j$(nproc)",
-            "install-deps/{}".format(
-                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
-                    "{name}-{version}-{hash}"
-                )
-            ),
-        ],
-        spack_cmd + ["install"] + root_install_args,
+        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
+        spack_cmd + deps_install_args + [slash_hash],
+        spack_cmd + root_install_args + [slash_hash],
     ]

     tty.debug("Installing {0} from source".format(job_spec.name))
     install_exit_code = spack_ci.process_command("install", commands, repro_dir)
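The net effect of this hunk: the depfile/`make -j` pipeline for installing dependencies is dropped in favor of two plain `spack install` invocations, one limited to dependencies and one to the package itself. Roughly what the rebuilt `commands` list expands to (the paths and the hash are placeholders):

```python
# Illustrative expansion of the new `commands` list (all values are placeholders).
commands = [
    ["spack", "-e", "/builds/env", "bootstrap", "now"],
    ["spack", "--color=always", "--backtrace", "--verbose", "install",
     "--use-buildcache=package:never,dependencies:only",
     "--only=dependencies", "/abc1234"],
    ["spack", "--color=always", "--backtrace", "--verbose", "install",
     "--use-buildcache=package:never,dependencies:only",
     "--keep-stage", "--only=package", "/abc1234"],
]
```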
@@ -106,7 +106,8 @@ def clean(parser, args):

     # Then do the cleaning falling through the cases
     if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = spack.cmd.parse_specs(args.specs, concretize=False)
+        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))
@@ -563,12 +563,13 @@ def add_concretizer_args(subparser):
         help="reuse installed packages/buildcaches when possible",
     )
     subgroup.add_argument(
+        "--fresh-roots",
         "--reuse-deps",
         action=ConfigSetAction,
         dest="concretizer:reuse",
         const="dependencies",
         default=None,
-        help="reuse installed dependencies only",
+        help="concretize with fresh roots and reused dependencies",
     )
     subgroup.add_argument(
         "--deprecated",
@@ -41,7 +41,7 @@ def setup_parser(subparser):
     )


-class AreDepsInstalledVisitor:
+class AreDepsInstalledVisitor(traverse.AbstractVisitor):
     def __init__(self, context: Context = Context.BUILD):
         if context == Context.BUILD:
             # TODO: run deps shouldn't be required for build env.

@@ -53,27 +53,27 @@ def __init__(self, context: Context = Context.BUILD):

         self.has_uninstalled_deps = False

-    def accept(self, item):
+    def accept(self, item: traverse.EdgeAndDepth) -> bool:
         # The root may be installed or uninstalled.
-        if item.depth == 0:
+        if item[1] == 0:
             return True

         # Early exit after we've seen an uninstalled dep.
         if self.has_uninstalled_deps:
             return False

-        spec = item.edge.spec
+        spec = item[0].spec
         if not spec.external and not spec.installed:
             self.has_uninstalled_deps = True
             return False

         return True

-    def neighbors(self, item):
+    def neighbors(self, item: traverse.EdgeAndDepth):
         # Direct deps: follow build & test edges.
         # Transitive deps: follow link / run.
-        depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
-        return item.edge.spec.edges_to_dependencies(depflag=depflag)
+        depflag = self.direct_deps if item[1] == 0 else dt.LINK | dt.RUN
+        return item[0].spec.edges_to_dependencies(depflag=depflag)


 def emulate_env_utility(cmd_name, context: Context, args):
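The visitor now indexes `item` positionally: under the typed `traverse.EdgeAndDepth` alias the item is handled as a plain `(edge, depth)` pair, so `item[0]` is the edge and `item[1]` the depth. A one-line equivalence, assuming a plain tuple:

```python
# Assuming EdgeAndDepth behaves as a plain (edge, depth) tuple:
item = ("<DependencySpec a -> b>", 2)
edge, depth = item
assert item[0] == edge and item[1] == depth  # positional access replaces .edge / .depth
```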
@@ -10,13 +10,13 @@
 import sys
 import tempfile
 from pathlib import Path
-from typing import Optional
+from typing import List, Optional

 import llnl.string as string
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify
-from llnl.util.tty.color import colorize
+from llnl.util.tty.color import cescape, colorize

 import spack.cmd
 import spack.cmd.common

@@ -61,14 +61,7 @@
 #
 def env_create_setup_parser(subparser):
     """create a new environment"""
-    subparser.add_argument(
-        "env_name",
-        metavar="env",
-        help=(
-            "name of managed environment or directory of the anonymous env "
-            "(when using --dir/-d) to activate"
-        ),
-    )
+    subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
     subparser.add_argument(
         "-d", "--dir", action="store_true", help="create an environment in a specific directory"
     )

@@ -94,6 +87,9 @@ def env_create_setup_parser(subparser):
         default=None,
         help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
     )
+    subparser.add_argument(
+        "--include-concrete", action="append", help="name of old environment to copy specs from"
+    )


 def env_create(args):
@@ -111,19 +107,32 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None

+    include_concrete = None
+    if hasattr(args, "include_concrete"):
+        include_concrete = args.include_concrete
+
     env = _env_create(
         args.env_name,
         init_file=args.envfile,
-        dir=args.dir,
+        dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
         with_view=with_view,
         keep_relative=args.keep_relative,
+        include_concrete=include_concrete,
     )

     # Generate views, only really useful for environments created from spack.lock files.
     env.regenerate_views()


-def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
+def _env_create(
+    name_or_path: str,
+    *,
+    init_file: Optional[str] = None,
+    dir: bool = False,
+    with_view: Optional[str] = None,
+    keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
+):
     """Create a new environment, with an optional yaml description.

     Arguments:

@@ -135,22 +144,31 @@ def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
         keep_relative (bool): if True, develop paths are copied verbatim into
             the new environment file, otherwise they may be made absolute if the
             new environment is in a different location
+        include_concrete (list): list of the included concrete environments
     """
     if not dir:
         env = ev.create(
-            name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+            name_or_path,
+            init_file=init_file,
+            with_view=with_view,
+            keep_relative=keep_relative,
+            include_concrete=include_concrete,
         )
-        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
-        tty.msg("You can activate this environment with:")
-        tty.msg("    spack env activate %s" % (name_or_path))
-        return env
-
-    env = ev.create_in_dir(
-        name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
-    )
-    tty.msg("Created environment in %s" % env.path)
-    tty.msg("You can activate this environment with:")
-    tty.msg("    spack env activate %s" % env.path)
+        tty.msg(
+            colorize(
+                f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
+            )
+        )
+    else:
+        env = ev.create_in_dir(
+            name_or_path,
+            init_file=init_file,
+            with_view=with_view,
+            keep_relative=keep_relative,
+            include_concrete=include_concrete,
+        )
+        tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
+    tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
     return env
@@ -436,6 +454,12 @@ def env_remove_setup_parser(subparser):
     """remove an existing environment"""
     subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
     arguments.add_common_arguments(subparser, ["yes_to_all"])
+    subparser.add_argument(
+        "-f",
+        "--force",
+        action="store_true",
+        help="remove the environment even if it is included in another environment",
+    )


 def env_remove(args):

@@ -445,14 +469,36 @@ def env_remove(args):
     and manifests embedded in repositories should be removed manually.
     """
     read_envs = []
+    valid_envs = []
     bad_envs = []
-    for env_name in args.rm_env:
+    invalid_envs = []
+
+    for env_name in ev.all_environment_names():
         try:
             env = ev.read(env_name)
+            valid_envs.append(env_name)
+
+            if env_name in args.rm_env:
                 read_envs.append(env)
         except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
+            invalid_envs.append(env_name)
+
+            if env_name in args.rm_env:
                 bad_envs.append(env_name)
+
+    # Check if env is linked to another before trying to remove
+    for name in valid_envs:
+        # don't check if environment is included to itself
+        if name == env_name:
+            continue
+        environ = ev.Environment(ev.root(name))
+        if ev.root(env_name) in environ.included_concrete_envs:
+            msg = f'Environment "{env_name}" is being used by environment "{name}"'
+            if args.force:
+                tty.warn(msg)
+            else:
+                tty.die(msg)

     if not args.yes_to_all:
         environments = string.plural(len(args.rm_env), "environment", show_n=False)
         envs = string.comma_and(args.rm_env)
|||||||
@@ -14,6 +14,7 @@
|
|||||||
import spack.cmd as cmd
|
import spack.cmd as cmd
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.repo
|
import spack.repo
|
||||||
|
import spack.store
|
||||||
from spack.cmd.common import arguments
|
from spack.cmd.common import arguments
|
||||||
from spack.database import InstallStatuses
|
from spack.database import InstallStatuses
|
||||||
|
|
||||||
@@ -69,6 +70,12 @@ def setup_parser(subparser):
|
|||||||
|
|
||||||
arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])
|
arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])
|
||||||
|
|
||||||
|
subparser.add_argument(
|
||||||
|
"-r",
|
||||||
|
"--only-roots",
|
||||||
|
action="store_true",
|
||||||
|
help="don't show full list of installed specs in an environment",
|
||||||
|
)
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"-c",
|
"-c",
|
||||||
"--show-concretized",
|
"--show-concretized",
|
||||||
@@ -189,26 +196,22 @@ def query_arguments(args):
|
|||||||
return q_args
|
return q_args
|
||||||
|
|
||||||
|
|
||||||
def setup_env(env):
|
def make_env_decorator(env):
|
||||||
"""Create a function for decorating specs when in an environment."""
|
"""Create a function for decorating specs when in an environment."""
|
||||||
|
|
||||||
def strip_build(seq):
|
roots = set(env.roots())
|
||||||
return set(s.copy(deps=("link", "run")) for s in seq)
|
removed = set(env.removed_specs())
|
||||||
|
|
||||||
added = set(strip_build(env.added_specs()))
|
|
||||||
roots = set(strip_build(env.roots()))
|
|
||||||
removed = set(strip_build(env.removed_specs()))
|
|
||||||
|
|
||||||
def decorator(spec, fmt):
|
def decorator(spec, fmt):
|
||||||
# add +/-/* to show added/removed/root specs
|
# add +/-/* to show added/removed/root specs
|
||||||
if any(spec.dag_hash() == r.dag_hash() for r in roots):
|
if any(spec.dag_hash() == r.dag_hash() for r in roots):
|
||||||
return color.colorize("@*{%s}" % fmt)
|
return color.colorize(f"@*{{{fmt}}}")
|
||||||
elif spec in removed:
|
elif spec in removed:
|
||||||
return color.colorize("@K{%s}" % fmt)
|
return color.colorize(f"@K{{{fmt}}}")
|
||||||
else:
|
else:
|
||||||
return "%s" % fmt
|
return fmt
|
||||||
|
|
||||||
return decorator, added, roots, removed
|
return decorator
|
||||||
|
|
||||||
|
|
||||||
def display_env(env, args, decorator, results):
|
def display_env(env, args, decorator, results):
|
||||||
@@ -223,10 +226,54 @@ def display_env(env, args, decorator, results):
     """
     tty.msg("In environment %s" % env.name)

-    if not env.user_specs:
-        tty.msg("No root specs")
+    num_roots = len(env.user_specs) or "No"
+    tty.msg(f"{num_roots} root specs")
+
+    concrete_specs = {
+        root: concrete_root
+        for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
+    }
+
+    def root_decorator(spec, string):
+        """Decorate root specs with their install status if needed"""
+        concrete = concrete_specs.get(spec)
+        if concrete:
+            status = color.colorize(concrete.install_status().value)
+            hash = concrete.dag_hash()
         else:
-        tty.msg("Root specs")
+            status = color.colorize(spack.spec.InstallStatus.absent.value)
+            hash = "-" * 32
+
+        # TODO: status has two extra spaces on the end of it, but fixing this and other spec
+        # TODO: space format idiosyncrasies is complicated. Fix this eventually
+        status = status[:-2]
+
+        if args.long or args.very_long:
+            hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
+            return f"{status} {hash} {string}"
+        else:
+            return f"{status} {string}"
+
+    with spack.store.STORE.db.read_transaction():
+        cmd.display_specs(
+            env.user_specs,
+            args,
+            # these are overrides of CLI args
+            paths=False,
+            long=False,
+            very_long=False,
+            # these enforce details in the root specs to show what the user asked for
+            namespaces=True,
+            show_flags=True,
+            show_full_compiler=True,
+            decorator=root_decorator,
+            variants=True,
+        )
+
+    print()
+
+    if env.included_concrete_envs:
+        tty.msg("Included specs")
+
     # Root specs cannot be displayed with prefixes, since those are not
     # set for abstract specs. Same for hashes

@@ -236,10 +283,10 @@ def display_env(env, args, decorator, results):
         # Roots are displayed with variants, etc. so that we can see
         # specifically what the user asked for.
         cmd.display_specs(
-            env.user_specs,
+            env.included_user_specs,
             root_args,
             decorator=lambda s, f: color.colorize("@*{%s}" % f),
-            namespaces=True,
+            namespace=True,
             show_flags=True,
             show_full_compiler=True,
             variants=True,

@@ -254,7 +301,7 @@ def display_env(env, args, decorator, results):
     # Display a header for the installed packages section IF there are installed
     # packages. If there aren't any, we'll just end up printing "0 installed packages"
     # later.
-    if results:
+    if results and not args.only_roots:
         tty.msg("Installed packages")
@@ -263,9 +310,10 @@ def find(parser, args):
     results = args.specs(**q_args)

     env = ev.active_environment()
-    decorator = lambda s, f: f
-    if env:
-        decorator, _, roots, _ = setup_env(env)
+    if not env and args.only_roots:
+        tty.die("-r / --only-roots requires an active environment")
+
+    decorator = make_env_decorator(env) if env else lambda s, f: f

     # use groups by default except with format.
     if args.groups is None:

@@ -292,9 +340,12 @@ def find(parser, args):
     if env:
         display_env(env, args, decorator, results)

+    count_suffix = " (not shown)"
+    if not args.only_roots:
         cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+        count_suffix = ""

     # print number of installed packages last (as the list may be long)
     if sys.stdout.isatty() and args.groups:
         pkg_type = "loaded" if args.loaded else "installed"
-        spack.cmd.print_how_many_pkgs(results, pkg_type)
+        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
@@ -50,7 +50,7 @@
     @B{++}, @r{--}, @r{~~}, @B{==}  propagate variants to package dependencies

 architecture variants:
-    @m{platform=platform}            linux, darwin, cray, etc.
+    @m{platform=platform}            linux, darwin, freebsd, windows
     @m{os=operating_system}          specific <operating_system>
     @m{target=target}                specific <target> processor
     @m{arch=platform-os-target}      shortcut for all three above

@@ -263,8 +263,8 @@ def _fmt_name_and_default(variant):
     return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")


-def _fmt_when(when, indent):
-    return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
+def _fmt_when(when: "spack.spec.Spec", indent: int):
+    return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(str(when))}")


 def _fmt_variant_description(variant, width, indent):

@@ -441,7 +441,7 @@ def get_url(version):
         return "No URL"

     url = get_url(preferred) if pkg.has_code else ""
-    line = version("    {0}".format(pad(preferred))) + color.cescape(url)
+    line = version("    {0}".format(pad(preferred))) + color.cescape(str(url))
     color.cwrite(line)

     print()

@@ -464,7 +464,7 @@ def get_url(version):
             continue

         for v, url in vers:
-            line = version("    {0}".format(pad(v))) + color.cescape(url)
+            line = version("    {0}".format(pad(v))) + color.cescape(str(url))
             color.cprint(line)
@@ -475,10 +475,7 @@ def print_virtuals(pkg, args):
     color.cprint(section_title("Virtual Packages: "))
     if pkg.provided:
         for when, specs in reversed(sorted(pkg.provided.items())):
-            line = "    %s provides %s" % (
-                when.colorized(),
-                ", ".join(s.colorized() for s in specs),
-            )
+            line = "    %s provides %s" % (when.cformat(), ", ".join(s.cformat() for s in specs))
             print(line)

     else:
@@ -497,7 +494,9 @@ def print_licenses(pkg, args):
         pad = padder(pkg.licenses, 4)
         for when_spec in pkg.licenses:
             license_identifier = pkg.licenses[when_spec]
-            line = license("    {0}".format(pad(license_identifier))) + color.cescape(when_spec)
+            line = license("    {0}".format(pad(license_identifier))) + color.cescape(
+                str(when_spec)
+            )
             color.cprint(line)
|||||||
@@ -61,7 +61,6 @@ def install_kwargs_from_args(args):
|
|||||||
"dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
|
"dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
|
||||||
"dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
|
"dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
|
||||||
"include_build_deps": args.include_build_deps,
|
"include_build_deps": args.include_build_deps,
|
||||||
"explicit": True, # Use true as a default for install command
|
|
||||||
"stop_at": args.until,
|
"stop_at": args.until,
|
||||||
"unsigned": args.unsigned,
|
"unsigned": args.unsigned,
|
||||||
"install_deps": ("dependencies" in args.things_to_install),
|
"install_deps": ("dependencies" in args.things_to_install),
|
||||||
@@ -473,6 +472,7 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
|
|||||||
require_user_confirmation_for_overwrite(concrete_specs, args)
|
require_user_confirmation_for_overwrite(concrete_specs, args)
|
||||||
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
|
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
|
||||||
|
|
||||||
installs = [(s.package, install_kwargs) for s in concrete_specs]
|
installs = [s.package for s in concrete_specs]
|
||||||
builder = PackageInstaller(installs)
|
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
|
||||||
|
builder = PackageInstaller(installs, install_kwargs)
|
||||||
builder.install()
|
builder.install()
|
||||||
|
|||||||
@@ -71,6 +71,11 @@ def setup_parser(subparser):
         help="the number of versions to fetch for each spec, choose 'all' to"
         " retrieve all versions of each package",
     )
+    create_parser.add_argument(
+        "--private",
+        action="store_true",
+        help="for a private mirror, include non-redistributable packages",
+    )
     arguments.add_common_arguments(create_parser, ["specs"])
     arguments.add_concretizer_args(create_parser)

@@ -359,7 +364,6 @@ def concrete_specs_from_user(args):
     specs = filter_externals(specs)
     specs = list(set(specs))
     specs.sort(key=lambda s: (s.name, s.version))
-    specs, _ = lang.stable_partition(specs, predicate_fn=not_excluded_fn(args))
     return specs

@@ -404,36 +408,50 @@ def concrete_specs_from_cli_or_file(args):
     return specs


-def not_excluded_fn(args):
-    """Return a predicate that evaluate to True if a spec was not explicitly
-    excluded by the user.
-    """
-    exclude_specs = []
-    if args.exclude_file:
-        exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
-    if args.exclude_specs:
-        exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
-
-    def not_excluded(x):
-        return not any(x.satisfies(y) for y in exclude_specs)
-
-    return not_excluded
+class IncludeFilter:
+    def __init__(self, args):
+        self.exclude_specs = []
+        if args.exclude_file:
+            self.exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
+        if args.exclude_specs:
+            self.exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
+        self.private = args.private
+
+    def __call__(self, x):
+        return all([self._not_license_excluded(x), self._not_cmdline_excluded(x)])
+
+    def _not_license_excluded(self, x):
+        """True if the spec is for a private mirror, or as long as the
+        package does not explicitly forbid redistributing source."""
+        if self.private:
+            return True
+        elif x.package_class.redistribute_source(x):
+            return True
+        else:
+            tty.debug(
+                "Skip adding {0} to mirror: the package.py file"
+                " indicates that a public mirror should not contain"
+                " it.".format(x.name)
+            )
+            return False
+
+    def _not_cmdline_excluded(self, x):
+        """True if a spec was not explicitly excluded by the user."""
+        return not any(x.satisfies(y) for y in self.exclude_specs)


-def concrete_specs_from_environment(selection_fn):
+def concrete_specs_from_environment():
     env = ev.active_environment()
     assert env, "an active environment is required"
     mirror_specs = env.all_specs()
     mirror_specs = filter_externals(mirror_specs)
-    mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
     return mirror_specs


-def all_specs_with_all_versions(selection_fn):
+def all_specs_with_all_versions():
     specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
     mirror_specs = spack.mirror.get_all_versions(specs)
     mirror_specs.sort(key=lambda s: (s.name, s.version))
-    mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
     return mirror_specs

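The change above turns the ``not_excluded_fn`` closure into the ``IncludeFilter``
class, but the contract at the call sites is unchanged: ``lang.stable_partition``
only needs something callable on each spec. A minimal sketch of that
predicate-object pattern, with hypothetical stand-ins for the Spack types:

.. code-block:: python

    from typing import Callable, Iterable, List, Tuple, TypeVar

    T = TypeVar("T")

    def stable_partition(
        items: Iterable[T], predicate_fn: Callable[[T], bool]
    ) -> Tuple[List[T], List[T]]:
        # Same contract as llnl.util.lang.stable_partition: preserve order,
        # split into (matching, non-matching).
        true_items: List[T] = []
        false_items: List[T] = []
        for item in items:
            (true_items if predicate_fn(item) else false_items).append(item)
        return true_items, false_items

    class ExcludeByName:
        """Callable predicate, analogous in shape to IncludeFilter.__call__."""

        def __init__(self, excluded):
            self.excluded = set(excluded)

        def __call__(self, name: str) -> bool:
            return name not in self.excluded

    kept, dropped = stable_partition(["zlib", "openssl", "cmake"], ExcludeByName({"cmake"}))
    # kept == ["zlib", "openssl"], dropped == ["cmake"]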
@@ -454,12 +472,6 @@ def versions_per_spec(args):
     return num_versions


-def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
-    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
-    tty.msg("Summary for mirror in {}".format(path))
-    process_mirror_stats(present, mirrored, error)
-
-
 def process_mirror_stats(present, mirrored, error):
     p, m, e = len(present), len(mirrored), len(error)
     tty.msg(

@@ -505,30 +517,28 @@ def mirror_create(args):
     # When no directory is provided, the source dir is used
     path = args.directory or spack.caches.fetch_cache_location()

+    mirror_specs, mirror_fn = _specs_and_action(args)
+    mirror_fn(mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions)
+
+
+def _specs_and_action(args):
+    include_fn = IncludeFilter(args)
+
     if args.all and not ev.active_environment():
-        create_mirror_for_all_specs(
-            path=path,
-            skip_unstable_versions=args.skip_unstable_versions,
-            selection_fn=not_excluded_fn(args),
-        )
-        return
-
-    if args.all and ev.active_environment():
-        create_mirror_for_all_specs_inside_environment(
-            path=path,
-            skip_unstable_versions=args.skip_unstable_versions,
-            selection_fn=not_excluded_fn(args),
-        )
-        return
-
-    mirror_specs = concrete_specs_from_user(args)
-    create_mirror_for_individual_specs(
-        mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
-    )
+        mirror_specs = all_specs_with_all_versions()
+        mirror_fn = create_mirror_for_all_specs
+    elif args.all and ev.active_environment():
+        mirror_specs = concrete_specs_from_environment()
+        mirror_fn = create_mirror_for_individual_specs
+    else:
+        mirror_specs = concrete_specs_from_user(args)
+        mirror_fn = create_mirror_for_individual_specs
+
+    mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=include_fn)
+    return mirror_specs, mirror_fn


-def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
-    mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
+def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
     mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
         path, skip_unstable_versions=skip_unstable_versions
     )

@@ -540,11 +550,10 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
     process_mirror_stats(*mirror_stats.stats())


-def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
-    mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
-    create_mirror_for_individual_specs(
-        mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
-    )
+def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
+    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
+    tty.msg("Summary for mirror in {}".format(path))
+    process_mirror_stats(present, mirrored, error)


 def mirror_destroy(args):
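With this refactor, deciding *what* to mirror and *how* to mirror it happens once
in ``_specs_and_action``, and ``mirror_create`` reduces to a single dispatch. A
rough sketch of that "pick data, pick action, dispatch once" shape (all names
below are hypothetical stand-ins, not Spack's API):

.. code-block:: python

    def specs_and_action(all_flag: bool, in_env: bool):
        # Mirrors the branch structure of _specs_and_action above.
        if all_flag and not in_env:
            return ["every", "package"], create_for_all
        elif all_flag and in_env:
            return ["env", "roots"], create_individual
        else:
            return ["cli", "specs"], create_individual

    def create_for_all(specs, path):
        print(f"mirroring all {len(specs)} specs into {path}")

    def create_individual(specs, path):
        print(f"mirroring {specs} into {path}")

    specs, action = specs_and_action(all_flag=True, in_env=False)
    action(specs, path="/tmp/mirror")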
@@ -23,7 +23,7 @@


 # tutorial configuration parameters
-tutorial_branch = "releases/v0.21"
+tutorial_branch = "releases/v0.22"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -151,6 +151,7 @@ def is_installed(spec):
         key=lambda s: s.dag_hash(),
     )

+    with spack.store.STORE.db.read_transaction():
         return [spec for spec in specs if is_installed(spec)]

@@ -239,6 +240,8 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
             print()
             tty.info("The following environments still reference these specs:")
             colify([e.name for e in other_dependent_envs.keys()], indent=4)
+            if env:
+                msgs.append("use `spack remove` to remove the spec from the current environment")
             msgs.append("use `spack env remove` to remove environments")
             msgs.append("use `spack uninstall --force` to override")
             print()
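The new ``with spack.store.STORE.db.read_transaction():`` line batches every
``is_installed`` query under one database lock acquisition instead of re-locking
per spec. The general idea, sketched with a hypothetical lock-scoped transaction
(not Spack's actual implementation):

.. code-block:: python

    import threading

    class ReadTransaction:
        """Hypothetical: hold one shared lock across many read-only queries."""

        def __init__(self, lock: threading.Lock):
            self.lock = lock

        def __enter__(self):
            self.lock.acquire()
            return self

        def __exit__(self, *exc):
            self.lock.release()

    db_lock = threading.Lock()
    installed = {"zlib"}

    def is_installed(name: str) -> bool:
        return name in installed

    with ReadTransaction(db_lock):
        # One acquisition for the whole list comprehension, not one per query.
        results = [s for s in ["zlib", "cmake"] if is_installed(s)]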
@@ -214,8 +214,6 @@ def unit_test(parser, args, unknown_args):

     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
-    # Note: skip on windows here because for the moment,
-    # clingo is wholly unsupported from bootstrap
     with spack.bootstrap.ensure_bootstrap_configuration():
         spack.bootstrap.ensure_core_dependencies()
         if pytest is None:
@@ -20,8 +20,10 @@

 import spack.compilers
 import spack.error
+import spack.schema.environment
 import spack.spec
 import spack.util.executable
+import spack.util.libc
 import spack.util.module_cmd
 import spack.version
 from spack.util.environment import filter_system_paths

@@ -107,7 +109,6 @@ def _parse_link_paths(string):
     """
     lib_search_paths = False
     raw_link_dirs = []
-    tty.debug("parsing implicit link info")
     for line in string.splitlines():
         if lib_search_paths:
             if line.startswith("\t"):

@@ -122,7 +123,7 @@ def _parse_link_paths(string):
                 continue
         if _LINKER_LINE_IGNORE.match(line):
             continue
-        tty.debug("linker line: %s" % line)
+        tty.debug(f"implicit link dirs: link line: {line}")

         next_arg = False
         for arg in line.split():

@@ -138,15 +139,12 @@ def _parse_link_paths(string):
             link_dir_arg = _LINK_DIR_ARG.match(arg)
             if link_dir_arg:
                 link_dir = link_dir_arg.group("dir")
-                tty.debug("linkdir: %s" % link_dir)
                 raw_link_dirs.append(link_dir)

             link_dir_arg = _LIBPATH_ARG.match(arg)
             if link_dir_arg:
                 link_dir = link_dir_arg.group("dir")
-                tty.debug("libpath: %s", link_dir)
                 raw_link_dirs.append(link_dir)
-    tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))

     implicit_link_dirs = list()
     visited = set()

@@ -156,7 +154,7 @@ def _parse_link_paths(string):
             implicit_link_dirs.append(normalized_path)
             visited.add(normalized_path)

-    tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
+    tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
     return implicit_link_dirs


@@ -417,17 +415,35 @@ def real_version(self):
             self._real_version = self.version
         return self._real_version

-    def implicit_rpaths(self):
+    def implicit_rpaths(self) -> List[str]:
         if self.enable_implicit_rpaths is False:
             return []

-        # Put CXX first since it has the most linking issues
-        # And because it has flags that affect linking
-        link_dirs = self._get_compiler_link_paths()
+        output = self.compiler_verbose_output
+
+        if not output:
+            return []
+
+        link_dirs = _parse_non_system_link_dirs(output)

         all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
         return list(paths_containing_libs(link_dirs, all_required_libs))

+    @property
+    def default_libc(self) -> Optional["spack.spec.Spec"]:
+        """Determine libc targeted by the compiler from link line"""
+        output = self.compiler_verbose_output
+
+        if not output:
+            return None
+
+        dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
+
+        if not dynamic_linker:
+            return None
+
+        return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
+
     @property
     def required_libs(self):
         """For executables created with this compiler, the compiler libraries

@@ -436,17 +452,17 @@ def required_libs(self):
         # By default every compiler returns the empty list
         return []

-    def _get_compiler_link_paths(self):
+    @property
+    def compiler_verbose_output(self) -> Optional[str]:
+        """Verbose output from compiling a dummy C source file. Output is cached."""
+        if not hasattr(self, "_compile_c_source_output"):
+            self._compile_c_source_output = self._compile_dummy_c_source()
+        return self._compile_c_source_output
+
+    def _compile_dummy_c_source(self) -> Optional[str]:
         cc = self.cc if self.cc else self.cxx
         if not cc or not self.verbose_flag:
-            # Cannot determine implicit link paths without a compiler / verbose flag
-            return []
-
-        # What flag types apply to first_compiler, in what order
-        if cc == self.cc:
-            flags = ["cflags", "cppflags", "ldflags"]
-        else:
-            flags = ["cxxflags", "cppflags", "ldflags"]
+            return None

         try:
             tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")

@@ -458,20 +474,19 @@ def _get_compiler_link_paths(self):
                 "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
             )
             cc_exe = spack.util.executable.Executable(cc)
-            for flag_type in flags:
+            for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
                 cc_exe.add_default_arg(*self.flags.get(flag_type, []))

             with self.compiler_environment():
-                output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
-            return _parse_non_system_link_dirs(output)
+                return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
         except spack.util.executable.ProcessError as pe:
             tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
-            return []
+            return None
         finally:
             shutil.rmtree(tmpdir, ignore_errors=True)

     @property
-    def verbose_flag(self):
+    def verbose_flag(self) -> Optional[str]:
         """
         This property should be overridden in the compiler subclass if a
         verbose flag is available.

@@ -669,8 +684,8 @@ def __str__(self):

     @contextlib.contextmanager
     def compiler_environment(self):
-        # yield immediately if no modules
-        if not self.modules:
+        # Avoid modifying os.environ if possible.
+        if not self.modules and not self.environment:
             yield
             return

@@ -680,20 +695,12 @@ def compiler_environment(self):
         try:
             # load modules and set env variables
             for module in self.modules:
-                # On cray, mic-knl module cannot be loaded without cce module
-                # See: https://github.com/spack/spack/issues/3153
-                if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
-                    spack.util.module_cmd.load_module("cce")
                 spack.util.module_cmd.load_module(module)

             # apply other compiler environment changes
-            env = spack.util.environment.EnvironmentModifications()
-            env.extend(spack.schema.environment.parse(self.environment))
-            env.apply_modifications()
+            spack.schema.environment.parse(self.environment).apply_modifications()

             yield
-        except BaseException:
-            raise
         finally:
             # Restore environment regardless of whether inner code succeeded
             os.environ.clear()
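``compiler_verbose_output`` memoizes ``_compile_dummy_c_source`` on first access,
so ``implicit_rpaths`` and the new ``default_libc`` property can both read the
same (possibly ``None``) compiler output without triggering a second compilation.
The caching idiom in isolation, under hypothetical names:

.. code-block:: python

    from typing import Optional

    class VerboseOutputCache:
        # Same hasattr-based memoization as compiler_verbose_output above;
        # _probe() stands in for the expensive dummy C compilation.
        @property
        def verbose_output(self) -> Optional[str]:
            if not hasattr(self, "_cached_output"):
                self._cached_output = self._probe()
            return self._cached_output

        def _probe(self) -> Optional[str]:
            print("probing once")
            return "collect2 -v ... /lib64/ld-linux-x86-64.so.2"  # fake linker chatter

    c = VerboseOutputCache()
    first = c.verbose_output   # prints "probing once"
    second = c.verbose_output  # served from the cache, no second probe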
@@ -164,43 +164,66 @@ def _compiler_config_from_package_config(config):


 def _compiler_config_from_external(config):
+    extra_attributes_key = "extra_attributes"
+    compilers_key = "compilers"
+    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
+
+    # Allow `@x.y.z` instead of `@=x.y.z`
     spec = spack.spec.parse_with_version_concrete(config["spec"])
-    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
     compiler_spec = spack.spec.CompilerSpec(
         package_name_to_compiler_name.get(spec.name, spec.name), spec.version
     )

-    extra_attributes = config.get("extra_attributes", {})
-    prefix = config.get("prefix", None)
+    err_header = f"The external spec '{spec}' cannot be used as a compiler"

-    compiler_class = class_for_compiler_name(compiler_spec.name)
-    paths = extra_attributes.get("paths", {})
-    compiler_langs = ["cc", "cxx", "fc", "f77"]
-    for lang in compiler_langs:
-        if paths.setdefault(lang, None):
-            continue
-
-        if not prefix:
-            continue
-
-        # Check for files that satisfy the naming scheme for this compiler
-        bindir = os.path.join(prefix, "bin")
-        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
-            if regex.match(f):
-                paths[lang] = os.path.join(bindir, f)
-
-    if all(v is None for v in paths.values()):
+    # If extra_attributes is not there I might not want to use this entry as a compiler,
+    # therefore just leave a debug message, but don't be loud with a warning.
+    if extra_attributes_key not in config:
+        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
         return None
+    extra_attributes = config[extra_attributes_key]
+
+    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
+    if compilers_key not in extra_attributes:
+        warnings.warn(
+            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
+        )
+        return None
+    attribute_compilers = extra_attributes[compilers_key]
+
+    if c_key not in attribute_compilers:
+        warnings.warn(
+            f"{err_header}: missing the C compiler path under "
+            f"'{extra_attributes_key}:{compilers_key}'"
+        )
+        return None
+    c_compiler = attribute_compilers[c_key]
+
+    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
+    if cxx_key not in attribute_compilers:
+        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
+
+    if fortran_key not in attribute_compilers:
+        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
+
+    # compilers format has cc/fc/f77, externals format has "c/fortran"
+    paths = {
+        "cc": c_compiler,
+        "cxx": attribute_compilers.get(cxx_key, None),
+        "fc": attribute_compilers.get(fortran_key, None),
+        "f77": attribute_compilers.get(fortran_key, None),
+    }

     if not spec.architecture:
         host_platform = spack.platforms.host()
         operating_system = host_platform.operating_system("default_os")
         target = host_platform.target("default_target").microarchitecture
     else:
-        target = spec.target
+        target = spec.architecture.target
         if not target:
-            host_platform = spack.platforms.host()
-            target = host_platform.target("default_target").microarchitecture
+            target = spack.platforms.host().target("default_target")
+        target = target.microarchitecture

         operating_system = spec.os
         if not operating_system:

@@ -967,10 +990,11 @@ def _default_make_compilers(cmp_id, paths):
     make_mixed_toolchain(flat_compilers)

     # Finally, create the compiler list
-    compilers = []
+    compilers: List["spack.compiler.Compiler"] = []
     for compiler_id, _, compiler in flat_compilers:
         make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
-        compilers.extend(make_compilers(compiler_id, compiler))
+        candidates = make_compilers(compiler_id, compiler)
+        compilers.extend(x for x in candidates if x.cc is not None)

     return compilers

@@ -38,10 +38,10 @@ class Clang(Compiler):
     cxx_names = ["clang++"]

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ["flang"]
+    f77_names = ["flang-new", "flang"]

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ["flang"]
+    fc_names = ["flang-new", "flang"]

     version_argument = "--version"

@@ -96,6 +96,8 @@ def verbose_flag(self):

     openmp_flag = "-fopenmp"

+    # C++ flags based on CMake Modules/Compiler/Clang.cmake
+
     @property
     def cxx11_flag(self):
         if self.real_version < Version("3.3"):

@@ -120,6 +122,24 @@ def cxx17_flag(self):

         return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("5.0"):
+            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("12.0"):
+            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
+        elif self.real_version < Version("17.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         return "-std=c99"

@@ -142,7 +162,10 @@ def c17_flag(self):
     def c23_flag(self):
         if self.real_version < Version("9.0"):
             raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
-        return "-std=c2x"
+        elif self.real_version < Version("18.0"):
+            return "-std=c2x"
+        else:
+            return "-std=c23"

     @property
     def cc_pic_flag(self):

@@ -171,10 +194,11 @@ def extract_version_from_output(cls, output):

         match = re.search(
             # Normal clang compiler versions are left as-is
-            r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
+            r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
             # Don't include hyphenated patch numbers in the version
             # (see https://github.com/spack/spack/pull/14365 for details)
-            r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
+            r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
+            r"(?:clang|flang-new) version ([^ )\n]+)",
             output,
         )
         if match:
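The new ``cxx20_flag``/``cxx23_flag`` properties follow clang's release history:
before a standard is finalized, only the draft spellings (``-std=c++2a``,
``-std=c++2b``) are accepted. The same version-gated selection, sketched with a
plain tuple standing in for ``spack.version.Version``:

.. code-block:: python

    def cxx20_flag(real_version: tuple) -> str:
        # Mirrors the cxx20_flag property above.
        if real_version < (5, 0):
            raise RuntimeError("the C++20 standard needs clang >= 5.0")
        elif real_version < (11, 0):
            return "-std=c++2a"  # draft spelling only
        return "-std=c++20"

    assert cxx20_flag((10, 0)) == "-std=c++2a"
    assert cxx20_flag((12, 0)) == "-std=c++20"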
@@ -8,7 +8,7 @@
 import subprocess
 import sys
 import tempfile
-from typing import Dict, List, Set
+from typing import Dict, List

 import archspec.cpu

@@ -20,15 +20,7 @@
 from spack.error import SpackError
 from spack.version import Version, VersionRange

-avail_fc_version: Set[str] = set()
-fc_path: Dict[str, str] = dict()
-
-fortran_mapping = {
-    "2021.3.0": "19.29.30133",
-    "2021.2.1": "19.28.29913",
-    "2021.2.0": "19.28.29334",
-    "2021.1.0": "19.28.29333",
-}
+FC_PATH: Dict[str, str] = dict()


 class CmdCall:

@@ -115,15 +107,13 @@ def command_str(self):
         return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"


-def get_valid_fortran_pth(comp_ver):
-    cl_ver = str(comp_ver)
+def get_valid_fortran_pth():
+    """Assign maximum available fortran compiler version"""
+    # TODO (johnwparent): validate compatibility w/ try compiler
+    # functionality when added
     sort_fn = lambda fc_ver: Version(fc_ver)
-    sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
-    for ver in sort_fc_ver:
-        if ver in fortran_mapping:
-            if Version(cl_ver) <= Version(fortran_mapping[ver]):
-                return fc_path[ver]
-    return None
+    sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
+    return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None


 class Msvc(Compiler):

@@ -167,11 +157,9 @@ def __init__(self, *args, **kwargs):
         # This positional argument "paths" is later parsed and process by the base class
         # via the call to `super` later in this method
         paths = args[3]
-        # This positional argument "cspec" is also parsed and handled by the base class
-        # constructor
-        cspec = args[0]
-        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
-        paths[:] = new_pth
+        latest_fc = get_valid_fortran_pth()
+        new_pth = [pth if pth else latest_fc for pth in paths[2:]]
+        paths[2:] = new_pth
         # Initialize, deferring to base class but then adding the vcvarsallfile
         # file based on compiler executable path.
         super().__init__(*args, **kwargs)

@@ -183,7 +171,7 @@ def __init__(self, *args, **kwargs):
         # and stores their path, but their respective VCVARS
         # file must be invoked before useage.
         env_cmds = []
-        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
         vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
         # get current platform architecture and format for vcvars argument
         arch = spack.platforms.real_host().default.lower()

@@ -198,11 +186,34 @@ def __init__(self, *args, **kwargs):
         # paths[2] refers to the fc path and is a generic check
         # for a fortran compiler
         if paths[2]:
+
+            def get_oneapi_root(pth: str):
+                """From within a prefix known to be a oneAPI path
+                determine the oneAPI root path from arbitrary point
+                under root
+
+                Args:
+                    pth: path prefixed within oneAPI root
+                """
+                if not pth:
+                    return ""
+                while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
+                    pth = os.path.dirname(pth)
+                return pth
+
             # If this found, it sets all the vars
-            oneapi_root = os.path.join(self.cc, "../../..")
+            oneapi_root = get_oneapi_root(self.fc)
+            if not oneapi_root:
+                raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
             oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            # some oneAPI exes return a version more precise than their
+            # install paths specify, so we determine path from
+            # the install path rather than the fc executable itself
+            numver = r"\d+\.\d+(?:\.\d+)?"
+            pattern = f"((?:{numver})|(?:latest))"
+            version_from_path = re.search(pattern, self.fc).group(1)
             oneapi_version_setvars = os.path.join(
-                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+                oneapi_root, "compiler", version_from_path, "env", "vars.bat"
             )
             # order matters here, the specific version env must be invoked first,
             # otherwise it will be ignored if the root setvars sets up the oneapi

@@ -314,23 +325,19 @@ def setup_custom_environment(self, pkg, env):

     @classmethod
     def fc_version(cls, fc):
-        # We're using intel for the Fortran compilers, which exist if
-        # ONEAPI_ROOT is a meaningful variable
         if not sys.platform == "win32":
             return "unknown"
         fc_ver = cls.default_version(fc)
-        avail_fc_version.add(fc_ver)
-        fc_path[fc_ver] = fc
-        if os.getenv("ONEAPI_ROOT"):
+        FC_PATH[fc_ver] = fc
         try:
             sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
         except AttributeError:
-            raise SpackError("Windows compiler search paths not established")
+            raise SpackError(
+                "Windows compiler search paths not established, "
+                "please report this behavior to github.com/spack/spack"
+            )
         clp = spack.util.executable.which_string("cl", path=sps)
-        ver = cls.default_version(clp)
-        else:
-            ver = fc_ver
-        return ver
+        return cls.default_version(clp) if clp else fc_ver

     @classmethod
     def f77_version(cls, f77):
@@ -64,7 +64,7 @@ def verbose_flag(self):
         #
         # This way, we at least enable the implicit rpath detection, which is
         # based on compilation of a C file (see method
-        # spack.compiler._get_compiler_link_paths): in the case of a mixed
+        # spack.compiler._compile_dummy_c_source): in the case of a mixed
         # NAG/GCC toolchain, the flag will be passed to g++ (e.g.
         # 'g++ -Wl,-v ./main.c'), otherwise, the flag will be passed to nagfor
         # (e.g. 'nagfor -Wl,-v ./main.c' - note that nagfor recognizes '.c'
@@ -74,6 +74,10 @@ class Concretizer:
     #: during concretization. Used for testing and for mirror creation
     check_for_compiler_existence = None

+    #: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
+    #: Those will not be considered as providers for virtuals.
+    non_buildable_packages = {"glibc", "musl"}
+
     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
             Concretizer.check_for_compiler_existence = not spack.config.get(

@@ -113,7 +117,11 @@ def _valid_virtuals_and_externals(self, spec):
         pref_key = lambda spec: 0  # no-op pref key

         if spec.virtual:
-            candidates = spack.repo.PATH.providers_for(spec)
+            candidates = [
+                s
+                for s in spack.repo.PATH.providers_for(spec)
+                if s.name not in self.non_buildable_packages
+            ]
             if not candidates:
                 raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

@@ -1562,8 +1562,9 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
 def use_configuration(
     *scopes_or_paths: Union[ConfigScope, str]
 ) -> Generator[Configuration, None, None]:
-    """Use the configuration scopes passed as arguments within the
-    context manager.
+    """Use the configuration scopes passed as arguments within the context manager.
+
+    This function invalidates caches, and is therefore very slow.

     Args:
         *scopes_or_paths: scope objects or paths to be used
@@ -12,26 +12,26 @@
         },
         "os_package_manager": "yum_amazon"
     },
-    "fedora:38": {
+    "fedora:40": {
         "bootstrap": {
-            "template": "container/fedora_38.dockerfile",
-            "image": "docker.io/fedora:38"
+            "template": "container/fedora.dockerfile",
+            "image": "docker.io/fedora:40"
         },
         "os_package_manager": "dnf",
-        "build": "spack/fedora38",
+        "build": "spack/fedora40",
         "final": {
-            "image": "docker.io/fedora:38"
+            "image": "docker.io/fedora:40"
         }
     },
-    "fedora:37": {
+    "fedora:39": {
         "bootstrap": {
-            "template": "container/fedora_37.dockerfile",
-            "image": "docker.io/fedora:37"
+            "template": "container/fedora.dockerfile",
+            "image": "docker.io/fedora:39"
         },
         "os_package_manager": "dnf",
-        "build": "spack/fedora37",
+        "build": "spack/fedora39",
         "final": {
-            "image": "docker.io/fedora:37"
+            "image": "docker.io/fedora:39"
         }
     },
     "rockylinux:9": {

@@ -116,6 +116,13 @@
         },
         "os_package_manager": "apt"
     },
+    "ubuntu:24.04": {
+        "bootstrap": {
+            "template": "container/ubuntu_2404.dockerfile"
+        },
+        "os_package_manager": "apt",
+        "build": "spack/ubuntu-noble"
+    },
     "ubuntu:22.04": {
         "bootstrap": {
             "template": "container/ubuntu_2204.dockerfile"

@@ -129,13 +136,6 @@
         },
         "build": "spack/ubuntu-focal",
         "os_package_manager": "apt"
-    },
-    "ubuntu:18.04": {
-        "bootstrap": {
-            "template": "container/ubuntu_1804.dockerfile"
-        },
-        "os_package_manager": "apt",
-        "build": "spack/ubuntu-bionic"
     }
 },
 "os_package_managers": {
@@ -25,6 +25,7 @@
 import socket
 import sys
 import time
+from json import JSONDecoder
 from typing import (
     Any,
     Callable,

@@ -818,7 +819,8 @@ def _read_from_file(self, filename):
         """
         try:
             with open(filename, "r") as f:
-                fdata = sjson.load(f)
+                # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
+                fdata, _ = JSONDecoder().raw_decode(f.read())
         except Exception as e:
             raise CorruptDatabaseError("error parsing database:", str(e)) from e

@@ -833,27 +835,24 @@ def check(cond, msg):

         # High-level file checks
         db = fdata["database"]
-        check("installs" in db, "no 'installs' in JSON DB.")
         check("version" in db, "no 'version' in JSON DB.")

-        installs = db["installs"]
-
         # TODO: better version checking semantics.
         version = vn.Version(db["version"])
         if version > _DB_VERSION:
             raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
-        elif version < _DB_VERSION:
-            if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
-                tty.warn(
-                    "Spack database version changed from %s to %s. Upgrading."
-                    % (version, _DB_VERSION)
-                )
+        elif version < _DB_VERSION and not any(
+            old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX
+        ):
+            tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")

             self.reindex(spack.store.STORE.layout)
             installs = dict(
-                (k, v.to_dict(include_fields=self._record_fields))
-                for k, v in self._data.items()
+                (k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
             )
+        else:
+            check("installs" in db, "no 'installs' in JSON DB.")
+            installs = db["installs"]

         spec_reader = reader(version)
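``JSONDecoder.raw_decode`` parses one JSON document from the front of a string
and also returns the index where parsing stopped, which is what would let the
database file later become a stream of concatenated JSON objects. For example:

.. code-block:: python

    from json import JSONDecoder

    text = '{"database": {"version": "8"}}{"future": "trailer"}'
    decoder = JSONDecoder()

    # First document, plus the offset where it ended.
    fdata, end = decoder.raw_decode(text)
    assert fdata == {"database": {"version": "8"}}

    # A later reader could pick up a second object from the remainder.
    trailer, _ = decoder.raw_decode(text[end:])
    assert trailer == {"future": "trailer"}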
@@ -83,26 +83,15 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
     return path_to_dict(search_paths)


-def get_elf_compat(path):
-    """For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
-    it is host-compatible."""
-    # On ELF platforms supporting, we try to be a bit smarter when it comes to shared
-    # libraries, by dropping those that are not host compatible.
-    with open(path, "rb") as f:
-        elf = elf_utils.parse_elf(f, only_header=True)
-        return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
-
-
 def accept_elf(path, host_compat):
-    """Accept an ELF file if the header matches the given compat triplet,
-    obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g.
-    static library, or some arbitrary file, fall back to is_readable_file)."""
+    """Accept an ELF file if the header matches the given compat triplet. In case it's not an ELF
+    (e.g. static library, or some arbitrary file, fall back to is_readable_file)."""
     # Fast path: assume libraries at least have .so in their basename.
     # Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
     if ".so" not in os.path.basename(path):
         return llnl.util.filesystem.is_readable_file(path)
     try:
-        return host_compat == get_elf_compat(path)
+        return host_compat == elf_utils.get_elf_compat(path)
     except (OSError, elf_utils.ElfParsingError):
         return llnl.util.filesystem.is_readable_file(path)

@@ -155,7 +144,7 @@ def libraries_in_ld_and_system_library_path(
     search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))

     try:
-        host_compat = get_elf_compat(sys.executable)
+        host_compat = elf_utils.get_elf_compat(sys.executable)
         accept = lambda path: accept_elf(path, host_compat)
     except (OSError, elf_utils.ElfParsingError):
         accept = llnl.util.filesystem.is_readable_file
@@ -11,6 +11,7 @@

 from llnl.util import filesystem

+import spack.platforms
 import spack.repo
 import spack.spec
 from spack.util import spack_yaml

@@ -32,6 +33,8 @@ class ExpectedTestResult(NamedTuple):

     #: Spec to be detected
     spec: str
+    #: Attributes expected in the external spec
+    extra_attributes: Dict[str, str]


 class DetectionTest(NamedTuple):

@@ -100,7 +103,10 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[

     @property
     def expected_specs(self) -> List[spack.spec.Spec]:
-        return [spack.spec.Spec(r.spec) for r in self.test.results]
+        return [
+            spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
+            for item in self.test.results
+        ]


 def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:

@@ -117,9 +123,13 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
     """
     result = []
     detection_tests_content = read_detection_tests(pkg_name, repository)
+    current_platform = str(spack.platforms.host())

     tests_by_path = detection_tests_content.get("paths", [])
     for single_test_data in tests_by_path:
+        if current_platform not in single_test_data.get("platforms", [current_platform]):
+            continue
+
         mock_executables = []
         for layout in single_test_data["layout"]:
             mock_executables.append(

@@ -127,7 +137,11 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
             )
         expected_results = []
         for assertion in single_test_data["results"]:
-            expected_results.append(ExpectedTestResult(spec=assertion["spec"]))
+            expected_results.append(
+                ExpectedTestResult(
+                    spec=assertion["spec"], extra_attributes=assertion.get("extra_attributes", {})
+                )
+            )

         current_test = DetectionTest(
             pkg_name=pkg_name, layout=mock_executables, results=expected_results
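Because the new ``extra_attributes`` field is read with
``assertion.get("extra_attributes", {})``, existing detection test files that
lack the key keep working: they simply expect an empty attribute payload. A
small sketch of the record shape (the sample values are made up):

.. code-block:: python

    from typing import Dict, NamedTuple

    class ExpectedTestResult(NamedTuple):
        spec: str
        extra_attributes: Dict[str, str]

    old_style = ExpectedTestResult(spec="gcc@12.3.0", extra_attributes={})
    new_style = ExpectedTestResult(
        spec="llvm@17.0.1",
        extra_attributes={"compilers": "{'c': '/usr/bin/clang'}"},
    )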
@@ -27,6 +27,7 @@ class OpenMpi(Package):
 * ``variant``
 * ``version``
 * ``requires``
+* ``redistribute``

 """
 import collections

@@ -63,6 +64,7 @@ class OpenMpi(Package):
 __all__ = [
     "DirectiveError",
     "DirectiveMeta",
+    "DisableRedistribute",
     "version",
     "conflicts",
     "depends_on",

@@ -75,6 +77,7 @@ class OpenMpi(Package):
     "resource",
     "build_system",
     "requires",
+    "redistribute",
 ]

 #: These are variant names used by Spack internally; packages can't use them

@@ -598,9 +601,68 @@ def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
     return _execute_depends_on


+#: Store whether a given Spec source/binary should not be redistributed.
+class DisableRedistribute:
+    def __init__(self, source, binary):
+        self.source = source
+        self.binary = binary
+
+
+@directive("disable_redistribute")
+def redistribute(source=None, binary=None, when: WhenType = None):
+    """Can be used inside a Package definition to declare that
+    the package source and/or compiled binaries should not be
+    redistributed.
+
+    By default, Packages allow source/binary distribution (i.e. in
+    mirrors). Because of this, and because overlapping enable/
+    disable specs are not allowed, this directive only allows users
+    to explicitly disable redistribution for specs.
+    """
+
+    return lambda pkg: _execute_redistribute(pkg, source, binary, when)
+
+
+def _execute_redistribute(
+    pkg: "spack.package_base.PackageBase", source=None, binary=None, when: WhenType = None
+):
+    if source is None and binary is None:
+        return
+    elif (source is True) or (binary is True):
+        raise DirectiveError(
+            "Source/binary distribution are true by default, they can only "
+            "be explicitly disabled."
+        )
+
+    if source is None:
+        source = True
+    if binary is None:
+        binary = True
+
+    when_spec = _make_when_spec(when)
+    if not when_spec:
+        return
+    if source is False:
+        max_constraint = spack.spec.Spec(f"{pkg.name}@{when_spec.versions}")
+        if not max_constraint.satisfies(when_spec):
+            raise DirectiveError("Source distribution can only be disabled for versions")
+
+    if when_spec in pkg.disable_redistribute:
+        disable = pkg.disable_redistribute[when_spec]
+        if not source:
+            disable.source = True
+        if not binary:
+            disable.binary = True
+    else:
+        pkg.disable_redistribute[when_spec] = DisableRedistribute(
+            source=not source, binary=not binary
+        )
+
+
 @directive(("extendees", "dependencies"))
 def extends(spec, when=None, type=("build", "run"), patches=None):
     """Same as depends_on, but also adds this package to the extendee list.
+    In case of Python, also adds a dependency on python-venv.

     keyword arguments can be passed to extends() so that extension
     packages can pass parameters to the extendee's extension

@@ -616,6 +678,11 @@ def _execute_extends(pkg):
     _depends_on(pkg, spec, when=when, type=type, patches=patches)
     spec_obj = spack.spec.Spec(spec)

+    # When extending python, also add a dependency on python-venv. This is done so that
+    # Spack environment views are Python virtual environments.
+    if spec_obj.name == "python" and not pkg.name == "python-venv":
+        _depends_on(pkg, "python-venv", when=when, type=("build", "run"))
+
     # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
     pkg.extendees[spec_obj.name] = (spec_obj, None)

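Together with ``IncludeFilter._not_license_excluded`` in the mirror command, the
``redistribute`` directive is what a package author writes to keep a package off
public mirrors. A hedged sketch of how a package might use it (the package is
made up, and this assumes the directive is re-exported through ``spack.package``
like the other directives; per ``_execute_redistribute``, only disabling is
allowed, and ``source=False`` requires a purely version-based ``when``):

.. code-block:: python

    from spack.package import *

    class Examplepkg(Package):
        """Hypothetical package demonstrating the new directive."""

        version("2.0")
        version("1.0")

        # No 1.x source tarballs may land on a public mirror;
        # redistribute(binary=False) would likewise block binary caches.
        redistribute(source=False, when="@:1")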
@@ -15,6 +15,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.symlink import readlink
 
 import spack.config
 import spack.hash_types as ht
@@ -181,7 +182,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
         base_dir = (
             self.path_for_spec(deprecator_spec)
             if deprecator_spec
-            else os.readlink(deprecated_spec.prefix)
+            else readlink(deprecated_spec.prefix)
         )
 
         yaml_path = os.path.join(
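The `os.readlink` → `readlink` swap above recurs in later hunks of this diff; it routes link resolution through Spack's own symlink shim, presumably for portability (`llnl.util.symlink` also understands Windows-style links, where `os.readlink` can fall short). A trivial illustration, with a made-up path:

```python
from llnl.util.symlink import readlink

# Same contract as os.readlink on POSIX; also handles Windows link types.
target = readlink("/opt/spack/deprecated/pkg-1.0")  # hypothetical symlink
```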
@@ -34,6 +34,9 @@
 * ``spec``: a string representation of the abstract spec that was concretized
 
 4. ``concrete_specs``: a dictionary containing the specs in the environment.
 
+5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
+   and concrete specs from the included environments, keyed by the path to that
+   environment
+
 Compatibility
 -------------
@@ -50,26 +53,37 @@
      - ``v2``
      - ``v3``
      - ``v4``
+     - ``v5``
    * - ``v0.12:0.14``
      - ✅
      -
      -
      -
+     -
    * - ``v0.15:0.16``
      - ✅
      - ✅
      -
      -
+     -
    * - ``v0.17``
      - ✅
      - ✅
      - ✅
      -
+     -
    * - ``v0.18:``
      - ✅
      - ✅
      - ✅
      - ✅
+     -
+   * - ``v0.22:``
+     - ✅
+     - ✅
+     - ✅
+     - ✅
+     - ✅
 
 Version 1
 ---------
@@ -334,6 +348,118 @@
     }
   }
 }
+
+
+Version 5
+---------
+
+Version 5 doesn't change the top-level lockfile format, but an optional dictionary is
+added. The dictionary has the ``roots`` and ``concrete_specs`` of the included
+environments, keyed by the path to each environment. Since the entry is optional,
+``include_concrete`` is simply absent when the environment does not include any
+other environments.
+
+.. code-block:: json
+
+    {
+        "_meta": {
+            "file-type": "spack-lockfile",
+            "lockfile-version": 5,
+            "specfile-version": 3
+        },
+        "roots": [
+            {
+                "hash": "<dag_hash 1>",
+                "spec": "<abstract spec 1>"
+            },
+            {
+                "hash": "<dag_hash 2>",
+                "spec": "<abstract spec 2>"
+            }
+        ],
+        "concrete_specs": {
+            "<dag_hash 1>": {
+                "... <spec dict attributes> ...": { },
+                "dependencies": [
+                    {
+                        "name": "depname_1",
+                        "hash": "<dag_hash for depname_1>",
+                        "type": ["build", "link"]
+                    },
+                    {
+                        "name": "depname_2",
+                        "hash": "<dag_hash for depname_2>",
+                        "type": ["build", "link"]
+                    }
+                ],
+                "hash": "<dag_hash 1>"
+            },
+            "<dag_hash 2>": {
+                "... <spec dict attributes> ...": { },
+                "dependencies": [
+                    {
+                        "name": "depname_3",
+                        "hash": "<dag_hash for depname_3>",
+                        "type": ["build", "link"]
+                    },
+                    {
+                        "name": "depname_4",
+                        "hash": "<dag_hash for depname_4>",
+                        "type": ["build", "link"]
+                    }
+                ],
+                "hash": "<dag_hash 2>"
+            }
+        },
+        "include_concrete": {
+            "<path to environment>": {
+                "roots": [
+                    {
+                        "hash": "<dag_hash 1>",
+                        "spec": "<abstract spec 1>"
+                    },
+                    {
+                        "hash": "<dag_hash 2>",
+                        "spec": "<abstract spec 2>"
+                    }
+                ],
+                "concrete_specs": {
+                    "<dag_hash 1>": {
+                        "... <spec dict attributes> ...": { },
+                        "dependencies": [
+                            {
+                                "name": "depname_1",
+                                "hash": "<dag_hash for depname_1>",
+                                "type": ["build", "link"]
+                            },
+                            {
+                                "name": "depname_2",
+                                "hash": "<dag_hash for depname_2>",
+                                "type": ["build", "link"]
+                            }
+                        ],
+                        "hash": "<dag_hash 1>"
+                    },
+                    "<dag_hash 2>": {
+                        "... <spec dict attributes> ...": { },
+                        "dependencies": [
+                            {
+                                "name": "depname_3",
+                                "hash": "<dag_hash for depname_3>",
+                                "type": ["build", "link"]
+                            },
+                            {
+                                "name": "depname_4",
+                                "hash": "<dag_hash for depname_4>",
+                                "type": ["build", "link"]
+                            }
+                        ],
+                        "hash": "<dag_hash 2>"
+                    }
+                }
+            }
+        }
+    }
 """
 
 from .environment import (
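To make the v5 layout concrete, here is a small standalone sketch (plain `json`, not the Spack API) that lists the root specs contributed by each included environment; the lockfile path passed in is whatever `spack.lock` you point it at:

```python
import json


def included_roots(lockfile_path: str) -> dict:
    """Map each included environment's path to its root spec strings."""
    with open(lockfile_path) as f:
        data = json.load(f)
    # "include_concrete" is optional: absent when nothing is included.
    included = data.get("include_concrete", {})
    return {
        env_path: [root["spec"] for root in info["roots"]]
        for env_path, info in included.items()
    }
```

Because the key is optional, the function returns an empty dictionary for any pre-v5 lockfile or for a v5 environment with no includes.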
@@ -59,7 +59,7 @@ def __init__(
         self.buildcache_flag = ""
 
 
-class DepfileSpecVisitor:
+class DepfileSpecVisitor(traverse.AbstractVisitor):
     """This visitor produces an adjacency list of a (reduced) DAG, which
     is used to generate depfile targets with their prerequisites. Currently
     it only drops build deps when using buildcache only mode.
@@ -75,17 +75,17 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
         self.depflag_root = _deptypes(pkg_buildcache)
         self.depflag_deps = _deptypes(deps_buildcache)
 
-    def neighbors(self, node):
+    def neighbors(self, node: traverse.EdgeAndDepth) -> List[spack.spec.DependencySpec]:
         """Produce a list of spec to follow from node"""
-        depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
-        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))
+        depflag = self.depflag_root if node[1] == 0 else self.depflag_deps
+        return traverse.sort_edges(node[0].spec.edges_to_dependencies(depflag=depflag))
 
-    def accept(self, node):
+    def accept(self, node: traverse.EdgeAndDepth) -> bool:
         self.adjacency_list.append(
             DepfileNode(
-                target=node.edge.spec,
+                target=node[0].spec,
                 prereqs=[edge.spec for edge in self.neighbors(node)],
-                buildcache=self.pkg_buildcache if node.depth == 0 else self.deps_buildcache,
+                buildcache=self.pkg_buildcache if node[1] == 0 else self.deps_buildcache,
             )
         )
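This hunk tracks a change in `spack.traverse`: visitors now subclass `traverse.AbstractVisitor`, and `EdgeAndDepth` is handled as a plain `(edge, depth)` tuple rather than an object with `.edge`/`.depth` attributes, hence the `node[0]`/`node[1]` indexing. A minimal sketch of the shape involved (the alias below is an assumption for illustration, not Spack's definition):

```python
from typing import Any, Tuple

# Assumed shape: (dependency edge, depth in the traversal); depth 0 == root.
EdgeAndDepth = Tuple[Any, int]


def is_root(node: EdgeAndDepth) -> bool:
    """Mirror of the `node[1] == 0` checks in the visitor above."""
    _edge, depth = node
    return depth == 0
```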
@@ -16,13 +16,13 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
 from llnl.util.link_tree import ConflictingSpecsError
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink
 
 import spack.compilers
 import spack.concretize
@@ -106,17 +106,16 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
     return path_str
 
 
-def check_disallowed_env_config_mods(scopes):
+def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
     for scope in scopes:
-        with spack.config.use_configuration(scope):
-            if spack.config.get("config:environments_root"):
+        config = scope.get_section("config")
+        if config and "environments_root" in config["config"]:
             raise SpackEnvironmentError(
                 "Spack environments are prohibited from modifying 'config:environments_root' "
                 "because it can make the definition of the environment ill-posed. Please "
                 "remove from your environment and place it in a permanent scope such as "
                 "defaults, system, site, etc."
             )
-    return scopes
 
 
 def default_manifest_yaml():
@@ -160,6 +159,8 @@ def default_manifest_yaml():
 default_view_name = "default"
 # Default behavior to link all packages into views (vs. only root packages)
 default_view_link = "all"
+# The name for any included concrete specs
+included_concrete_name = "include_concrete"
 
 
 def installed_specs():
@@ -294,6 +295,7 @@ def create(
     init_file: Optional[Union[str, pathlib.Path]] = None,
     with_view: Optional[Union[str, pathlib.Path, bool]] = None,
     keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
     """Create a managed environment in Spack and returns it.
 
@@ -310,10 +312,15 @@ def create(
             string, it specifies the path to the view
         keep_relative: if True, develop paths are copied verbatim into the new environment file,
             otherwise they are made absolute
+        include_concrete: list of concrete environment names/paths to be included
     """
     environment_dir = environment_dir_from_name(name, exists_ok=False)
     return create_in_dir(
-        environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+        environment_dir,
+        init_file=init_file,
+        with_view=with_view,
+        keep_relative=keep_relative,
+        include_concrete=include_concrete,
     )
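A hedged usage sketch of the extended signature (the environment names are invented; `spack.environment.read` is used only to emphasize that the included environments must already exist and be concretized):

```python
import spack.environment as ev

# Two pre-existing, already-concretized environments to pull in.
included = [ev.read("gcc-stack").path, ev.read("clang-stack").path]

# New managed environment whose lockfile will embed the included
# environments' roots and concrete specs under "include_concrete".
combined = ev.create("combined", include_concrete=included)
```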
@@ -322,6 +329,7 @@ def create_in_dir(
     init_file: Optional[Union[str, pathlib.Path]] = None,
     with_view: Optional[Union[str, pathlib.Path, bool]] = None,
     keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
     """Create an environment in the directory passed as input and returns it.
 
@@ -335,6 +343,7 @@ def create_in_dir(
             string, it specifies the path to the view
         keep_relative: if True, develop paths are copied verbatim into the new environment file,
             otherwise they are made absolute
+        include_concrete: concrete environment names/paths to be included
     """
     initialize_environment_dir(root, envfile=init_file)
@@ -347,6 +356,12 @@ def create_in_dir(
         if with_view is not None:
             manifest.set_default_view(with_view)
 
+        if include_concrete is not None:
+            set_included_envs_to_env_paths(include_concrete)
+            validate_included_envs_exists(include_concrete)
+            validate_included_envs_concrete(include_concrete)
+            manifest.set_include_concrete(include_concrete)
+
         manifest.flush()
 
     except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
@@ -420,6 +435,67 @@ def ensure_env_root_path_exists():
     fs.mkdirp(env_root_path())
 
 
+def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
+    """If an included environment is given by name, replace the name with
+    the path to that environment.
+
+    Args:
+        include_concrete: list of env names or paths to envs"""
+
+    for i, env_name in enumerate(include_concrete):
+        if is_env_dir(env_name):
+            include_concrete[i] = env_name
+        elif exists(env_name):
+            include_concrete[i] = root(env_name)
+
+
+def validate_included_envs_exists(include_concrete: List[str]) -> None:
+    """Checks that all of the included environments exist
+
+    Args:
+        include_concrete: list of already existing concrete environments to include
+
+    Raises:
+        SpackEnvironmentError: if any of the included environments do not exist
+    """
+
+    missing_envs = set()
+
+    for i, env_name in enumerate(include_concrete):
+        if not is_env_dir(env_name):
+            missing_envs.add(env_name)
+
+    if missing_envs:
+        msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
+        raise SpackEnvironmentError(msg)
+
+
+def validate_included_envs_concrete(include_concrete: List[str]) -> None:
+    """Checks that all of the included environments are concrete
+
+    Args:
+        include_concrete: list of already existing concrete environments to include
+
+    Raises:
+        SpackEnvironmentError: if any of the included environments are not concrete
+    """
+
+    non_concrete_envs = set()
+
+    for env_path in include_concrete:
+        if not os.path.exists(Environment(env_path).lock_path):
+            non_concrete_envs.add(Environment(env_path).name)
+
+    if non_concrete_envs:
+        msg = "The following environment(s) are not concrete: {0}\nPlease run:".format(
+            ", ".join(non_concrete_envs)
+        )
+        for env in non_concrete_envs:
+            msg += f"\n\t`spack -e {env} concretize`"
+
+        raise SpackEnvironmentError(msg)
+
+
 def all_environment_names():
     """List the names of environments that currently exist."""
     # just return empty if the env path does not exist. A read-only
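Taken together, the three helpers above form the pipeline that `create_in_dir` runs for `include_concrete`; roughly (the list entries are made up, the function names are the ones just added):

```python
include_concrete = ["myenv", "/abs/path/to/other-env"]

# 1. Names of managed envs are rewritten to their on-disk paths.
set_included_envs_to_env_paths(include_concrete)
# 2. Every entry must now be an environment directory...
validate_included_envs_exists(include_concrete)
# 3. ...and must already have a spack.lock (i.e. be concretized).
validate_included_envs_concrete(include_concrete)
```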
@@ -586,7 +662,7 @@ def _current_root(self):
         if not os.path.islink(self.root):
             return None
 
-        root = os.readlink(self.root)
+        root = readlink(self.root)
         if os.path.isabs(root):
             return root
 
@@ -822,6 +898,18 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
         self.specs_by_hash: Dict[str, Spec] = {}
         #: Repository for this environment (memoized)
         self._repo = None
+
+        #: Environment paths for concrete (lockfile) included environments
+        self.included_concrete_envs: List[str] = []
+        #: First-level included concretized spec data from/to the lockfile.
+        self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
+        #: User specs from included environments from the last concretization
+        self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
+        #: Roots from included environments with the last concretization, in order
+        self.included_concretized_order: Dict[str, List[str]] = {}
+        #: Concretized specs by hash from the included environments
+        self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}
+
         #: Previously active environment
         self._previous_active = None
         self._dev_specs = None
@@ -859,7 +947,7 @@ def _read(self):
 
         if os.path.exists(self.lock_path):
             with open(self.lock_path) as f:
-                read_lock_version = self._read_lockfile(f)
+                read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
 
             if read_lock_version == 1:
                 tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")
@@ -927,6 +1015,20 @@ def add_view(name, values):
         if self.views == dict():
             self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)
 
+    def _process_concrete_includes(self):
+        """Extract and load into memory included concrete spec data."""
+        self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
+
+        if self.included_concrete_envs:
+            if os.path.exists(self.lock_path):
+                with open(self.lock_path) as f:
+                    data = self._read_lockfile(f)
+
+                if included_concrete_name in data:
+                    self.included_concrete_spec_data = data[included_concrete_name]
+            else:
+                self.include_concrete_envs()
+
     def _construct_state_from_manifest(self):
         """Set up user specs and views from the manifest file."""
         self.spec_lists = collections.OrderedDict()
@@ -943,6 +1045,31 @@ def _construct_state_from_manifest(self):
         self.spec_lists[user_speclist_name] = user_specs
 
         self._process_view(spack.config.get("view", True))
+        self._process_concrete_includes()
+
+    def all_concretized_user_specs(self) -> List[Spec]:
+        """Returns all of the concretized user specs of the environment and
+        its included environment(s)."""
+        concretized_user_specs = self.concretized_user_specs[:]
+        for included_specs in self.included_concretized_user_specs.values():
+            for included in included_specs:
+                # Don't duplicate included spec(s)
+                if included not in concretized_user_specs:
+                    concretized_user_specs.append(included)
+
+        return concretized_user_specs
+
+    def all_concretized_orders(self) -> List[str]:
+        """Returns the concretized order of the environment and
+        its included environment(s)."""
+        concretized_order = self.concretized_order[:]
+        for included_concretized_order in self.included_concretized_order.values():
+            for included in included_concretized_order:
+                # Don't duplicate included spec(s)
+                if included not in concretized_order:
+                    concretized_order.append(included)
+
+        return concretized_order
+
     @property
     def user_specs(self):
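Both accessors above merge the environment's own data with the included environments' data while preserving order and skipping duplicates; in miniature:

```python
# Toy model of all_concretized_orders(): own hashes first, then included
# ones, each appended only if not already present (values are made up).
own = ["h1", "h2"]
included = {"/path/envA": ["h2", "h3"], "/path/envB": ["h3", "h4"]}

merged = own[:]
for hashes in included.values():
    for h in hashes:
        if h not in merged:
            merged.append(h)

assert merged == ["h1", "h2", "h3", "h4"]
```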
@@ -967,6 +1094,26 @@ def _read_dev_specs(self):
                 dev_specs[name] = local_entry
         return dev_specs
 
+    @property
+    def included_user_specs(self) -> SpecList:
+        """Included concrete user (or root) specs from last concretization."""
+        spec_list = SpecList()
+
+        if not self.included_concrete_envs:
+            return spec_list
+
+        def add_root_specs(included_concrete_specs):
+            # add specs from the include *and* any nested includes it may have
+            for env, info in included_concrete_specs.items():
+                for root_list in info["roots"]:
+                    spec_list.add(root_list["spec"])
+
+                if "include_concrete" in info:
+                    add_root_specs(info["include_concrete"])
+
+        add_root_specs(self.included_concrete_spec_data)
+        return spec_list
+
     def clear(self, re_read=False):
         """Clear the contents of the environment
 
@@ -978,9 +1125,15 @@ def clear(self, re_read=False):
         self.spec_lists[user_speclist_name] = SpecList()
 
         self._dev_specs = {}
-        self.concretized_user_specs = []  # user specs from last concretize
         self.concretized_order = []  # roots of last concretize, in order
+        self.concretized_user_specs = []  # user specs from last concretize
         self.specs_by_hash = {}  # concretized specs by hash
+
+        self.included_concrete_spec_data = {}  # concretized specs from lockfile of included envs
+        self.included_concretized_order = {}  # root specs of the included envs, keyed by env path
+        self.included_concretized_user_specs = {}  # user specs from last concretize's included env
+        self.included_specs_by_hash = {}  # concretized specs by hash from the included envs
+
         self.invalidate_repository_cache()
         self._previous_active = None  # previously active environment
         if not re_read:
@@ -1034,6 +1187,55 @@ def scope_name(self):
         """Name of the config scope of this environment's manifest file."""
         return self.manifest.scope_name
 
+    def include_concrete_envs(self):
+        """Copy and save the included envs' specs internally"""
+        lockfile_meta = None
+        root_hash_seen = set()
+        concrete_hash_seen = set()
+        self.included_concrete_spec_data = {}
+
+        for env_path in self.included_concrete_envs:
+            # Check that environment exists
+            if not is_env_dir(env_path):
+                raise SpackEnvironmentError(f"Unable to find env at {env_path}")
+
+            env = Environment(env_path)
+
+            with open(env.lock_path) as f:
+                lockfile_as_dict = env._read_lockfile(f)
+
+            # Lockfile_meta must match each env and use at least format version 5
+            if lockfile_meta is None:
+                lockfile_meta = lockfile_as_dict["_meta"]
+            elif lockfile_meta != lockfile_as_dict["_meta"]:
+                raise SpackEnvironmentError("All lockfile _meta values must match")
+            elif lockfile_meta["lockfile-version"] < 5:
+                raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
+
+            # Copy unique root specs from env
+            self.included_concrete_spec_data[env_path] = {"roots": []}
+            for root_dict in lockfile_as_dict["roots"]:
+                if root_dict["hash"] not in root_hash_seen:
+                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
+                    root_hash_seen.add(root_dict["hash"])
+
+            # Copy unique concrete specs from env
+            for concrete_spec in lockfile_as_dict["concrete_specs"]:
+                if concrete_spec not in concrete_hash_seen:
+                    self.included_concrete_spec_data[env_path].update(
+                        {"concrete_specs": lockfile_as_dict["concrete_specs"]}
+                    )
+                    concrete_hash_seen.add(concrete_spec)
+
+            if "include_concrete" in lockfile_as_dict.keys():
+                self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
+                    "include_concrete"
+                ]
+
+        self._read_lockfile_dict(self._to_lockfile_dict())
+        self.write()
+
     def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)
@@ -1233,6 +1435,10 @@ def concretize(self, force=False, tests=False):
         for spec in set(self.concretized_user_specs) - set(self.user_specs):
             self.deconcretize(spec, concrete=False)
 
+        # If a combined env, check that the updated specs are in the linked envs
+        if self.included_concrete_envs:
+            self.include_concrete_envs()
+
         # Pick the right concretization strategy
         if self.unify == "when_possible":
             return self._concretize_together_where_possible(tests=tests)
@@ -1416,7 +1622,7 @@ def _concretize_separately(self, tests=False):
         # Ensure we don't try to bootstrap clingo in parallel
         if spack.config.get("config:concretizer", "clingo") == "clingo":
             with spack.bootstrap.ensure_bootstrap_configuration():
-                spack.bootstrap.ensure_core_dependencies()
+                spack.bootstrap.ensure_clingo_importable_or_raise()
 
         # Ensure all the indexes have been built or updated, since
         # otherwise the processes in the pool may timeout on waiting
@@ -1705,8 +1911,14 @@ def _partition_roots_by_install_status(self):
         of per spec."""
         installed, uninstalled = [], []
         with spack.store.STORE.db.read_transaction():
-            for concretized_hash in self.concretized_order:
-                spec = self.specs_by_hash[concretized_hash]
+            for concretized_hash in self.all_concretized_orders():
+                if concretized_hash in self.specs_by_hash:
+                    spec = self.specs_by_hash[concretized_hash]
+                else:
+                    for env_path in self.included_specs_by_hash.keys():
+                        if concretized_hash in self.included_specs_by_hash[env_path]:
+                            spec = self.included_specs_by_hash[env_path][concretized_hash]
+                            break
                 if not spec.installed or (
                     spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
                 ):
@@ -1736,13 +1948,19 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
         specs = specs if specs is not None else roots
 
         # Extend the set of specs to overwrite with modified dev specs and their parents
-        install_args["overwrite"] = (
-            install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
-        )
+        overwrite: Set[str] = set()
+        overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
+        install_args["overwrite"] = overwrite
+
+        explicit: Set[str] = set()
+        explicit.update(
+            install_args.get("explicit", []),
+            (s.dag_hash() for s in specs),
+            (s.dag_hash() for s in roots),
+        )
+        install_args["explicit"] = explicit
 
-        installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]
-
-        PackageInstaller(installs).install()
+        PackageInstaller([spec.package for spec in specs], install_args).install()
 
     def all_specs_generator(self) -> Iterable[Spec]:
         """Returns a generator for all concrete specs"""
@@ -1786,8 +2004,14 @@ def added_specs(self):
 
     def concretized_specs(self):
         """Tuples of (user spec, concrete spec) for all concrete specs."""
-        for s, h in zip(self.concretized_user_specs, self.concretized_order):
-            yield (s, self.specs_by_hash[h])
+        for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
+            if h in self.specs_by_hash:
+                yield (s, self.specs_by_hash[h])
+            else:
+                for env_path in self.included_specs_by_hash.keys():
+                    if h in self.included_specs_by_hash[env_path]:
+                        yield (s, self.included_specs_by_hash[env_path][h])
+                        break
 
     def concrete_roots(self):
         """Same as concretized_specs, except it returns the list of concrete
@@ -1916,8 +2140,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
         If these specs appear under different user_specs, only one copy
         is added to the list returned.
         """
-        specs = [self.specs_by_hash[h] for h in self.concretized_order]
-
+        specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
         if recurse_dependencies:
             specs.extend(
                 traverse.traverse_nodes(
@@ -1962,31 +2185,76 @@ def _to_lockfile_dict(self):
             "concrete_specs": concrete_specs,
         }
 
+        if self.included_concrete_envs:
+            data[included_concrete_name] = self.included_concrete_spec_data
+
         return data
 
     def _read_lockfile(self, file_or_json):
         """Read a lockfile from a file or from a raw string."""
         lockfile_dict = sjson.load(file_or_json)
         self._read_lockfile_dict(lockfile_dict)
-        return lockfile_dict["_meta"]["lockfile-version"]
+        return lockfile_dict
+
+    def set_included_concretized_user_specs(
+        self,
+        env_name: str,
+        env_info: Dict[str, Dict[str, Any]],
+        included_json_specs_by_hash: Dict[str, Dict[str, Any]],
+    ) -> Dict[str, Dict[str, Any]]:
+        """Sets all of the concretized user specs from included environments
+        to include those from nested included environments.
+
+        Args:
+            env_name: the name (technically the path) of the included environment
+            env_info: included concrete environment data
+            included_json_specs_by_hash: concrete spec data keyed by hash
+
+        Returns: updated specs_by_hash
+        """
+        self.included_concretized_order[env_name] = []
+        self.included_concretized_user_specs[env_name] = []
+
+        def add_specs(name, info, specs_by_hash):
+            # Add specs from the environment as well as any of its nested
+            # environments.
+            for root_info in info["roots"]:
+                self.included_concretized_order[name].append(root_info["hash"])
+                self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
+            if "concrete_specs" in info:
+                specs_by_hash.update(info["concrete_specs"])
+
+            if included_concrete_name in info:
+                for included_name, included_info in info[included_concrete_name].items():
+                    if included_name not in self.included_concretized_order:
+                        self.included_concretized_order[included_name] = []
+                        self.included_concretized_user_specs[included_name] = []
+                    add_specs(included_name, included_info, specs_by_hash)
+
+        add_specs(env_name, env_info, included_json_specs_by_hash)
+        return included_json_specs_by_hash
+
     def _read_lockfile_dict(self, d):
         """Read a lockfile dictionary into this environment."""
         self.specs_by_hash = {}
+        self.included_specs_by_hash = {}
+        self.included_concretized_user_specs = {}
+        self.included_concretized_order = {}
 
         roots = d["roots"]
         self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
         self.concretized_order = [r["hash"] for r in roots]
         json_specs_by_hash = d["concrete_specs"]
+        included_json_specs_by_hash = {}
 
-        # Track specs by their lockfile key. Currently spack uses the finest
-        # grained hash as the lockfile key, while older formats used the build
-        # hash or a previous incarnation of the DAG hash (one that did not
-        # include build deps or package hash).
-        specs_by_hash = {}
-
-        # Track specs by their DAG hash, allows handling DAG hash collisions
-        first_seen = {}
+        if included_concrete_name in d:
+            for env_name, env_info in d[included_concrete_name].items():
+                included_json_specs_by_hash.update(
+                    self.set_included_concretized_user_specs(
+                        env_name, env_info, included_json_specs_by_hash
+                    )
+                )
 
         current_lockfile_format = d["_meta"]["lockfile-version"]
         try:
             reader = READER_CLS[current_lockfile_format]
@@ -1999,6 +2267,39 @@ def _read_lockfile_dict(self, d):
             msg += " You need to use a newer Spack version."
             raise SpackEnvironmentError(msg)
 
+        first_seen, self.concretized_order = self.filter_specs(
+            reader, json_specs_by_hash, self.concretized_order
+        )
+
+        for spec_dag_hash in self.concretized_order:
+            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
+
+        if any(self.included_concretized_order.values()):
+            first_seen = {}
+
+            for env_name, concretized_order in self.included_concretized_order.items():
+                filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
+                    reader, included_json_specs_by_hash, concretized_order
+                )
+                first_seen.update(filtered_spec)
+
+            for env_path, spec_hashes in self.included_concretized_order.items():
+                self.included_specs_by_hash[env_path] = {}
+                for spec_dag_hash in spec_hashes:
+                    self.included_specs_by_hash[env_path].update(
+                        {spec_dag_hash: first_seen[spec_dag_hash]}
+                    )
+
+    def filter_specs(self, reader, json_specs_by_hash, order_concretized):
+        # Track specs by their lockfile key. Currently spack uses the finest
+        # grained hash as the lockfile key, while older formats used the build
+        # hash or a previous incarnation of the DAG hash (one that did not
+        # include build deps or package hash).
+        specs_by_hash = {}
+
+        # Track specs by their DAG hash, allows handling DAG hash collisions
+        first_seen = {}
+
         # First pass: Put each spec in the map ignoring dependencies
         for lockfile_key, node_dict in json_specs_by_hash.items():
             spec = reader.from_node_dict(node_dict)
@@ -2021,7 +2322,8 @@ def _read_lockfile_dict(self, d):
         # keep. This is only required as long as we support older lockfile
         # formats where the mapping from DAG hash to lockfile key is possibly
         # one-to-many.
-        for lockfile_key in self.concretized_order:
+
+        for lockfile_key in order_concretized:
             for s in specs_by_hash[lockfile_key].traverse():
                 if s.dag_hash() not in first_seen:
                     first_seen[s.dag_hash()] = s
@@ -2029,12 +2331,10 @@ def _read_lockfile_dict(self, d):
         # Now make sure concretized_order and our internal specs dict
         # contains the keys used by modern spack (i.e. the dag_hash
         # that includes build deps and package hash).
-        self.concretized_order = [
-            specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
-        ]
-
-        for spec_dag_hash in self.concretized_order:
-            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
+        order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]
+
+        return first_seen, order_concretized
 
     def write(self, regenerate: bool = True) -> None:
         """Writes an in-memory environment to its location on disk.
@@ -2047,7 +2347,7 @@ def write(self, regenerate: bool = True) -> None:
             regenerate: regenerate views and run post-write hooks as well as writing if True.
         """
         self.manifest_uptodate_or_warn()
-        if self.specs_by_hash:
+        if self.specs_by_hash or self.included_concrete_envs:
             self.ensure_env_directory_exists(dot_env=True)
             self.update_environment_repository()
             self.manifest.flush()
@@ -2463,6 +2763,10 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
         self.scope_name = f"env:{environment_name(self.manifest_dir)}"
         self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
 
+        #: Configuration scopes associated with this environment. Note that these are not
+        #: invalidated by a re-read of the manifest file.
+        self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
+
         if not self.manifest_file.exists():
             msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
             raise SpackEnvironmentError(msg)
@@ -2542,6 +2846,19 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
             raise SpackEnvironmentError(msg) from e
         self.changed = True
 
+    def set_include_concrete(self, include_concrete: List[str]) -> None:
+        """Sets the included concrete environments in the manifest to the value(s) passed as input.
+
+        Args:
+            include_concrete: list of already existing concrete environments to include
+        """
+        self.pristine_configuration[included_concrete_name] = []
+
+        for env_path in include_concrete:
+            self.pristine_configuration[included_concrete_name].append(env_path)
+
+        self.changed = True
+
     def add_definition(self, user_spec: str, list_name: str) -> None:
         """Appends a user spec to the first active definition matching the name passed as argument.
@@ -2725,9 +3042,11 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
             config_path = substitute_path_variables(config_path)
-
             include_url = urllib.parse.urlparse(config_path)
 
+            # If the scheme is not valid, config_path is not a url
+            # of a type Spack is generally aware of
+            if spack.util.url.validate_scheme(include_url.scheme):
                 # Transform file:// URLs to direct includes.
                 if include_url.scheme == "file":
                     config_path = urllib.request.url2pathname(include_url.path)
@@ -2808,16 +3127,19 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
 
     @property
     def env_config_scopes(self) -> List[spack.config.ConfigScope]:
-        """A list of all configuration scopes for the environment manifest.
-
-        Returns: All configuration scopes associated with the environment
-        """
-        config_name = self.scope_name
-        env_scope = spack.config.SingleFileScope(
-            config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
-        )
-
-        return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
+        """A list of all configuration scopes for the environment manifest. On the first call this
+        instantiates all the scopes, on subsequent calls it returns the cached list."""
+        if self._config_scopes is not None:
+            return self._config_scopes
+        scopes: List[spack.config.ConfigScope] = [
+            *self.included_config_scopes,
+            spack.config.SingleFileScope(
+                self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
+            ),
+        ]
+        ensure_no_disallowed_env_config_mods(scopes)
+        self._config_scopes = scopes
+        return scopes
 
     def prepare_config_scope(self) -> None:
         """Add the manifest's scopes to the global configuration search path."""
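The rewritten property is a standard memoization: compute the scope list once, validate it once, then serve the cached list, which is why the new `_config_scopes` attribute is documented as surviving manifest re-reads. The pattern in isolation, with placeholder scope values:

```python
from typing import List, Optional


class ManifestLike:
    def __init__(self) -> None:
        # None means "not computed yet"; an empty list would be a valid value.
        self._config_scopes: Optional[List[str]] = None

    @property
    def env_config_scopes(self) -> List[str]:
        if self._config_scopes is not None:
            return self._config_scopes
        scopes = ["included:scope", "env:scope"]  # placeholders
        self._config_scopes = scopes
        return scopes
```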
@@ -662,9 +662,6 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
             return
 
         # Drop externals
-        for s in specs:
-            if s.external:
-                tty.warn("Skipping external package: " + s.short_spec)
         specs = [s for s in specs if not s.external]
 
         self._sanity_check_view_projection(specs)
@@ -13,7 +13,6 @@
 import spack.config
 import spack.relocate
 from spack.util.elf import ElfParsingError, parse_elf
-from spack.util.executable import Executable
 
 
 def is_shared_library_elf(filepath):
@@ -141,7 +140,7 @@ def post_install(spec, explicit=None):
         return
 
     # Only enable on platforms using ELF.
-    if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
+    if not spec.satisfies("platform=linux"):
         return
 
     # Disable this hook when bootstrapping, to avoid recursion.
@@ -149,10 +148,9 @@ def post_install(spec, explicit=None):
         return
 
     # Should failing to locate patchelf be a hard error?
-    patchelf_path = spack.relocate._patchelf()
-    if not patchelf_path:
+    patchelf = spack.relocate._patchelf()
+    if not patchelf:
         return
-    patchelf = Executable(patchelf_path)
 
     fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf)
@@ -12,6 +12,10 @@
 def post_install(spec, explicit):
     # Push package to all buildcaches with autopush==True
 
+    # Do nothing if spec is an external package
+    if spec.external:
+        return
+
     # Do nothing if package was not installed from source
     pkg = spec.package
     if pkg.installed_from_binary_cache:
Some files were not shown because too many files have changed in this diff.