Compare commits: packages/t...show-sha25 (542 commits)
@@ -5,7 +5,7 @@ coverage:
   status:
     project:
       default:
-        threshold: 0.2%
+        threshold: 2.0%

 ignore:
 - lib/spack/spack/test/.*
.github/workflows/audit.yaml (vendored): 6 lines changed
@@ -28,8 +28,8 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
@@ -66,7 +66,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           spack -d audit externals
           ./share/spack/qa/validate_last_exit.ps1
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
         if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
         with:
           name: coverage-audits-${{ matrix.system.os }}
.github/workflows/bin/bootstrap-test.sh (vendored): 2 lines changed
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.4
+$PYTHON bin/spack bootstrap disable github-actions-v0.5
 $PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
.github/workflows/bootstrap.yml (vendored): 133 lines changed
@@ -37,14 +37,14 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           fetch-depth: 0
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.6
           spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
           spack external find cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -53,33 +53,27 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
       - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
           brew install cmake bison tree
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
         run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.6
           spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
-          ${{ env.VALIDATE_LAST_EXIT }}
-          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+          tree $HOME/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
@@ -89,22 +83,22 @@ jobs:
     steps:
       - name: Setup macOS
         if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: |
-          brew install tree gawk
-          sudo rm -rf $(command -v gpg gpg2)
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
+        run: brew install tree gawk
+      - name: Remove system executables
+        run: |
+          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+            sudo rm $(command -v gpg gpg2 patchelf)
+          done
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           fetch-depth: 0
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
           spack solve zlib
+          spack bootstrap disable github-actions-v0.6
           spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
           spack -d gpg list
           tree ~/.spack/bootstrap/store/

@@ -112,28 +106,21 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
       - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest'}}
-        run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
-      - name: Setup Windows
-        if: ${{ matrix.runner == 'windows-latest' }}
-        run: |
-          Remove-Item -Path (Get-Command gpg).Path
-          Remove-Item -Path (Get-Command file).Path
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
+        run: brew install tree
+      - name: Remove system executables
+        run: |
+          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+            sudo rm $(command -v gpg gpg2 patchelf)
+          done
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
           python-version: |
             3.8
@@ -141,24 +128,16 @@ jobs:
             3.10
             3.11
             3.12
+            3.13
       - name: Set bootstrap sources
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
         run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
-          spack bootstrap disable github-actions-v0.4
-      - name: Disable from source bootstrap
-        if: ${{ matrix.runner != 'windows-latest' }}
-        run: |
+          source share/spack/setup-env.sh
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable spack-install
       - name: Bootstrap clingo
-        # No binary clingo on Windows yet
-        if: ${{ matrix.runner != 'windows-latest' }}
         run: |
           set -e
-          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
+          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
             not_found=1
             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
             if [[ -d "$ver_dir" ]] ; then
@@ -178,24 +157,48 @@ jobs:
           fi
         done
       - name: Bootstrap GnuPG
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
         run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+          source share/spack/setup-env.sh
           spack -d gpg list
-          ${{ env.VALIDATE_LAST_EXIT }}
-          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+          tree $HOME/.spack/bootstrap/store/
       - name: Bootstrap File
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
         run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+          source share/spack/setup-env.sh
           spack -d python share/spack/qa/bootstrap-file.py
-          ${{ env.VALIDATE_LAST_EXIT }}
-          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+          tree $HOME/.spack/bootstrap/store/
+
+  windows:
+    runs-on: "windows-latest"
+    steps:
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+        with:
+          python-version: "3.12"
+      - name: Setup Windows
+        run: |
+          Remove-Item -Path (Get-Command gpg).Path
+          Remove-Item -Path (Get-Command file).Path
+      - name: Bootstrap clingo
+        run: |
+          ./share/spack/setup-env.ps1
+          spack bootstrap disable github-actions-v0.6
+          spack bootstrap disable github-actions-v0.5
+          spack external find --not-buildable cmake bison
+          spack -d solve zlib
+          ./share/spack/qa/validate_last_exit.ps1
+          tree $env:userprofile/.spack/bootstrap/store/
+      - name: Bootstrap GnuPG
+        run: |
+          ./share/spack/setup-env.ps1
+          spack -d gpg list
+          ./share/spack/qa/validate_last_exit.ps1
+          tree $env:userprofile/.spack/bootstrap/store/
+      - name: Bootstrap File
+        run: |
+          ./share/spack/setup-env.ps1
+          spack -d python share/spack/qa/bootstrap-file.py
+          ./share/spack/qa/validate_last_exit.ps1
+          tree $env:userprofile/.spack/bootstrap/store/
.github/workflows/build-containers.yml (vendored): 10 lines changed
@@ -55,7 +55,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -87,7 +87,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
         with:
           name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles
@@ -96,7 +96,7 @@ jobs:
         uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
+        uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
@@ -113,7 +113,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@32945a339266b759abcbdc89316275140b0fc960
+        uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +126,7 @@ jobs:
     needs: deploy-images
     steps:
       - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@50769540e7f4bd5e21e526ee35c689e35e0d6874
+        uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
         with:
           name: dockerfiles
           pattern: dockerfiles_*
.github/workflows/ci.yaml (vendored): 2 lines changed
@@ -24,7 +24,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/coverage.yml (vendored): 6 lines changed
@@ -8,8 +8,8 @@ jobs:
   upload:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -29,6 +29,6 @@ jobs:
       - run: coverage xml

       - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+        uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
         with:
           verbose: true
.github/workflows/nightly-win-builds.yml (vendored): 4 lines changed
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -1,7 +1,7 @@
-black==24.8.0
+black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20240513
+types-six==1.16.21.20241009
 vermin==1.6.0
.github/workflows/unit_tests.yaml (vendored): 53 lines changed
@@ -40,10 +40,10 @@ jobs:
           on_develop: false

     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -80,7 +80,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
          path: coverage
@@ -89,10 +89,10 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -113,7 +113,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-shell
          path: coverage
@@ -130,7 +130,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -149,32 +149,33 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
-          python-version: '3.11'
+          python-version: '3.13'
      - name: Install System packages
        run: |
          sudo apt-get -y update
-          sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
+          sudo apt-get -y install coreutils gfortran graphviz gnupg2
      - name: Install Python packages
        run: |
-          pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+          pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/bin/setup_git.sh
      - name: Run unit tests (full suite with coverage)
        env:
          COVERAGE: true
          COVERAGE_FILE: coverage/.coverage-clingo-cffi
        run: |
-          share/spack/qa/run-unit-tests
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+          . share/spack/setup-env.sh
+          spack bootstrap disable spack-install
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.6
+          spack bootstrap status
+          spack solve zlib
+          spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-clingo-cffi
          path: coverage
@@ -187,10 +188,10 @@ jobs:
        os: [macos-13, macos-14]
        python-version: ["3.11"]
    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
@@ -199,7 +200,7 @@ jobs:
          pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
      - name: Setup Homebrew packages
        run: |
-          brew install dash fish gcc gnupg2 kcov
+          brew install dash fish gcc gnupg kcov
      - name: Run unit tests
        env:
          SPACK_TEST_PARALLEL: 4
@@ -212,7 +213,7 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
          path: coverage
@@ -225,10 +226,10 @@ jobs:
        powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -243,7 +244,7 @@ jobs:
        run: |
          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
          ./share/spack/qa/validate_last_exit.ps1
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-windows
          path: coverage
.github/workflows/valid-style.yml (vendored): 22 lines changed
@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -70,7 +70,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -85,7 +85,7 @@ jobs:
           source share/spack/setup-env.sh
           spack debug report
           spack -d bootstrap now --dev
-          spack style -t black
+          spack -d style -t black
           spack unit-test -V
   import-check:
     runs-on: ubuntu-latest
@@ -98,14 +98,14 @@ jobs:
       # PR: use the base of the PR as the old commit
       - name: Checkout PR base commit
         if: github.event_name == 'pull_request'
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           ref: ${{ github.event.pull_request.base.sha }}
           path: old
       # not a PR: use the previous commit as the old commit
       - name: Checkout previous commit
         if: github.event_name != 'pull_request'
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           fetch-depth: 2
           path: old
@@ -114,14 +114,14 @@ jobs:
         run: git -C old reset --hard HEAD^

       - name: Checkout new commit
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           path: new
       - name: Install circular import checker
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           repository: haampie/circular-import-fighter
-          ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
+          ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
           path: circular-import-fighter
       - name: Install dependencies
         working-directory: circular-import-fighter
@@ -14,3 +14,26 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt
+
+search:
+  ranking:
+    spack.html: -10
+    spack.*.html: -10
+    llnl.html: -10
+    llnl.*.html: -10
+    _modules/*: -10
+    command_index.html: -9
+    basic_usage.html: 5
+    configuration.html: 5
+    config_yaml.html: 5
+    packages_yaml.html: 5
+    build_settings.html: 5
+    environments.html: 5
+    containers.html: 5
+    mirrors.html: 5
+    module_file_support.html: 5
+    repositories.html: 5
+    binary_caches.html: 5
+    chain.html: 5
+    pipelines.html: 5
+    packaging_guide.html: 5
CHANGELOG.md: 71 lines changed
@@ -1,3 +1,64 @@
+# v0.22.2 (2024-09-21)
+
+## Bugfixes
+- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
+- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
+- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
+- Support macOS Sequoia (#45018, #45127)
+- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
+- Ensure shell escaping of environment variable values in load and activate commands (#42780)
+- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
+- Do not halt concretization on unknown variants in externals (#45326)
+- Improve validation of `develop` config section (#46485)
+- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
+- Improve backwards compatibility in `include_concrete` (#45766)
+- Fix issue where package tags were sometimes repeated (#45160)
+- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
+- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
+- Remove debug statements in package hash computation (#45235)
+- Remove redundant clingo warnings (#45269)
+- Remove hard-coded layout version (#45645)
+- Do not initialize previous store state in `use_store` (#45268)
+- Docs improvements (#46475)
+
+## Package updates
+- `chapel` major update (#42197, #44931, #45304)
+
+# v0.22.1 (2024-07-04)
+
+## Bugfixes
+- Fix reuse of externals on Linux (#44316)
+- Ensure parent gcc-runtime version >= child (#44834, #44870)
+- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
+- Improve version detection of glibc (#44154)
+- Improve heuristics for solver (#44893, #44976, #45023)
+- Make strong preferences override reuse (#44373)
+- Reduce verbosity when C compiler is missing (#44182)
+- Make missing ccache executable an error when required (#44740)
+- Make every environment view containing `python` a `venv` (#44382)
+- Fix external detection for compilers with os but no target (#44156)
+- Fix version optimization for roots (#44272)
+- Handle common implementations of pagination of tags in OCI build caches (#43136)
+- Apply fetched patches to develop specs (#44950)
+- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
+- Fix issue with long filenames in build caches on Windows (#43851)
+- Fix formatting issue in `spack audit` (#45045)
+- CI fixes (#44582, #43965, #43967, #44279, #44213)
+
+## Package updates
+- protobuf: fix 3.4:3.21 patch checksum (#44443)
+- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
+- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
+- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
+- Remove mesa18 and libosmesa (#44264)
+- Enforce consistency of `gl` providers (#44307)
+- Require libiconv for iconv (#44335, #45026).
+  Notice that glibc/musl also provide iconv, but are not guaranteed to be
+  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
+- py-matplotlib: qualify when to do a post install (#44191)
+- rust: fix v1.78.0 instructions (#44127)
+- suite-sparse: improve setting of the `libs` property (#44214)
+- netlib-lapack: provide blas and lapack together (#44981)
+
 # v0.22.0 (2024-05-12)

@@ -319,6 +380,16 @@
 * 344 committers to packages
 * 45 committers to core

+# v0.21.3 (2024-10-02)
+
+## Bugfixes
+- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
+- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
+- Bump `archspec` to 0.2.5-dev for better aarch64 and Windows support (#42854, #44005,
+  #45721, #46445)
+- Support macOS Sequoia (#45018, #45127, #43862)
+- CI and test maintenance (#42909, #42728, #46711, #41943, #43363)
+
 # v0.21.2 (2024-03-01)

 ## Bugfixes
|
||||
@ECHO OFF
|
||||
setlocal EnableDelayedExpansion
|
||||
:: (c) 2021 Lawrence Livermore National Laboratory
|
||||
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
|
||||
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
|
||||
::
|
||||
:: source_dir/spack/bin/spack_cmd.bat
|
||||
::
|
||||
pushd %~dp0..
|
||||
set SPACK_ROOT=%CD%
|
||||
pushd %CD%\..
|
||||
set spackinstdir=%CD%
|
||||
popd
|
||||
|
||||
|
||||
:: Check if Python is on the PATH
|
||||
if not defined python_pf_ver (
|
||||
(for /f "delims=" %%F in ('where python.exe') do (
|
||||
set "python_pf_ver=%%F"
|
||||
goto :found_python
|
||||
) ) 2> NUL
|
||||
)
|
||||
:found_python
|
||||
if not defined python_pf_ver (
|
||||
:: If not, look for Python from the Spack installer
|
||||
:get_builtin
|
||||
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
|
||||
set "python_ver=%%g")) 2> NUL
|
||||
|
||||
if not defined python_ver (
|
||||
echo Python was not found on your system.
|
||||
echo Please install Python or add Python to your PATH.
|
||||
) else (
|
||||
set "py_path=!spackinstdir!\!python_ver!"
|
||||
set "py_exe=!py_path!\python.exe"
|
||||
)
|
||||
goto :exitpoint
|
||||
) else (
|
||||
:: Python is already on the path
|
||||
set "py_exe=!python_pf_ver!"
|
||||
(for /F "tokens=* USEBACKQ" %%F in (
|
||||
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
|
||||
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
|
||||
goto :exitpoint
|
||||
)
|
||||
:exitpoint
|
||||
|
||||
set "PATH=%SPACK_ROOT%\bin\;%PATH%"
|
||||
if defined py_path (
|
||||
set "PATH=%py_path%;%PATH%"
|
||||
)
|
||||
|
||||
if defined py_exe (
|
||||
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
|
||||
)
|
||||
|
||||
set "EDITOR=notepad"
|
||||
|
||||
DOSKEY spacktivate=spack env activate $*
|
||||
|
||||
@echo **********************************************************************
|
||||
@echo ** Spack Package Manager
|
||||
@echo **********************************************************************
|
||||
|
||||
IF "%1"=="" GOTO CONTINUE
|
||||
set
|
||||
GOTO:EOF
|
||||
|
||||
:continue
|
||||
set PROMPT=[spack] %PROMPT%
|
||||
%comspec% /k
|
||||
call "%~dp0..\share\spack\setup-env.bat"
|
||||
pushd %SPACK_ROOT%
|
||||
%comspec% /K
|
||||
|
@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
-  - name: 'github-actions-v0.5'
+  - name: github-actions-v0.6
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.6
+  - name: github-actions-v0.5
     metadata: $spack/share/spack/bootstrap/github-actions-v0.5
-  - name: 'github-actions-v0.4'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
-  - name: 'spack-install'
+  - name: spack-install
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
+    github-actions-v0.6: true
     github-actions-v0.5: true
-    github-actions-v0.4: true
     spack-install: true
|
||||
|
||||
Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
|
||||
default behavior is ``duplicates:strategy:minimal``.
|
||||
|
||||
--------
|
||||
Splicing
|
||||
--------
|
||||
|
||||
The ``splice`` key covers config attributes for splicing specs in the solver.
|
||||
|
||||
"Splicing" is a method for replacing a dependency with another spec
|
||||
that provides the same package or virtual. There are two types of
|
||||
splices, referring to different behaviors for shared dependencies
|
||||
between the root spec and the new spec replacing a dependency:
|
||||
"transitive" and "intransitive". A "transitive" splice is one that
|
||||
resolves all conflicts by taking the dependency from the new node. An
|
||||
"intransitive" splice is one that resolves all conflicts by taking the
|
||||
dependency from the original root. From a theory perspective, hybrid
|
||||
splices are possible but are not modeled by Spack.
|
||||
|
||||
All spliced specs retain a ``build_spec`` attribute that points to the
|
||||
original Spec before any splice occurred. The ``build_spec`` for a
|
||||
non-spliced spec is itself.
|
||||
|
||||
The figure below shows examples of transitive and intransitive splices:
|
||||
|
||||
.. figure:: images/splices.png
|
||||
:align: center
|
||||
|
||||
The concretizer can be configured to explicitly splice particular
|
||||
replacements for a target spec. Splicing will allow the user to make
|
||||
use of generically built public binary caches, while swapping in
|
||||
highly optimized local builds for performance critical components
|
||||
and/or components that interact closely with the specific hardware
|
||||
details of the system. The most prominent candidate for splicing is
|
||||
MPI providers. MPI packages have relatively well-understood ABI
|
||||
characteristics, and most High Performance Computing facilities deploy
|
||||
highly optimized MPI packages tailored to their particular
|
||||
hardware. The following config block configures Spack to replace
|
||||
whatever MPI provider each spec was concretized to use with the
|
||||
particular package of ``mpich`` with the hash that begins ``abcdef``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
splice:
|
||||
explicit:
|
||||
- target: mpi
|
||||
replacement: mpich/abcdef
|
||||
transitive: false
|
||||
|
||||
.. warning::
|
||||
|
||||
When configuring an explicit splice, you as the user take on the
|
||||
responsibility for ensuring ABI compatibility between the specs
|
||||
matched by the target and the replacement you provide. If they are
|
||||
not compatible, Spack will not warn you and your application will
|
||||
fail to run.
|
||||
|
||||
The ``target`` field of an explicit splice can be any abstract
|
||||
spec. The ``replacement`` field must be a spec that includes the hash
|
||||
of a concrete spec, and the replacement must either be the same
|
||||
package as the target, provide the virtual that is the target, or
|
||||
provide a virtual that the target provides. The ``transitive`` field
|
||||
is optional -- by default, splices will be transitive.
|
||||
|
||||
.. note::
|
||||
|
||||
With explicit splices configured, it is possible for Spack to
|
||||
concretize to a spec that does not satisfy the input. For example,
|
||||
with the config above ``hdf5 ^mvapich2`` will concretize to user
|
||||
``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
|
||||
will warn the user in this case, but will not fail the
|
||||
concretization.
|
||||
|
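Since ``explicit`` takes a list, several replacements can be configured at once. A sketch combining an intransitive splice with an explicitly transitive one (the hashes and the ``zlib-ng`` replacement are illustrative placeholders, not taken from this diff):

.. code-block:: yaml

   concretizer:
     splice:
       explicit:
       # intransitive: shared dependencies are taken from the original root
       - target: mpi
         replacement: mpich/abcdef   # placeholder hash
         transitive: false
       # transitive (the default, spelled out here): shared dependencies
       # are taken from the replacement node
       - target: zlib-api
         replacement: zlib-ng/fedcba   # placeholder hash
         transitive: true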
@@ -130,14 +130,19 @@ before or after a particular phase. For example, in ``perl``, we see:

     @run_after("install")
     def install_cpanm(self):
-        spec = self.spec
-
-        if spec.satisfies("+cpanm"):
-            with working_dir(join_path("cpanm", "cpanm")):
-                perl = spec["perl"].command
-                perl("Makefile.PL")
-                make()
-                make("install")
+        spec = self.spec
+        maker = make
+        cpan_dir = join_path("cpanm", "cpanm")
+        if sys.platform == "win32":
+            maker = nmake
+            cpan_dir = join_path(self.stage.source_path, cpan_dir)
+            cpan_dir = windows_sfn(cpan_dir)
+        if "+cpanm" in spec:
+            with working_dir(cpan_dir):
+                perl = spec["perl"].command
+                perl("Makefile.PL")
+                maker()
+                maker("install")

 This extra step automatically installs ``cpanm`` in addition to the
 base Perl installation.
@@ -176,8 +181,14 @@ In the ``perl`` package, we can see:

     @run_after("build")
     @on_package_attributes(run_tests=True)
-    def test(self):
-        make("test")
+    def build_test(self):
+        if sys.platform == "win32":
+            win32_dir = os.path.join(self.stage.source_path, "win32")
+            win32_dir = windows_sfn(win32_dir)
+            with working_dir(win32_dir):
+                nmake("test", ignore_quotes=True)
+        else:
+            make("test")

 As you can guess, this runs ``make test`` *after* building the package,
 if and only if testing is requested. Again, this is not specific to
@@ -49,14 +49,14 @@ following phases:
 #. ``install`` - install the package

 Package developers often add unit tests that can be invoked with
-``scons test`` or ``scons check``. Spack provides a ``test`` method
+``scons test`` or ``scons check``. Spack provides a ``build_test`` method
 to handle this. Since we don't know which one the package developer
-chose, the ``test`` method does nothing by default, but can be easily
+chose, the ``build_test`` method does nothing by default, but can be easily
 overridden like so:

 .. code-block:: python

-   def test(self):
+   def build_test(self):
        scons("check")
@@ -220,6 +220,8 @@ def setup(sphinx):
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
     ("py:class", "spack.traverse.EdgeAndDepth"),
     ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
+    # TypeVar that is not handled correctly
+    ("py:class", "llnl.util.lang.T"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
 higher-precedence scopes first. So, settings in a higher-precedence file
 can override those with the same key in a lower-precedence one. For
 list-valued settings, Spack *prepends* higher-precedence settings to
-lower-precedence settings. Completely ignoring higher-level configuration
+lower-precedence settings. Completely ignoring lower-precedence configuration
 options is supported with the ``::`` notation for keys (see
 :ref:`config-overrides` below).

@@ -511,6 +511,7 @@ Spack understands over a dozen special variables. These are:
 * ``$target_family``. The target family for the current host, as
   detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
 * ``$date``: the current date in the format YYYY-MM-DD
+* ``$spack_short_version``: the Spack version truncated to the first components.

 Note that, as with shell variables, you can write these as ``$varname``
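As a worked illustration of the ``::`` notation mentioned above (the ``build_stage`` value is a sketch, not part of this diff): a plain key in a higher-precedence scope prepends to lower-precedence lists, while a key ending in ``::`` replaces them outright:

.. code-block:: yaml

   # config.yaml in a higher-precedence scope (illustrative values)
   config:
     # 'build_stage:' would prepend this entry to lower-precedence lists;
     # 'build_stage::' discards all lower-precedence build_stage entries.
     build_stage::
     - $tempdir/$user/spack-stage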
@@ -712,27 +712,27 @@ Release branches
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
There are currently two types of Spack releases: :ref:`major releases
|
||||
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
|
||||
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
|
||||
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
|
||||
diagram of how Spack release branches work::
|
||||
|
||||
o branch: develop (latest version, v0.19.0.dev0)
|
||||
o branch: develop (latest version, v0.23.0.dev0)
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.18, tag: v0.18.1
|
||||
| o branch: releases/v0.22, tag: v0.22.1
|
||||
o |
|
||||
| o tag: v0.18.0
|
||||
| o tag: v0.22.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
o
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.17, tag: v0.17.2
|
||||
| o branch: releases/v0.21, tag: v0.21.2
|
||||
o |
|
||||
| o tag: v0.17.1
|
||||
| o tag: v0.21.1
|
||||
o |
|
||||
| o tag: v0.17.0
|
||||
| o tag: v0.21.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
@@ -743,8 +743,8 @@ requests target ``develop``. The ``develop`` branch will report that its
|
||||
version is that of the next **major** release with a ``.dev0`` suffix.
|
||||
|
||||
Each Spack release series also has a corresponding branch, e.g.
|
||||
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
|
||||
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
|
||||
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
|
||||
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
|
||||
tagged version on a release branch. Minor releases are back-ported from
|
||||
develop onto release branches. This is typically done by cherry-picking
|
||||
bugfix commits off of ``develop``.
|
||||
@@ -774,27 +774,40 @@ for more details.
 Scheduling work for releases
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-We schedule work for releases by creating `GitHub projects
-<https://github.com/spack/spack/projects>`_. At any time, there may be
-several open release projects. For example, below are two releases (from
-some past version of the page linked above):
+We schedule work for **major releases** through `milestones
+<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
+<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
+<https://github.com/spack/spack/labels>`_.
 
-.. image:: images/projects.png
+There is only one milestone open at a time. Its name corresponds to the next major version, for
+example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
+core developers, so that they are not forgotten at the time of release. The milestone is closed
+when the release is made, and a new milestone is created for the next major release.
 
-This image shows one release in progress for ``0.15.1`` and another for
-``0.16.0``. Each of these releases has a project board containing issues
-and pull requests. GitHub shows a status bar with completed work in
-green, work in progress in purple, and work not started yet in gray, so
-it's fairly easy to see progress.
+Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
+assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
+issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
+Important issues should be assigned to the next milestone as well, so they appear at the top of
+the project board.
 
-Spack's project boards are not firm commitments so we move work between
-releases frequently. If we need to make a release and some tasks are not
-yet done, we will simply move them to the next minor or major release, rather
-than delaying the release to complete them.
+Spack's milestones are not firm commitments so we move work between releases frequently. If we
+need to make a release and some tasks are not yet done, we will simply move them to the next major
+release milestone, rather than delaying the release to complete them.
 
-For more on using GitHub project boards, see `GitHub's documentation
-<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
+^^^^^^^^^^^^^^^^^^^^^
+Backporting bug fixes
+^^^^^^^^^^^^^^^^^^^^^
+
+When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
+(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
+backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
+fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
+that the bug fix should be backported to.
+
+Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
+This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
+branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
+Typically there are one or two backport pull requests open at any given time.
 
 .. _major-releases:
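Day to day, keeping a backports branch current is then a matter of cherry-picking in merge order. A sketch with a hypothetical version and SHA:

.. code-block:: console

   $ git checkout backports/v0.22.2
   $ git cherry-pick 1a2b3c4d            # squashed commit of a PR labelled v0.22.2
   $ git push origin backports/v0.22.2   # plain push; never force push this branch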
@@ -802,25 +815,21 @@ For more on using GitHub project boards, see `GitHub's documentation
 Making major releases
 ^^^^^^^^^^^^^^^^^^^^^
 
-Assuming a project board has already been created and all required work
-completed, the steps to make the major release are:
+Assuming all required work from the milestone is completed, the steps to make the major release
+are:
 
-#. Create two new project boards:
+#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
+   release.
 
-   * One for the next major release
-   * One for the next point release
+#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.
 
-#. Move any optional tasks that are not done to one of the new project boards.
-
-   In general, small bugfixes should go to the next point release. Major
-   features, refactors, and changes that could affect concretization should
-   go in the next major release.
+#. Move any optional tasks that are not done to the next milestone.
 
 #. Create a branch for the release, based on ``develop``:
 
    .. code-block:: console
 
-      $ git checkout -b releases/v0.15 develop
+      $ git checkout -b releases/v0.23 develop
 
    For a version ``vX.Y.Z``, the branch's name should be
    ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -856,8 +865,8 @@ completed, the steps to make the major release are:
 
    Create a pull request targeting the ``develop`` branch, bumping the major
    version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
-   For instance when you have just released ``v0.15.0``, set the version
-   to ``(0, 16, 0, 'dev0')`` on ``develop``.
+   For instance when you have just released ``v0.23.0``, set the version
+   to ``(0, 24, 0, 'dev0')`` on ``develop``.
 
 #. Follow the steps in :ref:`publishing-releases`.
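A sketch of that version bump, assuming the tuple in ``lib/spack/spack/__init__.py`` is named ``spack_version_info`` (verify in the file before editing):

.. code-block:: console

   $ git checkout -b bump-develop-version develop
   $ $EDITOR lib/spack/spack/__init__.py    # set the tuple to (0, 24, 0, 'dev0')
   $ git commit -am "Set version to v0.24.0.dev0"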
@@ -866,82 +875,52 @@ completed, the steps to make the major release are:
 #. Follow the steps in :ref:`announcing-releases`.
 
 
-.. _point-releases:
+.. _patch-releases:
 
 ^^^^^^^^^^^^^^^^^^^^^
-Making point releases
+Making patch releases
 ^^^^^^^^^^^^^^^^^^^^^
 
-Assuming a project board has already been created and all required work
-completed, the steps to make the point release are:
+To make the patch release process both efficient and transparent, we use a *backports pull request*
+which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
+cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
+this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
+changes are fresh in the mind of the developer.
 
-#. Create a new project board for the next point release.
+The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
+is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
 
-#. Move any optional tasks that are not done to the next project board.
+Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
+on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
+squashed), cherry-pick each associated commit individually. Never force push to the
+``backports/vX.Y.Z`` branch.
 
-#. Check out the release branch (it should already exist).
+.. warning::
 
-   For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
-   For ``v0.15.1``, you would check out ``releases/v0.15``:
+   Sometimes you may **still** get merge conflicts even if you have
+   cherry-picked all the commits in order. This generally means there
+   is some other intervening pull request that the one you're trying
+   to pick depends on. In these cases, you'll need to make a judgment
+   call regarding those pull requests. Consider the number of affected
+   files and/or the resulting differences.
 
-   .. code-block:: console
+   1. If the changes are small, you might just cherry-pick it.
 
-      $ git checkout releases/v0.15
+   2. If the changes are large, then you may decide that this fix is not
+      worth including in a patch release, in which case you should remove
+      the label from the pull request. Remember that large, manual backports
+      are seldom the right choice for a patch release.
 
-#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
-   in the project, create it. This pull request ought to be created as early as
-   possible when working on a release project, so that we can build the release
-   commits incrementally, and identify potential conflicts at an early stage.
+When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
+release as follows:
 
-#. Cherry-pick each pull request in the ``Done`` column of the release
-   project board onto the ``Backports vX.Y.Z`` pull request.
+#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
+   release.
 
-   This is **usually** fairly simple since we squash the commits from the
-   vast majority of pull requests. That means there is only one commit
-   per pull request to cherry-pick. For example, `this pull request
-   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
-   they were squashed into a single commit on merge. You can see the
-   commit that was created here:
+#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
 
-   .. image:: images/pr-commit.png
-
-   You can easily cherry pick it like this (assuming you already have the
-   release branch checked out):
-
-   .. code-block:: console
-
-      $ git cherry-pick 7e46da7
-
-   For pull requests that were rebased (or not squashed), you'll need to
-   cherry-pick each associated commit individually.
-
-   .. warning::
-
-      It is important to cherry-pick commits in the order they happened,
-      otherwise you can get conflicts while cherry-picking. When
-      cherry-picking look at the merge date,
-      **not** the number of the pull request or the date it was opened.
-
-      Sometimes you may **still** get merge conflicts even if you have
-      cherry-picked all the commits in order. This generally means there
-      is some other intervening pull request that the one you're trying
-      to pick depends on. In these cases, you'll need to make a judgment
-      call regarding those pull requests. Consider the number of affected
-      files and/or the resulting differences.
-
-      1. If the dependency changes are small, you might just cherry-pick it,
-         too. If you do this, add the task to the release board.
-
-      2. If the changes are large, then you may decide that this fix is not
-         worth including in a point release, in which case you should remove
-         the task from the release project.
-
-      3. You can always decide to manually back-port the fix to the release
-         branch if neither of the above options makes sense, but this can
-         require a lot of work. It's seldom the right choice.
-
-#. When all the commits from the project board are cherry-picked into
-   the ``Backports vX.Y.Z`` pull request, you can push a commit to:
+#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
+   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:
 
    1. Bump the version in ``lib/spack/spack/__init__.py``.
    2. Update ``CHANGELOG.md`` with a list of the changes.
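A sketch of that single release commit, with a hypothetical version number:

.. code-block:: console

   $ git checkout backports/v0.22.2
   $ $EDITOR lib/spack/spack/__init__.py CHANGELOG.md
   $ git commit -am "Set version to v0.22.2"
   $ git push origin backports/v0.22.2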
@@ -950,20 +929,22 @@
 
    release branch. See `the changelog from 0.14.1
    <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
 
-#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
-   is needed to keep track in the release branch of all the commits that were
-   cherry-picked.
-
-#. Make sure CI passes on the release branch, including:
+#. Make sure CI passes on the **backports pull request**, including:
 
    * Regular unit tests
    * Build tests
    * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
 
-   If CI does not pass, you'll need to figure out why, and make changes
-   to the release branch until it does. You can make more commits, modify
-   or remove cherry-picked commits, or cherry-pick **more** from
-   ``develop`` to make this happen.
+#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
+   is needed to keep track in the release branch of all the commits that were
+   cherry-picked.
+
+#. Make sure CI passes on the last commit of the **release branch**.
+
+#. In the rare case you need to include additional commits in the patch release after the backports
+   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
+   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
+   repeat the process. Avoid repeated force pushes to the release branch.
 
 #. Follow the steps in :ref:`publishing-releases`.
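The single allowed force push in that rare case looks like the following sketch (hypothetical version):

.. code-block:: console

   $ git checkout releases/v0.22
   $ git reset --hard HEAD~1                  # drop the "Set version to v0.22.2" commit
   $ git push --force origin releases/v0.22   # one force push, then open Backports v0.22.2 (2)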
@@ -1038,25 +1019,31 @@ Updating `releases/latest`
 
 If the new release is the **highest** Spack release yet, you should
 also tag it as ``releases/latest``. For example, suppose the highest
-release is currently ``0.15.3``:
+release is currently ``0.22.3``:
 
-* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
-  it with ``releases/latest``, as these are higher than ``0.15.3``.
+* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
+  it with ``releases/latest``, as these are higher than ``0.22.3``.
 
 * If you are making a new release of an **older** major version of
-  Spack, e.g. ``0.14.4``, then you should not tag it as
+  Spack, e.g. ``0.21.4``, then you should not tag it as
   ``releases/latest`` (as there are newer major versions).
 
-To tag ``releases/latest``, do this:
+To do so, first fetch the latest tag created on GitHub, since you may not have it locally:
 
 .. code-block:: console
 
-   $ git checkout releases/vX.Y   # vX.Y is the new release's branch
-   $ git tag --force releases/latest
-   $ git push --force --tags
+   $ git fetch --force git@github.com:spack/spack vX.Y.Z
 
-The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
-``releases/latest`` tag with the new one.
+Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
+
+.. code-block:: console
+
+   $ git tag --force releases/latest vX.Y.Z
+   $ git push --force git@github.com:spack/spack releases/latest
+
+The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
+tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
+local tags.
 
 
 .. _announcing-releases:
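To verify the result without cloning anything, you can query the remote directly (a sketch):

.. code-block:: console

   $ git ls-remote git@github.com:spack/spack releases/latest    # should print the SHA of vX.Y.Z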
@@ -5,49 +5,56 @@
 
 .. _environments:
 
-=========================
-Environments (spack.yaml)
-=========================
+=====================================
+Environments (spack.yaml, spack.lock)
+=====================================
 
-An environment is used to group together a set of specs for the
-purpose of building, rebuilding and deploying in a coherent fashion.
-Environments provide a number of advantages over the *à la carte*
-approach of building and loading individual Spack modules:
+An environment is used to group a set of specs intended for some purpose
+to be built, rebuilt, and deployed in a coherent fashion. Environments
+define aspects of the installation of the software, such as:
 
-#. Environments separate the steps of (a) choosing what to
-   install, (b) concretizing, and (c) installing. This allows
-   Environments to remain stable and repeatable, even if Spack packages
-   are upgraded: specs are only re-concretized when the user
-   explicitly asks for it. It is even possible to reliably
-   transport environments between different computers running
-   different versions of Spack!
-#. Environments allow several specs to be built at once; a more robust
-   solution than ad-hoc scripts making multiple calls to ``spack
-   install``.
-#. An Environment that is built as a whole can be loaded as a whole
-   into the user environment. An Environment can be built to maintain
-   a filesystem view of its packages, and the environment can load
-   that view into the user environment at activation time. Spack can
-   also generate a script to load all modules related to an
-   environment.
+#. *which* specs to install;
+#. *how* those specs are configured; and
+#. *where* the concretized software will be installed.
+
+Aggregating this information into an environment for processing has advantages
+over the *à la carte* approach of building and loading individual Spack modules.
+
+With environments, you concretize, install, or load (activate) all of the
+specs with a single command. Concretization fully configures the specs
+and dependencies of the environment in preparation for installing the
+software. This is a more robust solution than ad-hoc installation scripts.
+And you can share an environment or even re-use it on a different computer.
+
+Environment definitions, especially *how* specs are configured, allow the
+software to remain stable and repeatable even when Spack packages are upgraded.
+Changes are only picked up when the environment is explicitly re-concretized.
+
+Defining *where* specs are installed supports a filesystem view of the
+environment. Yet Spack maintains a single installation of the software that
+can be re-used across multiple environments.
+
+Activating an environment determines *when* all of the associated (and
+installed) specs are loaded, limiting the software loaded to those specs
+actually needed by the environment. Spack can even generate a script to
+load all modules related to an environment.
 
 Other packaging systems also provide environments that are similar in
 some ways to Spack environments; for example, `Conda environments
 <https://conda.io/docs/user-guide/tasks/manage-environments.html>`_ or
 `Python Virtual Environments
 <https://docs.python.org/3/tutorial/venv.html>`_. Spack environments
-provide some distinctive features:
+nonetheless provide some distinctive features:
 
 #. A spec installed "in" an environment is no different from the same
-   spec installed anywhere else in Spack. Environments are assembled
-   simply by collecting together a set of specs.
-#. Spack Environments may contain more than one spec of the same
+   spec installed anywhere else in Spack.
+#. Spack environments may contain more than one spec of the same
    package.
 
 Spack uses a "manifest and lock" model similar to `Bundler gemfiles
-<https://bundler.io/man/gemfile.5.html>`_ and other package
-managers. The user input file is named ``spack.yaml`` and the lock
-file is named ``spack.lock``
+<https://bundler.io/man/gemfile.5.html>`_ and other package managers.
+The environment's user input file (or manifest) is named ``spack.yaml``.
+The lock file, which contains the fully configured and concretized specs,
+is named ``spack.lock``.
 
 .. _environments-using:
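The manifest-and-lock pair is easy to see on disk. A sketch with a hypothetical environment name:

.. code-block:: console

   $ spack env create myenv
   $ spack -e myenv concretize
   $ ls $SPACK_ROOT/var/spack/environments/myenv
   spack.lock  spack.yaml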
@@ -68,55 +75,60 @@ An environment is created by:
 
    $ spack env create myenv
 
-Spack then creates the directory ``var/spack/environments/myenv``.
+The directory ``$SPACK_ROOT/var/spack/environments/myenv`` is created
+to manage the environment.
 
 .. note::
 
-   All managed environments by default are stored in the ``var/spack/environments`` folder.
-   This location can be changed by setting the ``environments_root`` variable in ``config.yaml``.
+   All managed environments by default are stored in the
+   ``$SPACK_ROOT/var/spack/environments`` folder. This location can be changed
+   by setting the ``environments_root`` variable in ``config.yaml``.
 
-In the ``var/spack/environments/myenv`` directory, Spack creates the
-file ``spack.yaml`` and the hidden directory ``.spack-env``.
-
-Spack stores metadata in the ``.spack-env`` directory. User
-interaction will occur through the ``spack.yaml`` file and the Spack
-commands that affect it. When the environment is concretized, Spack
-will create a file ``spack.lock`` with the concrete information for
+Spack creates the file ``spack.yaml``, hidden directory ``.spack-env``, and
+``spack.lock`` file under ``$SPACK_ROOT/var/spack/environments/myenv``. User
+interaction occurs through the ``spack.yaml`` file and the Spack commands
+that affect it. Metadata and, by default, the view are stored in the
+``.spack-env`` directory. When the environment is concretized, Spack creates
+the ``spack.lock`` file with the fully configured specs and dependencies for
 the environment.
 
-In addition to being the default location for the view associated with
-an Environment, the ``.spack-env`` directory also contains:
+The ``.spack-env`` subdirectory also contains:
 
-* ``repo/``: A repo consisting of the Spack packages used in this
-  environment. This allows the environment to build the same, in
-  theory, even on different versions of Spack with different
+* ``repo/``: A subdirectory acting as the repo consisting of the Spack
+  packages used in the environment. It allows the environment to build
+  the same, in theory, even on different versions of Spack with different
   packages!
-* ``logs/``: A directory containing the build logs for the packages
-  in this Environment.
+* ``logs/``: A subdirectory containing the build logs for the packages
+  in this environment.
 
-Spack Environments can also be created from either a manifest file
-(usually but not necessarily named, ``spack.yaml``) or a lockfile.
-To create an Environment from a manifest:
+Spack environments can also be created from either the user input (manifest)
+file or the lockfile. Create an environment from a manifest using:
 
 .. code-block:: console
 
    $ spack env create myenv spack.yaml
 
-To create an Environment from a ``spack.lock`` lockfile:
+The resulting environment is guaranteed to have the same root specs as
+the original but may concretize differently in the presence of different
+explicit or default configuration settings (e.g., a different version of
+Spack or for a different user account).
+
+Create an environment from a ``spack.lock`` file using:
 
 .. code-block:: console
 
    $ spack env create myenv spack.lock
 
-Either of these commands can also take a full path to the
-initialization file.
+The resulting environment, when on the same or a compatible machine, is
+guaranteed to initially have the same concrete specs as the original.
 
-A Spack Environment created from a ``spack.yaml`` manifest is
-guaranteed to have the same root specs as the original Environment,
-but may concretize differently. A Spack Environment created from a
-``spack.lock`` lockfile is guaranteed to have the same concrete specs
-as the original Environment. Either may obviously then differ as the
-user modifies it.
+.. note::
+
+   Environment creation also accepts a full path to the file.
+
+   If the path is not under the ``$SPACK_ROOT/var/spack/environments``
+   directory then the source is referred to as an
+   :ref:`independent environment <independent_environments>`.
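For instance, to reproduce an environment elsewhere, ship the two files and recreate from whichever guarantee you need (hypothetical names):

.. code-block:: console

   $ spack env create rebuilt spack.lock    # same concrete specs as the original
   $ spack env create variant spack.yaml    # same roots; may concretize differently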
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Activating an Environment
@@ -129,7 +141,7 @@ To activate an environment, use the following command:
 
    $ spack env activate myenv
 
 By default, the ``spack env activate`` will load the view associated
-with the Environment into the user environment. The ``-v,
+with the environment into the user environment. The ``-v,
 --with-view`` argument ensures this behavior, and the ``-V,
 --without-view`` argument activates the environment without changing
 the user environment variables.
@@ -142,8 +154,11 @@ user's prompt to begin with the environment name in brackets.
 
    $ spack env activate -p myenv
    [myenv] $ ...
 
-The ``activate`` command can also be used to create a new environment if it does not already
-exist.
+The ``activate`` command can also be used to create a new environment, if it is
+not already defined, by adding the ``--create`` flag. Managed and independent
+environments can both be created using the same flags that ``spack env create``
+accepts. If an environment already exists then spack will simply activate it
+and ignore the create-specific flags.
 
 .. code-block:: console
 
@@ -168,49 +183,50 @@ or the shortcut alias
 
 If the environment was activated with its view, deactivating the
 environment will remove the view from the user environment.
 
-^^^^^^^^^^^^^^^^^^^^^^
-Anonymous Environments
-^^^^^^^^^^^^^^^^^^^^^^
+.. _independent_environments:
 
-Apart from managed environments, Spack also supports anonymous environments.
+^^^^^^^^^^^^^^^^^^^^^^^^
+Independent Environments
+^^^^^^^^^^^^^^^^^^^^^^^^
 
-Anonymous environments can be placed in any directory of choice.
+Independent environments can be located in any directory outside of Spack.
 
 .. note::
 
    When uninstalling packages, Spack asks the user to confirm the removal of packages
-   that are still used in a managed environment. This is not the case for anonymous
+   that are still used in a managed environment. This is not the case for independent
    environments.
 
-To create an anonymous environment, use one of the following commands:
+To create an independent environment, use one of the following commands:
 
 .. code-block:: console
 
-   $ spack env create --dir my_env
+   $ spack env create ./my_env
 
-As a shorthand, you can also create an anonymous environment upon activation if it does not
+As a shorthand, you can also create an independent environment upon activation if it does not
 already exist:
 
 .. code-block:: console
 
    $ spack env activate --create ./my_env
 
-For convenience, Spack can also place an anonymous environment in a temporary directory for you:
+For convenience, Spack can also place an independent environment in a temporary directory for you:
 
 .. code-block:: console
 
    $ spack env activate --temp
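Activation then works as for managed environments, using the path instead of a name. A sketch:

.. code-block:: console

   $ spack env activate ./my_env    # activate by path
   $ spack env status               # reports which environment is active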
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Environment Sensitive Commands
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Environment-Aware Commands
+^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Spack commands are environment sensitive. For example, the ``find``
-command shows only the specs in the active Environment if an
-Environment has been activated. Similarly, the ``install`` and
-``uninstall`` commands act on the active environment.
+Spack commands are environment-aware. For example, the ``find``
+command shows only the specs in the active environment if an
+environment has been activated. Otherwise it shows all specs in
+the Spack instance. The same rule applies to the ``install`` and
+``uninstall`` commands.
 
 .. code-block:: console
 
@@ -255,32 +271,33 @@ Environment has been activated. Similarly, the ``install`` and
 
 
 Note that when we installed the abstract spec ``zlib@1.2.8``, it was
-presented as a root of the Environment. All explicitly installed
-packages will be listed as roots of the Environment.
+presented as a root of the environment. All explicitly installed
+packages will be listed as roots of the environment.
 
 All of the Spack commands that act on the list of installed specs are
-Environment-sensitive in this way, including ``install``,
-``uninstall``, ``find``, ``extensions``, and more. In the
+environment-aware in this way, including ``install``,
+``uninstall``, ``find``, ``extensions``, etcetera. In the
 :ref:`environment-configuration` section we will discuss
-Environment-sensitive commands further.
+environment-aware commands further.
 
 ^^^^^^^^^^^^^^^^^^^^^
 Adding Abstract Specs
 ^^^^^^^^^^^^^^^^^^^^^
 
-An abstract spec is the user-specified spec before Spack has applied
-any defaults or dependency information.
+An abstract spec is the user-specified spec before Spack applies
+defaults or dependency information.
 
-Users can add abstract specs to an Environment using the ``spack add``
-command. The most important component of an Environment is a list of
+Users can add abstract specs to an environment using the ``spack add``
+command. The most important component of an environment is a list of
 abstract specs.
 
-Adding a spec adds to the manifest (the ``spack.yaml`` file), which is
-used to define the roots of the Environment, but does not affect the
-concrete specs in the lockfile, nor does it install the spec.
+Adding a spec adds it as a root spec of the environment in the user
+input file (``spack.yaml``). It does not affect the concrete specs
+in the lock file (``spack.lock``) and it does not install the spec.
 
-The ``spack add`` command is environment aware. It adds to the
-currently active environment. All environment aware commands can also
+The ``spack add`` command is environment-aware. It adds the spec to the
+currently active environment. An error is generated if there isn't an
+active environment. All environment-aware commands can also
 be called using the ``spack -e`` flag to specify the environment.
 
 .. code-block:: console
@@ -300,11 +317,11 @@ or
 Concretizing
 ^^^^^^^^^^^^
 
-Once some user specs have been added to an environment, they can be concretized.
-There are at the moment three different modes of operation to concretize an environment,
-which are explained in details in :ref:`environments_concretization_config`.
-Regardless of which mode of operation has been chosen, the following
-command will ensure all the root specs are concretized according to the
+Once user specs have been added to an environment, they can be concretized.
+There are three different modes of operation to concretize an environment,
+explained in detail in :ref:`environments_concretization_config`.
+Regardless of which mode of operation is chosen, the following
+command will ensure all of the root specs are concretized according to the
 constraints that are prescribed in the configuration:
 
 .. code-block:: console
@@ -313,16 +330,15 @@ constraints that are prescribed in the configuration:
 
 In the case of specs that are not concretized together, the command
 above will concretize only the specs that were added and not yet
-concretized. Forcing a re-concretization of all the specs can be done
-instead with this command:
+concretized. Forcing a re-concretization of all of the specs can be done
+by adding the ``-f`` option:
 
 .. code-block:: console
 
    [myenv]$ spack concretize -f
 
-When the ``-f`` flag is not used to reconcretize all specs, Spack
-guarantees that already concretized specs are unchanged in the
-environment.
+Without the option, Spack guarantees that already concretized specs are
+unchanged in the environment.
 
 The ``concretize`` command does not install any packages. For packages
 that have already been installed outside of the environment, the
@@ -355,16 +371,16 @@ installed specs using the ``-c`` (``--concretized``) flag.
 Installing an Environment
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 
-In addition to installing individual specs into an Environment, one
-can install the entire Environment at once using the command
+In addition to adding individual specs to an environment, one
+can install the entire environment at once using the command
 
 .. code-block:: console
 
    [myenv]$ spack install
 
-If the Environment has been concretized, Spack will install the
-concretized specs. Otherwise, ``spack install`` will first concretize
-the Environment and then install the concretized specs.
+If the environment has been concretized, Spack will install the
+concretized specs. Otherwise, ``spack install`` will concretize
+the environment before installing the concretized specs.
 
 .. note::
 
@@ -385,17 +401,17 @@ the Environment and then install the concretized specs.
 
 
 As it installs, ``spack install`` creates symbolic links in the
-``logs/`` directory in the Environment, allowing for easy inspection
+``logs/`` directory in the environment, allowing for easy inspection
 of build logs related to that environment. The ``spack install``
 command also stores a Spack repo containing the ``package.py`` file
 used at install time for each package in the ``repos/`` directory in
-the Environment.
+the environment.
 
 The ``--no-add`` option can be used in a concrete environment to tell
 spack to install specs already present in the environment but not to
 add any new root specs to the environment. For root specs provided
 to ``spack install`` on the command line, ``--no-add`` is the default,
-while for dependency specs on the other hand, it is optional. In other
+while for dependency specs, it is optional. In other
 words, if there is an unambiguous match in the active concrete environment
 for a root spec provided to ``spack install`` on the command line, spack
 does not require you to specify the ``--no-add`` option to prevent the spec
@@ -409,9 +425,13 @@ Developing Packages in a Spack Environment
 
 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
-will configure Spack to install the package from local source. By
-default, it will also clone the package to a subdirectory in the
-environment. This package will have a special variant ``dev_path``
+will configure Spack to install the package from local source.
+If a version is not provided from the command line interface then spack
+will automatically pick the highest version the package has defined.
+This means any infinity versions (``develop``, ``main``, ``stable``) will be
+preferred in this selection process.
+By default, ``spack develop`` will also clone the package to a subdirectory in the
+environment for the local source. This package will have a special variant ``dev_path``
 set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
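A minimal sketch with a hypothetical package name:

.. code-block:: console

   $ spack develop mypkg          # no version given: the highest version is picked,
                                  # preferring develop/main/stable
   $ spack develop mypkg@2.1.0    # or pin a concrete version explicitly
   $ spack install                # rebuilds mypkg and dependents when local sources change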
@@ -430,7 +450,7 @@ also be used as valid concrete versions (see :ref:`version-specifier`).
 This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
 the ``main`` branch of the package, and ``spack install`` will install from
 that git clone if ``foo`` is in the environment.
-Further development on ``foo`` can be tested by reinstalling the environment,
+Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.
 
 If the package being developed supports out-of-source builds then users can use the
@@ -615,7 +635,7 @@ manipulate configuration inline in the ``spack.yaml`` file.
 Inline configurations
 ^^^^^^^^^^^^^^^^^^^^^
 
-Inline Environment-scope configuration is done using the same yaml
+Inline environment-scope configuration is done using the same yaml
 format as standard Spack configuration scopes, covered in the
 :ref:`configuration` section. Each section is contained under a
 top-level yaml object with its name. For example, a ``spack.yaml``
@@ -640,7 +660,7 @@ Included configurations
 
 Spack environments allow an ``include`` heading in their yaml
 schema. This heading pulls in external configuration files and applies
-them to the Environment.
+them to the environment.
 
 .. code-block:: yaml
 
@@ -653,6 +673,9 @@ them to the Environment.
 Environments can include files or URLs. File paths can be relative or
 absolute. URLs include the path to the text for individual files or
 can be the path to a directory containing configuration files.
+Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
+schemes). Spack-specific, environment and user path variables may be
+used in these paths. See :ref:`config-file-variables` for more information.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence
@@ -667,7 +690,7 @@ have higher precedence, as the included configs are applied in reverse order.
 Manually Editing the Specs List
 -------------------------------
 
-The list of abstract/root specs in the Environment is maintained in
+The list of abstract/root specs in the environment is maintained in
 the ``spack.yaml`` manifest under the heading ``specs``.
 
 .. code-block:: yaml
@@ -775,7 +798,7 @@ evaluates to the cross-product of those specs. Spec matrices also
 contain an ``excludes`` directive, which eliminates certain
 combinations from the evaluated result.
 
-The following two Environment manifests are identical:
+The following two environment manifests are identical:
 
 .. code-block:: yaml
 
@@ -850,7 +873,7 @@ files are identical.
 In short files like the example, it may be easier to simply list the
 included specs. However for more complicated examples involving many
 packages across many toolchains, separately factored lists make
-Environments substantially more manageable.
+environments substantially more manageable.
 
 Additionally, the ``-l`` option to the ``spack add`` command allows
 one to add to named lists in the definitions section of the manifest
@@ -1066,7 +1089,7 @@ true``). The argument ``--without-view`` can be used to create an
 environment without any view configured.
 
 The ``spack env view`` command can be used to change the managed views
-of an Environment. The subcommand ``spack env view enable`` will add a
+of an environment. The subcommand ``spack env view enable`` will add a
 view named ``default`` to an environment. It takes an optional
 argument to specify the path for the new default view. The subcommand
 ``spack env view disable`` will remove the view named ``default`` from
@@ -1234,7 +1257,7 @@ gets installed and is available for use in the ``env`` target.
         $(SPACK) -e . env depfile -o $@ --make-prefix spack
 
    env: spack/env
-        $(info Environment installed!)
+        $(info environment installed!)
 
    clean:
         rm -rf spack.lock env.mk spack/
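Outside of a wrapper Makefile like the one above, the same flags can be used directly. A sketch:

.. code-block:: console

   $ spack -e . env depfile -o env.mk --make-prefix spack    # generate the Makefile fragment
   $ make -f env.mk -j8                                      # drive the installation through make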
[Binary files not shown: two images removed (44 KiB and 68 KiB); lib/spack/docs/images/splices.png added as a new file (358 KiB).]
@@ -457,11 +457,11 @@ For instance, the following config options,
     tcl:
       all:
         suffixes:
-          ^python@3.12: 'python-3.12'
+          ^python@3: 'python{^python.version}'
           ^openblas: 'openblas'
 
-will add a ``python-3.12`` version string to any packages compiled with
-Python matching the spec, ``python@3.12``. This is useful to know which
+will add a ``python-3.12.1`` version string to any packages compiled with
+Python matching the spec, ``python@3``. This is useful to know which
 version of Python a set of Python extensions is associated with. Likewise, the
 ``openblas`` string is attached to any program that has openblas in the spec,
 most likely via the ``+blas`` variant specification.
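After changing the suffix configuration, regenerate the module files so the new names appear (a sketch):

.. code-block:: console

   $ spack module tcl refresh -y    # rewrite tcl module files with the new suffixes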
@@ -59,7 +59,7 @@ Functional Example
 ------------------
 
 The simplest fully functional standalone example of a working pipeline can be
-examined live at this example `project <https://gitlab.com/scott.wittenburg/spack-pipeline-demo>`_
+examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
 on gitlab.com.
 
 Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the
@@ -67,39 +67,46 @@ pipeline:
 
 .. code-block:: yaml
 
-   stages: [generate, build]
+   stages: [ "generate", "build" ]
 
    variables:
-     SPACK_REPO: https://github.com/scottwittenburg/spack.git
-     SPACK_REF: pipelines-reproducible-builds
+     SPACK_REPOSITORY: "https://github.com/spack/spack.git"
+     SPACK_REF: "develop-2024-10-06"
+     SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
+     SPACK_BACKTRACE: 1
 
    generate-pipeline:
-     stage: generate
      tags:
-       - docker
+       - saas-linux-small-amd64
+     stage: generate
      image:
-       name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
-       entrypoint: [""]
-     before_script:
-       - git clone ${SPACK_REPO}
-       - pushd spack && git checkout ${SPACK_REF} && popd
-       - . "./spack/share/spack/setup-env.sh"
+       name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
      script:
+       - git clone ${SPACK_REPOSITORY}
+       - cd spack && git checkout ${SPACK_REF} && cd ../
+       - . "./spack/share/spack/setup-env.sh"
+       - spack --version
        - spack env activate --without-view .
-       - spack -d ci generate
+       - spack -d -v --color=always
+         ci generate
+         --check-index-only
          --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
-         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
+         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
      artifacts:
        paths:
         - "${CI_PROJECT_DIR}/jobs_scratch_dir"
 
-   build-jobs:
+   build-pipeline:
      stage: build
      trigger:
        include:
-         - artifact: "jobs_scratch_dir/pipeline.yml"
+         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
            job: generate-pipeline
        strategy: depend
+     needs:
+       - artifacts: True
+         job: generate-pipeline
 
 
 The key thing to note above is that there are two jobs: The first job to run,
 ``generate-pipeline``, runs the ``spack ci generate`` command to generate a
@@ -114,82 +121,93 @@ And here's the spack environment built by the pipeline represented as a
 
    spack:
      view: false
      concretizer:
-       unify: false
+       unify: true
+       reuse: false
 
      definitions:
      - pkgs:
        - zlib
-       - bzip2
-     - arch:
-       - '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'
+       - bzip2 ~debug
+     - compiler:
+       - '%gcc'
 
      specs:
      - matrix:
        - - $pkgs
-       - - $arch
-
-     mirrors: { "mirror": "s3://spack-public/mirror" }
+       - - $compiler
 
      ci:
-       enable-artifacts-buildcache: True
-       rebuild-index: False
+       target: gitlab
 
        pipeline-gen:
        - any-job:
-           before_script:
-             - git clone ${SPACK_REPO}
-             - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
-             - . "./spack/share/spack/setup-env.sh"
-       - build-job:
-           tags: [docker]
+           tags:
+           - saas-linux-small-amd64
            image:
-             name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
-             entrypoint: [""]
+             name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
+           before_script:
+           - git clone ${SPACK_REPOSITORY}
+           - cd spack && git checkout ${SPACK_REF} && cd ../
+           - . "./spack/share/spack/setup-env.sh"
+           - spack --version
+           - export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
+           - spack config blame mirrors
 
 
 The elements of this file important to spack ci pipelines are described in more
 detail below, but there are a couple of things to note about the above working
 example:
 
 .. note::
-   There is no ``script`` attribute specified here. The reason for this is
-   Spack CI will automatically generate reasonable default scripts. More
-   detail on what is in these scripts can be found below.
+   The use of ``reuse: false`` in spack environments used for pipelines is
+   almost always what you want, as without it your pipelines will not rebuild
+   packages even if package hashes have changed. This is due to the concretizer
+   strongly preferring known hashes when ``reuse: true``.
 
-Also notice the ``before_script`` section. It is required when using any of the
-default scripts to source the ``setup-env.sh`` script in order to inform
-the default scripts where to find the ``spack`` executable.
+The ``ci`` section in the above environment file contains the bare minimum
+configuration required for ``spack ci generate`` to create a working pipeline.
+The ``target: gitlab`` tells spack that the desired pipeline output is for
+gitlab. However, this isn't strictly required, as currently gitlab is the
+only possible output format for pipelines. The ``pipeline-gen`` section
+contains the key information needed to specify attributes for the generated
+jobs. Notice that it contains a list which has only a single element in
+this case. In real pipelines it will almost certainly have more elements,
+and in those cases, order is important: spack starts at the bottom of the
+list and works upwards when applying attributes.
 
-Normally ``enable-artifacts-buildcache`` is not recommended in production as it
-results in large binary artifacts getting transferred back and forth between
-gitlab and the runners. But in this example on gitlab.com where there is no
-shared, persistent file system, and where no secrets are stored for giving
-permission to write to an S3 bucket, ``enable-artifacts-buildcache`` is the only
-way to propagate binaries from jobs to their dependents.
+But in this simple case, we use only the special key ``any-job`` to
+indicate that spack should apply the specified attributes (``tags``, ``image``,
+and ``before_script``) to any job it generates. This includes jobs for
+building/pushing all packages, a ``rebuild-index`` job at the end of the
+pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
+no rebuilds are required.
 
-Also, it is usually a good idea to let the pipeline generate a final "rebuild the
-buildcache index" job, so that subsequent pipeline generation can quickly determine
-which specs are up to date and which need to be rebuilt (it's a good idea for other
-reasons as well, but those are out of scope for this discussion). In this case we
-have disabled it (using ``rebuild-index: False``) because the index would only be
-generated in the artifacts mirror anyway, and consequently would not be available
-during subsequent pipeline runs.
+Something to note is that in this simple case, we rely on spack to
+generate a reasonable script for the package build jobs (it just creates
+a script that invokes ``spack ci rebuild``).
 
-.. note::
-   With the addition of reproducible builds (#22887) a previously working
-   pipeline will require some changes:
+Another thing to note is the use of the ``SPACK_USER_CONFIG_PATH`` environment
+variable in any generated jobs. The purpose of this is to make spack
+aware of one final file in the example, the one that contains the mirror
+configuration. This file, ``mirrors.yaml``, looks like this:
 
-   * In the build-jobs, the environment location changed.
-     This will typically show as a ``KeyError`` in the failing job. Be sure to
-     point to ``${SPACK_CONCRETE_ENV_DIR}``.
+.. code-block:: yaml
 
-   * When using ``include`` in your environment, be sure to make the included
-     files available in the build jobs. This means adding those files to the
-     artifact directory. Those files will also be missing in the reproducibility
-     artifact.
+   mirrors:
+     buildcache-destination:
+       url: oci://registry.gitlab.com/spack/pipeline-quickstart
+       binary: true
+       access_pair:
+         id_variable: CI_REGISTRY_USER
+         secret_variable: CI_REGISTRY_PASSWORD
 
-   * Because the location of the environment changed, including files with
-     relative path may have to be adapted to work both in the project context
-     (generation job) and in the concrete env dir context (build job).
+Note the name of the mirror is ``buildcache-destination``, which is required
+as of Spack 0.23 (see below for more information). The mirror url simply
+points to the container registry associated with the project, while
+``id_variable`` and ``secret_variable`` refer to environment variables
+containing the access credentials for the mirror.
+
+When spack builds packages for this example project, they will be pushed to
+the project container registry, where they will be available for subsequent
+jobs to install as dependencies, or for other pipelines to use to build runnable
+container images.
 
 -----------------------------------
 Spack commands supporting pipelines
@@ -417,15 +435,6 @@ configuration with a ``script`` attribute. Specifying a signing job without a script
 does not create a signing job and the job configuration attributes will be ignored.
 Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.
 
-^^^^^^^^^^^^^^^^^
-Cleanup (cleanup)
-^^^^^^^^^^^^^^^^^
-
-When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
-created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
-script, but do expect that the spack command is in the path and require a
-``before_script`` to be specified that sources the ``setup-env.sh`` script.
-
 .. _noop_jobs:
 
 ^^^^^^^^^^^^
@@ -592,6 +601,77 @@ the attributes will be merged starting from the bottom match going up to the top
 
 In the case that no match is found in a submapping section, no additional attributes will be applied.
 
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+Dynamic Mapping Sections
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+For large scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
+mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
+behavior, but it does follow the rest of the merge rules for configurations.
+
+The dynamic mapping service needs to implement a single REST API interface for getting
+requests ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.
+
+An example request:
+
+.. code-block::
+
+   https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0
+
+
+An example response, which updates the Kubernetes request variables, overrides the max retries for gitlab,
+and prepends a note about the modifications made by the my-dyn-mapping.spack.io service:
+
+.. code-block::
+
+   200 OK
+
+   {
+       "variables":
+       {
+           "KUBERNETES_CPU_REQUEST": "500m",
+           "KUBERNETES_MEMORY_REQUEST": "2G"
+       },
+       "retry": { "max": "1" },
+       "script+:":
+       [
+           "echo \"Job modified by my-dyn-mapping.spack.io\""
+       ]
+   }
+
+
+The ``ci.yaml`` configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.
+
+It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``
+respectively. It is also possible to configure required attributes under the ``require`` section.
+
+The client timeout and SSL verification can be configured with the ``timeout`` and ``verify_ssl`` options.
+By default, ``timeout`` is set to the option in ``config:timeout`` and ``verify_ssl`` is set to the option in ``config:verify_ssl``.
+
+Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
+header may be environment variables that are expanded at runtime, such as a private token configured on the runner.
+
+Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``:
+
+
+.. code-block:: yaml
+
+   ci:
+   - dynamic-mapping:
+       endpoint: my-dyn-mapping.spack.io/allocation
+       timeout: 10
+       verify_ssl: True
+       header:
+         PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
+         MY_CONFIG: "fuzz_allocation:false"
+       allow:
+       - variables
+       ignore:
+       - script
+       require: []
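Such a service can be exercised by hand before wiring it into ``ci.yaml``. A sketch against the hypothetical endpoint above:

.. code-block:: console

   $ curl -G -H "PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}" \
          --data-urlencode "spec=zlib-ng@2.1.6 +compat+opt %gcc@12.0.0" \
          https://my-dyn-mapping.spack.io/allocation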
 ^^^^^^^^^^^^^
 Bootstrapping
 ^^^^^^^^^^^^^
@@ -670,15 +750,6 @@ environment/stack file, and in that case no bootstrapping will be done (only the
 specs will be staged for building) and the runners will be expected to already
 have all needed compilers installed and configured for spack to use.
 
-^^^^^^^^^^^^^^^^^^^
-Pipeline Buildcache
-^^^^^^^^^^^^^^^^^^^
-
-The ``enable-artifacts-buildcache`` key
-takes a boolean and determines whether the pipeline uses artifacts to store and
-pass along the buildcaches from one stage to the next (the default if you don't
-provide this option is ``False``).
-
 ^^^^^^^^^^^^^^^^
 Broken Specs URL
 ^^^^^^^^^^^^^^^^
@@ -1,13 +1,13 @@
-sphinx==7.4.7
+sphinx==8.1.3
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.6.1
-sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.1
+sphinx-rtd-theme==3.0.1
+python-levenshtein==0.26.0
 docutils==0.20.1
 pygments==2.18.0
 urllib3==2.2.3
 pytest==8.3.3
 isort==5.13.2
-black==24.8.0
+black==24.10.0
 flake8==7.1.1
 mypy==1.11.1
lib/spack/env/cc (vendored file, 276 changed lines)
@@ -101,9 +101,10 @@ setsep() {
     esac
 }
 
-# prepend LISTNAME ELEMENT
+# prepend LISTNAME ELEMENT [SEP]
 #
-# Prepend ELEMENT to the list stored in the variable LISTNAME.
+# Prepend ELEMENT to the list stored in the variable LISTNAME,
+# assuming the list is separated by SEP.
 # Handles empty lists and single-element lists.
 prepend() {
     varname="$1"
@@ -118,39 +119,18 @@ prepend() {
|
||||
fi
|
||||
}
|
||||
|
||||
# contains LISTNAME ELEMENT
|
||||
# append LISTNAME ELEMENT [SEP]
|
||||
#
|
||||
# Test whether LISTNAME contains ELEMENT.
|
||||
# Set $? to 1 if LISTNAME does not contain ELEMENT.
|
||||
# Set $? to 0 if LISTNAME does not contain ELEMENT.
|
||||
contains() {
|
||||
varname="$1"
|
||||
elt="$2"
|
||||
|
||||
setsep "$varname"
|
||||
|
||||
# the list may: 1) only contain the element, 2) start with the element,
|
||||
# 3) contain the element in the middle, or 4) end wtih the element.
|
||||
eval "[ \"\${$varname}\" = \"$elt\" ]" \
|
||||
|| eval "[ \"\${$varname#${elt}${sep}}\" != \"\${$varname}\" ]" \
|
||||
|| eval "[ \"\${$varname#*${sep}${elt}${sep}}\" != \"\${$varname}\" ]" \
|
||||
|| eval "[ \"\${$varname%${sep}${elt}}\" != \"\${$varname}\" ]"
|
||||
}
|
||||
|
||||
# append LISTNAME ELEMENT [unique]
|
||||
#
|
||||
# Append ELEMENT to the list stored in the variable LISTNAME.
|
||||
# Append ELEMENT to the list stored in the variable LISTNAME,
|
||||
# assuming the list is separated by SEP.
|
||||
# Handles empty lists and single-element lists.
|
||||
#
|
||||
# If the third argument is provided and if it is the string 'unique',
|
||||
# this will not append if ELEMENT is already in the list LISTNAME.
|
||||
append() {
|
||||
varname="$1"
|
||||
elt="$2"
|
||||
|
||||
if empty "$varname"; then
|
||||
eval "$varname=\"\${elt}\""
|
||||
elif [ "$3" != "unique" ] || ! contains "$varname" "$elt" ; then
|
||||
else
|
||||
# Get the appropriate separator for the list we're appending to.
|
||||
setsep "$varname"
|
||||
eval "$varname=\"\${$varname}${sep}\${elt}\""
|
||||
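To make the semantics of these list helpers concrete, here is a short usage sketch
(illustrative only, not part of the diff; it assumes the default space separator):

    # Illustrative sketch of the list helpers above (not part of the diff).
    flags=""
    append flags "-L/opt/lib"      # flags is now "-L/opt/lib"
    append flags "-L/usr/lib"      # flags is now "-L/opt/lib -L/usr/lib"
    prepend flags "-L/tmp/lib"     # flags is now "-L/tmp/lib -L/opt/lib -L/usr/lib"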
@@ -168,21 +148,10 @@ extend() {
    if [ "$sep" != " " ]; then
        IFS="$sep"
    fi
    eval "for elt in \${$2}; do append $1 \"$3\${elt}\" ${_append_args}; done"
    eval "for elt in \${$2}; do append $1 \"$3\${elt}\"; done"
    unset IFS
}

# extend_unique LISTNAME1 LISTNAME2 [PREFIX]
#
# Append the elements stored in the variable LISTNAME2 to the list
# stored in LISTNAME1, if they are not already present.
# If PREFIX is provided, prepend it to each element.
extend_unique() {
    _append_args="unique"
    extend "$@"
    unset _append_args
}

# preextend LISTNAME1 LISTNAME2 [PREFIX]
#
# Prepend the elements stored in the list at LISTNAME2
@@ -269,36 +238,6 @@ esac
}
"

# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
# which are used to prioritize paths when assembling the final command line.

# init_path_lists LISTNAME
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
init_path_lists() {
    eval "spack_store_$1=\"\""
    eval "$1=\"\""
    eval "system_$1=\"\""
}

# assign_path_lists LISTNAME1 LISTNAME2
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
assign_path_lists() {
    eval "spack_store_$1=\"\${spack_store_$2}\""
    eval "$1=\"\${$2}\""
    eval "system_$1=\"\${system_$2}\""
}

# append_path_lists LISTNAME ELT
# Append the provided ELT to the appropriate list, based on the result of path_order().
append_path_lists() {
    path_order "$2"
    case $? in
        0) eval "append spack_store_$1 \"\$2\"" ;;
        1) eval "append $1 \"\$2\"" ;;
        2) eval "append system_$1 \"\$2\"" ;;
    esac
}
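A small sketch of how a three-part path_list is used (illustrative only, not part of the
diff; the classification of each path is whatever path_order() returns for it):

    # Illustrative sketch (not part of the diff): a path_list named "dirs"
    # is really three variables, filled according to path_order().
    init_path_lists dirs                  # dirs, spack_store_dirs, system_dirs = ""
    append_path_lists dirs "/usr/lib"     # a system path would land in system_dirs
    append_path_lists dirs "$store/lib"   # a store path would land in spack_store_dirs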
# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -531,7 +470,12 @@ input_command="$*"
parse_Wl() {
    while [ $# -ne 0 ]; do
        if [ "$wl_expect_rpath" = yes ]; then
            append_path_lists return_rpath_dirs_list "$1"
            path_order "$1"
            case $? in
                0) append return_spack_store_rpath_dirs_list "$1" ;;
                1) append return_rpath_dirs_list "$1" ;;
                2) append return_system_rpath_dirs_list "$1" ;;
            esac
            wl_expect_rpath=no
        else
            case "$1" in
@@ -540,14 +484,24 @@ parse_Wl() {
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
                    append_path_lists return_rpath_dirs_list "$arg"
                    path_order "$arg"
                    case $? in
                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
                        1) append return_rpath_dirs_list "$arg" ;;
                        2) append return_system_rpath_dirs_list "$arg" ;;
                    esac
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
                    append_path_lists return_rpath_dirs_list "$arg"
                    path_order "$arg"
                    case $? in
                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
                        1) append return_rpath_dirs_list "$arg" ;;
                        2) append return_system_rpath_dirs_list "$arg" ;;
                    esac
                    ;;
                -rpath|--rpath)
                    wl_expect_rpath=yes
@@ -555,7 +509,8 @@ parse_Wl() {
                "$dtags_to_strip")
                    ;;
                -Wl)
                    # Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
                    # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
                    # it.
                    return 1
                    ;;
                *)
@@ -574,10 +529,21 @@ categorize_arguments() {
    return_other_args_list=""
    return_isystem_was_used=""

    init_path_lists return_isystem_include_dirs_list
    init_path_lists return_include_dirs_list
    init_path_lists return_lib_dirs_list
    init_path_lists return_rpath_dirs_list
    return_isystem_spack_store_include_dirs_list=""
    return_isystem_system_include_dirs_list=""
    return_isystem_include_dirs_list=""

    return_spack_store_include_dirs_list=""
    return_system_include_dirs_list=""
    return_include_dirs_list=""

    return_spack_store_lib_dirs_list=""
    return_system_lib_dirs_list=""
    return_lib_dirs_list=""

    return_spack_store_rpath_dirs_list=""
    return_system_rpath_dirs_list=""
    return_rpath_dirs_list=""

    # Global state for keeping track of -Wl,-rpath -Wl,/path
    wl_expect_rpath=no
@@ -643,17 +609,32 @@ categorize_arguments() {
                arg="${1#-isystem}"
                return_isystem_was_used=true
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append_path_lists return_isystem_include_dirs_list "$arg"
                path_order "$arg"
                case $? in
                    0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
                    1) append return_isystem_include_dirs_list "$arg" ;;
                    2) append return_isystem_system_include_dirs_list "$arg" ;;
                esac
                ;;
            -I*)
                arg="${1#-I}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append_path_lists return_include_dirs_list "$arg"
                path_order "$arg"
                case $? in
                    0) append return_spack_store_include_dirs_list "$arg" ;;
                    1) append return_include_dirs_list "$arg" ;;
                    2) append return_system_include_dirs_list "$arg" ;;
                esac
                ;;
            -L*)
                arg="${1#-L}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append_path_lists return_lib_dirs_list "$arg"
                path_order "$arg"
                case $? in
                    0) append return_spack_store_lib_dirs_list "$arg" ;;
                    1) append return_lib_dirs_list "$arg" ;;
                    2) append return_system_lib_dirs_list "$arg" ;;
                esac
                ;;
            -l*)
                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -686,17 +667,32 @@ categorize_arguments() {
                    break
                elif [ "$xlinker_expect_rpath" = yes ]; then
                    # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                    append_path_lists return_rpath_dirs_list "$1"
                    path_order "$1"
                    case $? in
                        0) append return_spack_store_rpath_dirs_list "$1" ;;
                        1) append return_rpath_dirs_list "$1" ;;
                        2) append return_system_rpath_dirs_list "$1" ;;
                    esac
                    xlinker_expect_rpath=no
                else
                    case "$1" in
                        -rpath=*)
                            arg="${1#-rpath=}"
                            append_path_lists return_rpath_dirs_list "$arg"
                            path_order "$arg"
                            case $? in
                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
                                1) append return_rpath_dirs_list "$arg" ;;
                                2) append return_system_rpath_dirs_list "$arg" ;;
                            esac
                            ;;
                        --rpath=*)
                            arg="${1#--rpath=}"
                            append_path_lists return_rpath_dirs_list "$arg"
                            path_order "$arg"
                            case $? in
                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
                                1) append return_rpath_dirs_list "$arg" ;;
                                2) append return_system_rpath_dirs_list "$arg" ;;
                            esac
                            ;;
                        -rpath|--rpath)
                            xlinker_expect_rpath=yes
@@ -713,32 +709,7 @@ categorize_arguments() {
            "$dtags_to_strip")
                ;;
            *)
                # if mode is not ld, we can just add to other args
                if [ "$mode" != "ld" ]; then
                    append return_other_args_list "$1"
                    shift
                    continue
                fi

                # if we're in linker mode, we need to parse raw RPATH args
                case "$1" in
                    -rpath=*)
                        arg="${1#-rpath=}"
                        append_path_lists return_rpath_dirs_list "$arg"
                        ;;
                    --rpath=*)
                        arg="${1#--rpath=}"
                        append_path_lists return_rpath_dirs_list "$arg"
                        ;;
                    -rpath|--rpath)
                        shift
                        [ $# -eq 0 ] && break # ignore -rpath without value
                        append_path_lists return_rpath_dirs_list "$1"
                        ;;
                    *)
                        append return_other_args_list "$1"
                        ;;
                esac
                append return_other_args_list "$1"
                ;;
        esac
        shift
@@ -760,10 +731,21 @@ categorize_arguments() {

categorize_arguments "$@"

assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists include_dirs_list return_include_dirs_list
assign_path_lists lib_dirs_list return_lib_dirs_list
assign_path_lists rpath_dirs_list return_rpath_dirs_list
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
include_dirs_list="$return_include_dirs_list"

spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
lib_dirs_list="$return_lib_dirs_list"

spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"

isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"

isystem_was_used="$return_isystem_was_used"
other_args_list="$return_other_args_list"
@@ -839,10 +821,21 @@ IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS

assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"

spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_include_dirs_list="$return_include_dirs_list"

spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"

spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"

spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_other_args_list="$return_other_args_list"
@@ -901,7 +894,7 @@ esac
case "$mode" in
    cpp|cc|as|ccld)
        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
            extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
        else
            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
@@ -917,63 +910,64 @@ args_list="$flags_list"

# Include search paths partitioned by (in store, non-system, system)
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_store_spack_flags_include_dirs_list -I
extend args_list spack_flags_spack_store_include_dirs_list -I
extend args_list spack_store_include_dirs_list -I

extend args_list spack_flags_include_dirs_list -I
extend args_list include_dirs_list -I

extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"

extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

extend args_list system_spack_flags_include_dirs_list -I
extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I

extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

# Library search paths partitioned by (in store, non-system, system)
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
extend args_list spack_store_lib_dirs_list "-L"

extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"

extend args_list system_spack_flags_lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
rpath_prefix=""
case "$mode" in
    ccld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
        rpath_prefix="$rpath"
        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
        extend args_list spack_store_rpath_dirs_list "$rpath"

        extend args_list spack_flags_rpath_dirs_list "$rpath"
        extend args_list rpath_dirs_list "$rpath"

        extend args_list spack_flags_system_rpath_dirs_list "$rpath"
        extend args_list system_rpath_dirs_list "$rpath"
        ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
        rpath_prefix="-rpath${lsep}"
        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"

        extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
        extend args_list rpath_dirs_list "-rpath${lsep}"

        extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
        extend args_list system_rpath_dirs_list "-rpath${lsep}"
        ;;
esac

# if mode is ccld or ld, extend RPATH lists with the prefix determined above
if [ -n "$rpath_prefix" ]; then
    extend_unique args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
    extend_unique args_list spack_store_rpath_dirs_list "$rpath_prefix"

    extend_unique args_list spack_flags_rpath_dirs_list "$rpath_prefix"
    extend_unique args_list rpath_dirs_list "$rpath_prefix"

    extend_unique args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
    extend_unique args_list system_rpath_dirs_list "$rpath_prefix"
fi

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list
lib/spack/external/__init__.py
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit bceb39528ac49dd0c876b2e9bf3e7482e9c2be4a)
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)

astunparse
----------------

@@ -81,8 +81,13 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
        self.generation = generation
        # Only relevant for AArch64
        self.cpu_part = cpu_part
        # Cache the ancestor computation

        # Cache the "ancestor" computation
        self._ancestors = None
        # Cache the "generic" computation
        self._generic = None
        # Cache the "family" computation
        self._family = None

    @property
    def ancestors(self):
@@ -174,18 +179,22 @@ def __contains__(self, feature):
    @property
    def family(self):
        """Returns the architecture family a given target belongs to"""
        roots = [x for x in [self] + self.ancestors if not x.ancestors]
        msg = "a target is expected to belong to just one architecture family"
        msg += f"[found {', '.join(str(x) for x in roots)}]"
        assert len(roots) == 1, msg
        if self._family is None:
            roots = [x for x in [self] + self.ancestors if not x.ancestors]
            msg = "a target is expected to belong to just one architecture family"
            msg += f"[found {', '.join(str(x) for x in roots)}]"
            assert len(roots) == 1, msg
            self._family = roots.pop()

        return roots.pop()
        return self._family

    @property
    def generic(self):
        """Returns the best generic architecture that is compatible with self"""
        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
        return max(generics, key=lambda x: len(x.ancestors))
        if self._generic is None:
            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
            self._generic = max(generics, key=lambda x: len(x.ancestors))
        return self._generic

    def to_dict(self):
        """Returns a dictionary representation of this object."""
@@ -1482,7 +1482,6 @@
      "cldemote",
      "movdir64b",
      "movdiri",
      "pdcm",
      "serialize",
      "waitpkg"
    ],
@@ -2237,6 +2236,84 @@
      ]
    }
  },
"zen5": {
|
||||
"from": ["zen4"],
|
||||
"vendor": "AuthenticAMD",
|
||||
"features": [
|
||||
"abm",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"avx512_bf16",
|
||||
"avx512_bitalg",
|
||||
"avx512bw",
|
||||
"avx512cd",
|
||||
"avx512dq",
|
||||
"avx512f",
|
||||
"avx512ifma",
|
||||
"avx512vbmi",
|
||||
"avx512_vbmi2",
|
||||
"avx512vl",
|
||||
"avx512_vnni",
|
||||
"avx512_vp2intersect",
|
||||
"avx512_vpopcntdq",
|
||||
"avx_vnni",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"clwb",
|
||||
"clzero",
|
||||
"cppc",
|
||||
"cx16",
|
||||
"f16c",
|
||||
"flush_l1d",
|
||||
"fma",
|
||||
"fsgsbase",
|
||||
"gfni",
|
||||
"ibrs_enhanced",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"movdir64b",
|
||||
"movdiri",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"sse4a",
|
||||
"ssse3",
|
||||
"tsc_adjust",
|
||||
"vaes",
|
||||
"vpclmulqdq",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "14.1:",
|
||||
"name": "znver5",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"aocc": [
|
||||
{
|
||||
"versions": "5.0:",
|
||||
"name": "znver5",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "19.1:",
|
||||
"name": "znver5",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"ppc64": {
|
||||
"from": [],
|
||||
"vendor": "generic",
|
||||
|
@@ -41,6 +41,20 @@ def comma_and(sequence: List[str]) -> str:
    return comma_list(sequence, "and")


def ordinal(number: int) -> str:
    """Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.

    Args:
        number: int to convert to ordinal number

    Returns: number's corresponding ordinal
    """
    idx = (number % 10) << 1
    tens = number % 100 // 10
    suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
    return f"{number}{suffix}"

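The suffix lookup is compact enough to deserve a quick illustration (not part of the diff):
"thstndrd"[idx:idx+2] selects "th", "st", "nd" or "rd" by the last digit, while the
tens == 1 test keeps the teens on "th".

    # Illustrative checks (not part of the diff):
    assert ordinal(1) == "1st"
    assert ordinal(2) == "2nd"
    assert ordinal(3) == "3rd"
    assert ordinal(4) == "4th"
    assert ordinal(11) == "11th"  # teens always take "th"
    assert ordinal(22) == "22nd"
    assert ordinal(113) == "113th"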
def quote(sequence: List[str], q: str = "'") -> List[str]:
    """Quotes each item in the input list with the quote character passed as second argument."""
    return [f"{q}{e}{q}" for e in sequence]
@@ -47,6 +47,7 @@
    "copy_mode",
    "filter_file",
    "find",
    "find_first",
    "find_headers",
    "find_all_headers",
    "find_libraries",
@@ -12,7 +12,7 @@
import sys
import traceback
from datetime import datetime, timedelta
from typing import Any, Callable, Iterable, List, Tuple
from typing import Callable, Iterable, List, Tuple, TypeVar

# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -879,9 +879,12 @@ def enum(**kwargs):
    return type("Enum", (object,), kwargs)


T = TypeVar("T")


def stable_partition(
    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
) -> Tuple[List[Any], List[Any]]:
    input_iterable: Iterable[T], predicate_fn: Callable[[T], bool]
) -> Tuple[List[T], List[T]]:
    """Partition the input iterable according to a custom predicate.

    Args:
@@ -893,12 +896,13 @@ def stable_partition(
        Tuple of the list of elements evaluating to True, and
        list of elements evaluating to False.
    """
    true_items, false_items = [], []
    true_items: List[T] = []
    false_items: List[T] = []
    for item in input_iterable:
        if predicate_fn(item):
            true_items.append(item)
            continue
        false_items.append(item)
        else:
            false_items.append(item)
    return true_items, false_items

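As a usage illustration (not part of the diff), the partition is stable: elements keep
their input order within each output list.

    # Illustrative usage of stable_partition (not part of the diff):
    evens, odds = stable_partition([3, 1, 4, 1, 5, 9, 2, 6], lambda n: n % 2 == 0)
    assert evens == [4, 2, 6]        # order of appearance preserved
    assert odds == [3, 1, 1, 5, 9]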
@@ -263,7 +263,9 @@ def match_to_ansi(match):
            f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
        )

    ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
    color_number = colors.get(color_code, "")
    semi = ";" if color_number else ""
    ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
    if text:
        return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
    else:
@@ -10,7 +10,6 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select
@@ -19,9 +18,10 @@
import threading
import traceback
from contextlib import contextmanager
from multiprocessing.connection import Connection
from threading import Thread
from types import ModuleType
from typing import Optional
from typing import Callable, Optional

import llnl.util.tty as tty

@@ -345,49 +345,6 @@ def close(self):
        self.file.close()


class MultiProcessFd:
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""

    def __init__(self, fd):
        self._connection = None
        self._fd = None
        if sys.version_info >= (3, 8):
            self._connection = multiprocessing.connection.Connection(fd)
        else:
            self._fd = fd

    @property
    def fd(self):
        if self._connection:
            return self._connection._handle
        else:
            return self._fd

    def close(self):
        if self._connection:
            self._connection.close()
        else:
            os.close(self._fd)


def close_connection_and_file(multiprocess_fd, file):
    # MultiprocessFd is intended to transmit a FD
    # to a child process, this FD is then opened to a Python File object
    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
    # multiprocessing.connection.Connection; Connection closes the FD
    # when it is deleted, and prints a warning about duplicate closure if
    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
    # simple FD; closing the FD here appears to conflict with
    # closure of the File object (in < 3.8 that is). Therefore this needs
    # to choose whether to close the File or the Connection.
    if sys.version_info >= (3, 8):
        multiprocess_fd.close()
    else:
        file.close()

@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.
@@ -545,22 +502,20 @@ def __enter__(self):
        # forcing debug output.
        self._saved_debug = tty._debug

        # OS-level pipe for redirecting output to logger
        read_fd, write_fd = os.pipe()
        # Pipe for redirecting output to logger
        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)

        read_multiprocess_fd = MultiProcessFd(read_fd)

        # Multiprocessing pipe for communication back from the daemon
        # Pipe for communication back from the daemon
        # Currently only used to save echo value between uses
        self.parent_pipe, child_pipe = multiprocessing.Pipe()
        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)

        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_multiprocess_fd = None
            input_fd = None
            try:
                if sys.stdin.isatty():
                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
                    input_fd = Connection(os.dup(sys.stdin.fileno()))
            except BaseException:
                # just don't forward input if this fails
                pass
@@ -569,9 +524,9 @@ def __enter__(self):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_multiprocess_fd,
                    read_multiprocess_fd,
                    write_fd,
                    input_fd,
                    read_fd,
                    self.write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
@@ -582,9 +537,9 @@ def __enter__(self):
            self.process.start()

        finally:
            if input_multiprocess_fd:
                input_multiprocess_fd.close()
            read_multiprocess_fd.close()
            if input_fd:
                input_fd.close()
            read_fd.close()

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
@@ -602,9 +557,9 @@ def __enter__(self):
            self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
            os.dup2(write_fd, sys.stdout.fileno())
            os.dup2(write_fd, sys.stderr.fileno())
            os.close(write_fd)
            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
            self.write_fd.close()

        else:
            # Handle I/O the Python way. This won't redirect lower-level
@@ -617,7 +572,7 @@ def __enter__(self):
            self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
            pipe_fd_out = os.fdopen(write_fd, "w")
            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out

@@ -653,6 +608,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr
            self.write_fd.close()

        # print log contents in parent if needed.
        if self.log_file.write_in_parent:
@@ -866,14 +822,14 @@ def force_echo(self):


def _writer_daemon(
    stdin_multiprocess_fd,
    read_multiprocess_fd,
    write_fd,
    echo,
    log_file_wrapper,
    control_pipe,
    filter_fn,
):
    stdin_fd: Optional[Connection],
    read_fd: Connection,
    write_fd: Connection,
    echo: bool,
    log_file_wrapper: FileWrapper,
    control_fd: Connection,
    filter_fn: Optional[Callable[[str], str]],
) -> None:
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both
@@ -910,43 +866,37 @@ def _writer_daemon(
    ``StringIO`` in the parent. This is mainly for testing.

    Arguments:
        stdin_multiprocess_fd (int): input from the terminal
        read_multiprocess_fd (int): pipe for reading from parent's redirected
            stdout
        echo (bool): initial echo setting -- controlled by user and
            preserved across multiple writer daemons
        log_file_wrapper (FileWrapper): file to log all output
        control_pipe (Pipe): multiprocessing pipe on which to send control
            information to the parent
        filter_fn (callable, optional): function to filter each line of output
        stdin_fd: optional input from the terminal
        read_fd: pipe for reading from parent's redirected stdout
        echo: initial echo setting -- controlled by user and preserved across multiple writer
            daemons
        log_file_wrapper: file to log all output
        control_pipe: multiprocessing pipe on which to send control information to the parent
        filter_fn: optional function to filter each line of output

    """
    # If this process was forked, then it will inherit file descriptors from
    # the parent process. This process depends on closing all instances of
    # write_fd to terminate the reading loop, so we close the file descriptor
    # here. Forking is the process spawning method everywhere except Mac OS
    # for Python >= 3.8 and on Windows
    if sys.version_info < (3, 8) or sys.platform != "darwin":
        os.close(write_fd)
    # This process depends on closing all instances of write_pipe to terminate the reading loop
    write_fd.close()

    # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
    #    that prevents unbuffered text I/O.
    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
    # 3. closefd=False because Connection has "ownership"
    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)

    if stdin_multiprocess_fd:
        stdin = os.fdopen(stdin_multiprocess_fd.fd)
    if stdin_fd:
        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
    else:
        stdin = None
        stdin_file = None

    # list of streams to select from
    istreams = [in_pipe, stdin] if stdin else [in_pipe]
    istreams = [read_file, stdin_file] if stdin_file else [read_file]
    force_echo = False  # parent can force echo for certain output

    log_file = log_file_wrapper.unwrap()

    try:
        with keyboard_input(stdin) as kb:
        with keyboard_input(stdin_file) as kb:
            while True:
                # fix the terminal settings if we recently came to
                # the foreground
@@ -959,12 +909,12 @@ def _writer_daemon(
                # Allow user to toggle echo with 'v' key.
                # Currently ignores other chars.
                # only read stdin if we're in the foreground
                if stdin in rlist and not _is_background_tty(stdin):
                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                    # it's possible to be backgrounded between the above
                    # check and the read, so we ignore SIGTTIN here.
                    with ignore_signal(signal.SIGTTIN):
                        try:
                            if stdin.read(1) == "v":
                            if stdin_file.read(1) == "v":
                                echo = not echo
                        except IOError as e:
                            # If SIGTTIN is ignored, the system gives EIO
@@ -973,13 +923,13 @@ def _writer_daemon(
                            if e.errno != errno.EIO:
                                raise

                if in_pipe in rlist:
                if read_file in rlist:
                    line_count = 0
                    try:
                        while line_count < 100:
                            # Handle output from the calling process.
                            try:
                                line = _retry(in_pipe.readline)()
                                line = _retry(read_file.readline)()
                            except UnicodeDecodeError:
                                # installs like --test=root gpgme produce non-UTF8 logs
                                line = "<line lost: output was not encoded as UTF-8>\n"
@@ -1008,7 +958,7 @@ def _writer_daemon(
                            if xoff in controls:
                                force_echo = False

                            if not _input_available(in_pipe):
                            if not _input_available(read_file):
                                break
                    finally:
                        if line_count > 0:
@@ -1023,14 +973,14 @@ def _writer_daemon(
    finally:
        # send written data back to parent if we used a StringIO
        if isinstance(log_file, io.StringIO):
            control_pipe.send(log_file.getvalue())
            control_fd.send(log_file.getvalue())
        log_file_wrapper.close()
        close_connection_and_file(read_multiprocess_fd, in_pipe)
        if stdin_multiprocess_fd:
            close_connection_and_file(stdin_multiprocess_fd, stdin)
        read_fd.close()
        if stdin_fd:
            stdin_fd.close()

        # send echo value back to the parent so it can be preserved.
        control_pipe.send(echo)
        control_fd.send(echo)


def _retry(function):
@@ -69,4 +69,15 @@ def get_version() -> str:
    return spack_version


__all__ = ["spack_version_info", "spack_version", "get_version", "get_spack_commit"]
def get_short_version() -> str:
    """Short Spack version."""
    return f"{spack_version_info[0]}.{spack_version_info[1]}"


__all__ = [
    "spack_version_info",
    "spack_version",
    "get_version",
    "get_spack_commit",
    "get_short_version",
]
@@ -39,6 +39,7 @@ def _search_duplicate_compilers(error_cls):
import collections
import collections.abc
import glob
import inspect
import io
import itertools
import os
@@ -50,6 +51,7 @@ def _search_duplicate_compilers(error_cls):
from urllib.request import urlopen

import llnl.util.lang
from llnl.string import plural

import spack.builder
import spack.config
@@ -386,6 +388,14 @@ def _make_config_error(config_data, summary, error_cls):
)


package_deprecated_attributes = AuditClass(
    group="packages",
    tag="PKG-DEPRECATED-ATTRIBUTES",
    description="Sanity checks to preclude use of deprecated package attributes",
    kwargs=("pkgs",),
)


package_properties = AuditClass(
    group="packages",
    tag="PKG-PROPERTIES",
@@ -404,22 +414,23 @@ def _make_config_error(config_data, summary, error_cls):
)


@package_directives
@package_properties
def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    """Ensure stand-alone test methods are not included in build-time callbacks.

    Test methods are for checking the installed software as stand-alone tests.
    They could also be called during the post-install phase of a build.
    """
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
        # TODO (post-34236): "test"->"test_" once remove deprecated methods
        has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
        has_test_method = test_callbacks and any([m.startswith("test_") for m in test_callbacks])
        if has_test_method:
            msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
            instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
                ", ".join(test_callbacks)
            )
            msg = f"Package {pkg_name} includes stand-alone test methods in build-time checks."
            callbacks = ", ".join(test_callbacks)
            instr = f"Remove the following from 'build_time_test_callbacks': {callbacks}"
            errors.append(error_cls(msg.format(pkg_name), [instr]))

    return errors
@@ -517,6 +528,46 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    return errors


@package_deprecated_attributes
def _search_for_deprecated_package_methods(pkgs, error_cls):
    """Ensure the package doesn't define or use deprecated methods"""
    DEPRECATED_METHOD = (("test", "a name starting with 'test_'"),)
    DEPRECATED_USE = (
        ("self.cache_extra_test_sources(", "cache_extra_test_sources(self, ..)"),
        ("self.install_test_root(", "install_test_root(self, ..)"),
        ("self.run_test(", "test_part(self, ..)"),
    )
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
        method_errors = collections.defaultdict(list)
        for name, function in methods:
            for deprecated_name, alternate in DEPRECATED_METHOD:
                if name == deprecated_name:
                    msg = f"Rename '{deprecated_name}' method to {alternate} instead."
                    method_errors[name].append(msg)

            source = inspect.getsource(function)
            for deprecated_name, alternate in DEPRECATED_USE:
                if deprecated_name in source:
                    msg = f"Change '{deprecated_name}' to '{alternate}' in '{name}' method."
                    method_errors[name].append(msg)

        num_methods = len(method_errors)
        if num_methods > 0:
            methods = plural(num_methods, "method", show_n=False)
            error_msg = (
                f"Package '{pkg_name}' implements or uses unsupported deprecated {methods}."
            )
            instr = [f"Make changes to '{pkg_cls.__module__}':"]
            for name in sorted(method_errors):
                instr.extend([f" {msg}" for msg in method_errors[name]])
            errors.append(error_cls(error_msg, instr))

    return errors


@package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
    """Ensure package names are lowercase and consistent"""
@@ -771,6 +822,89 @@ def _uses_deprecated_globals(pkgs, error_cls):
    return errors


@package_properties
def _ensure_test_docstring(pkgs, error_cls):
    """Ensure stand-alone test methods have a docstring.

    The docstring of a test method is implicitly used as the description of
    the corresponding test part during test results reporting.
    """
    doc_regex = r'\s+("""[^"]+""")'

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
        method_names = []
        for name, test_fn in methods:
            if not name.startswith("test_"):
                continue

            # Ensure the test method has a docstring
            source = inspect.getsource(test_fn)
            match = re.search(doc_regex, source)
            if match is None or len(match.group(0).replace('"', "").strip()) == 0:
                method_names.append(name)

        num_methods = len(method_names)
        if num_methods > 0:
            methods = plural(num_methods, "method", show_n=False)
            docstrings = plural(num_methods, "docstring", show_n=False)
            msg = f"Package {pkg_name} has test {methods} with empty or missing {docstrings}."
            names = ", ".join(method_names)
            instr = [
                "Docstrings are used as descriptions in test outputs.",
                f"Add a concise summary to the following {methods} in '{pkg_cls.__module__}':",
                f"{names}",
            ]
            errors.append(error_cls(msg, instr))

    return errors

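For reference, here is a sketch of a stand-alone test method that would satisfy this audit
(the executable and helper names are illustrative, not from the diff):

    # Illustrative sketch of a compliant stand-alone test method
    # (names are hypothetical):
    def test_version(self):
        """check that the installed executable reports its version"""
        exe = which(self.prefix.bin.hello)
        out = exe("--version", output=str.split, error=str.split)
        assert str(self.spec.version) in out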
@package_properties
def _ensure_test_implemented(pkgs, error_cls):
    """Ensure stand-alone test methods are implemented.

    The test method is also required to be non-empty.
    """

    def skip(line):
        ln = line.strip()
        return ln.startswith("#") or "pass" in ln

    doc_regex = r'\s+("""[^"]+""")'

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
        method_names = []
        for name, test_fn in methods:
            if not name.startswith("test_"):
                continue

            source = inspect.getsource(test_fn)

            # Attempt to ensure the test method is implemented.
            impl = re.sub(doc_regex, r"", source).splitlines()[1:]
            lines = [ln.strip() for ln in impl if not skip(ln)]
            if not lines:
                method_names.append(name)

        num_methods = len(method_names)
        if num_methods > 0:
            methods = plural(num_methods, "method", show_n=False)
            msg = f"Package {pkg_name} has empty or missing test {methods}."
            names = ", ".join(method_names)
            instr = [
                f"Implement or remove the following {methods} from '{pkg_cls.__module__}': {names}"
            ]
            errors.append(error_cls(msg, instr))

    return errors


@package_https_directives
def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
@@ -35,6 +35,7 @@
import spack.caches
import spack.config as config
import spack.database as spack_db
import spack.deptypes as dt
import spack.error
import spack.hash_types as ht
import spack.hooks
@@ -251,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

        spec_list = [
            s
            for s in db.query_local(installed=any, in_buildcache=any)
            for s in db.query_local(installed=any)
            if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]

@@ -712,15 +713,32 @@ def get_buildfile_manifest(spec):
    return data


def hashes_to_prefixes(spec):
    """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
    return {
        s.dag_hash(): str(s.prefix)
def deps_to_relocate(spec):
    """Return the transitive link and direct run dependencies of the spec.

    This is a special traversal for dependencies we need to consider when relocating a package.

    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
    we need to rewrite those locations when dependencies are in a different place at install time
    than they were at build time.

    This traversal covers transitive link dependencies and direct run dependencies because:

    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
       dependency libraries.
    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
       binaries or scripts), so we also need to search for run dependency prefixes when relocating.

    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
    """
    deps = [
        s
        for s in itertools.chain(
            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
        )
        if not s.external
    }
    ]
    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())

def get_buildinfo_dict(spec):
@@ -736,7 +754,7 @@ def get_buildinfo_dict(spec):
        "relocate_binaries": manifest["binary_to_relocate"],
        "relocate_links": manifest["link_to_relocate"],
        "hardlinks_deduped": manifest["hardlinks_deduped"],
        "hash_to_prefix": hashes_to_prefixes(spec),
        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
    }

@@ -1631,7 +1649,6 @@ def _oci_push(
    Dict[str, spack.oci.oci.Blob],
    List[Tuple[Spec, BaseException]],
]:

    # Spec dag hash -> blob
    checksums: Dict[str, spack.oci.oci.Blob] = {}

@@ -2201,11 +2218,36 @@ def relocate_package(spec):
    # First match specific prefix paths. Possibly the *local* install prefix
    # of some dependency is in an upstream, so we cannot assume the original
    # spack store root can be mapped uniformly to the new spack store root.
    for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
        if dag_hash in hash_to_old_prefix:
            old_dep_prefix = hash_to_old_prefix[dag_hash]
            prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
            prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix
    #
    # If the spec is spliced, we need to handle the simultaneous mapping
    # from the old install_tree to the new install_tree and from the build_spec
    # to the spliced spec.
    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
    # by checking for spliced-in nodes by checking for nodes not in the build_spec
    # without any explicit check for whether the spec is spliced.
    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
    # in the context of the relevant root spec. This ensures that the analog for a spec s
    # is the spec that s replaced when we spliced.
    relocation_specs = deps_to_relocate(spec)
    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
    for s in relocation_specs:
        analog = s
        if id(s) not in build_spec_ids:
            analogs = [
                d
                for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
                if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
            ]
            if analogs:
                # Prefer same-name analogs and prefer higher versions
                # This matches the preferences in Spec.splice, so we will find same node
                analog = max(analogs, key=lambda a: (a.name == s.name, a.version))

        lookup_dag_hash = analog.dag_hash()
        if lookup_dag_hash in hash_to_old_prefix:
            old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)

    # Only then add the generic fallback of install prefix -> install prefix.
    prefix_to_prefix_text[old_prefix] = new_prefix
@@ -2520,7 +2562,13 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
    return pkg_prefix


def install_root_node(spec, unsigned=False, force=False, sha256=None):
def install_root_node(
    spec: spack.spec.Spec,
    unsigned=False,
    force: bool = False,
    sha256: Optional[str] = None,
    allow_missing: bool = False,
) -> None:
    """Install the root node of a concrete spec from a buildcache.

    Checking the sha256 sum of a node before installation is usually needed only
@@ -2529,11 +2577,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):

    Args:
        spec: spec to be installed (note that only the root node will be installed)
        unsigned (bool): if True allows installing unsigned binaries
        force (bool): force installation if the spec is already present in the
            local store
        sha256 (str): optional sha256 of the binary package, to be checked
            before installation
        unsigned: if True allows installing unsigned binaries
        force: force installation if the spec is already present in the local store
        sha256: optional sha256 of the binary package, to be checked before installation
        allow_missing: when true, allows installing a node with missing dependencies
    """
    # Early termination
    if spec.external or spec.virtual:
@@ -2543,10 +2590,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
        warnings.warn("Package for spec {0} already installed.".format(spec.format()))
        return

    download_result = download_tarball(spec, unsigned)
    download_result = download_tarball(spec.build_spec, unsigned)
    if not download_result:
        msg = 'download of binary cache file for spec "{0}" failed'
        raise RuntimeError(msg.format(spec.format()))
        raise RuntimeError(msg.format(spec.build_spec.format()))

    if sha256:
        checker = spack.util.crypto.Checker(sha256)
@@ -2565,8 +2612,13 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, force)
        spec.package.windows_establish_runtime_linkage()
        if spec.spliced:  # overwrite old metadata with new
            spack.store.STORE.layout.write_spec(
                spec, spack.store.STORE.layout.spec_file_path(spec)
            )
        spack.hooks.post_install(spec, False)
        spack.store.STORE.db.add(spec)
        spack.store.STORE.db.add(spec, allow_missing=allow_missing)


def install_single_spec(spec, unsigned=False, force=False):
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import glob
import importlib
import os.path
import re
@@ -60,10 +61,19 @@ def _try_import_from_store(
        python, *_ = candidate_spec.dependencies("python-venv")
    else:
        python, *_ = candidate_spec.dependencies("python")
    module_paths = [
        os.path.join(candidate_spec.prefix, python.package.purelib),
        os.path.join(candidate_spec.prefix, python.package.platlib),
    ]

    # if python is installed, ask it for the layout
    if python.installed:
        module_paths = [
            os.path.join(candidate_spec.prefix, python.package.purelib),
            os.path.join(candidate_spec.prefix, python.package.platlib),
        ]
    # otherwise search for the site-packages directory
    # (clingo from binaries with truncated python-venv runtime)
    else:
        module_paths = glob.glob(
            os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
        )
    path_before = list(sys.path)

    # NOTE: try module_paths first and last, last allows an existing version in path
@@ -37,6 +37,7 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.mirror
import spack.platforms
import spack.spec
import spack.store
@@ -44,7 +45,6 @@
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller

@@ -91,12 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

        # Promote (relative) paths to file urls
        url = conf["info"]["url"]
        if spack.util.url.is_path_instead_of_url(url):
            if not os.path.isabs(url):
                url = os.path.join(self.metadata_dir, url)
            url = spack.util.url.path_to_file_url(url)
        self.url = url
        self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -175,7 +170,15 @@ def _install_by_hash(
        query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
        for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
            spack.binary_distribution.install_root_node(
                match, unsigned=True, force=True, sha256=pkg_sha256
                # allow_missing is true since when bootstrapping clingo we truncate runtime
                # deps such as gcc-runtime, since we link libstdc++ statically, and the other
                # further runtime deps are loaded by the Python interpreter. This just silences
                # warnings about missing dependencies.
                match,
                unsigned=True,
                force=True,
                sha256=pkg_sha256,
                allow_missing=True,
            )

    def _install_and_test(
File diff suppressed because one or more lines are too long
@@ -37,13 +37,15 @@
|
||||
import multiprocessing
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
from collections import defaultdict
|
||||
from enum import Flag, auto
|
||||
from itertools import chain
|
||||
from typing import Dict, List, Set, Tuple
|
||||
from multiprocessing.connection import Connection
|
||||
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -53,7 +55,6 @@
|
||||
from llnl.util.lang import dedupe, stable_partition
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
from llnl.util.tty.log import MultiProcessFd
|
||||
|
||||
import spack.build_systems._checks
|
||||
import spack.build_systems.cmake
|
||||
@@ -74,6 +75,7 @@
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
import spack.util.executable
|
||||
import spack.util.libc
|
||||
from spack import traverse
|
||||
from spack.context import Context
|
||||
from spack.error import InstallError, NoHeadersError, NoLibrariesError
|
||||
@@ -89,7 +91,7 @@
|
||||
)
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.log_parse import make_log_context, parse_log_events
|
||||
from spack.util.module_cmd import load_module, path_from_modules
|
||||
from spack.util.module_cmd import load_module
|
||||
|
||||
#
|
||||
# This can be set by the user to globally disable parallel builds.
|
||||
@@ -435,6 +437,35 @@ def optimization_flags(compiler, target):
    return result


class FilterDefaultDynamicLinkerSearchPaths:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        # Identify directories by (inode, device) tuple, which handles symlinks too.
        self.default_path_identifiers: Set[Tuple[int, int]] = set()
        if not dynamic_linker:
            return
        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
            try:
                s = os.stat(path)
                if stat.S_ISDIR(s.st_mode):
                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
            except OSError:
                continue

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        try:
            s = os.stat(p)
            return (s.st_ino, s.st_dev) in self.default_path_identifiers
        except OSError:
            return False

    def __call__(self, dirs: List[str]) -> List[str]:
        if not self.default_path_identifiers:
            return dirs
        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
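The class above keys directories by `(st_ino, st_dev)` rather than by string path. A minimal standard-library sketch (POSIX, no Spack imports) of why that makes the comparison symlink-proof:

```python
import os
import stat
import tempfile

# Create a directory and a symlink to it, then show that both spellings
# resolve to the same (inode, device) identity -- the key the filter stores.
with tempfile.TemporaryDirectory() as tmp:
    real_dir = os.path.join(tmp, "lib")
    os.mkdir(real_dir)
    alias = os.path.join(tmp, "lib-alias")
    os.symlink(real_dir, alias)

    def identity(path):
        s = os.stat(path)  # os.stat follows symlinks by default
        assert stat.S_ISDIR(s.st_mode)
        return (s.st_ino, s.st_dev)

    # A default linker dir would be filtered however it is spelled.
    assert identity(real_dir) == identity(alias)
```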


def set_wrapper_variables(pkg, env):
    """Set environment variables used by the Spack compiler wrapper (which have the prefix
    `SPACK_`) and also add the compiler wrappers to PATH.
@@ -492,69 +523,71 @@ def set_wrapper_variables(pkg, env):
        env.set("CCACHE_DISABLE", "1")

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
    rpath_deps = get_rpath_deps(pkg)
    rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))
    link_deps = pkg.spec.traverse(root=False, order="topo", deptype=dt.LINK)
    external_link_deps, nonexternal_link_deps = stable_partition(link_deps, lambda d: d.external)

    link_dirs = []
    include_dirs = []
    rpath_dirs = []

    def _prepend_all(list_to_modify, items_to_add):
        # Update the original list (creating a new list would be faster but
        # may not be convenient)
        for item in reversed(list(items_to_add)):
            list_to_modify.insert(0, item)

    for dep in chain(external_link_deps, nonexternal_link_deps):
        # TODO: is_system_path is wrong, but even if we knew default -L, -I flags from the compiler
        # and default search dirs from the dynamic linker, it's not obvious how to avoid a possibly
        # expensive search in `query.libs.directories` and `query.headers.directories`, which is
        # what this branch is trying to avoid.
        if is_system_path(dep.prefix):
            continue
        # TODO: as of Spack 0.22, multiple instances of the same package may occur among the link
        # deps, so keying by name is wrong. In practice it is not problematic: we obtain the same
        # gcc-runtime / glibc here, and repeatedly add the same dirs that are later deduped.
        query = pkg.spec[dep.name]
        dep_link_dirs = []
        try:
            # Locating libraries can be time consuming, so log start and finish.
            tty.debug(f"Collecting libraries for {dep.name}")
            dep_link_dirs.extend(query.libs.directories)
            tty.debug(f"Libraries for {dep.name} have been collected.")
        except NoLibrariesError:
            tty.debug(f"No libraries found for {dep.name}")

    def update_compiler_args_for_dep(dep):
        if dep in link_deps and (not is_system_path(dep.prefix)):
            query = pkg.spec[dep.name]
            dep_link_dirs = list()
            try:
                # In some circumstances (particularly for externals) finding
                # libraries packages can be time consuming, so indicate that
                # we are performing this operation (and also report when it
                # finishes).
                tty.debug("Collecting libraries for {0}".format(dep.name))
                dep_link_dirs.extend(query.libs.directories)
                tty.debug("Libraries for {0} have been collected.".format(dep.name))
            except NoLibrariesError:
                tty.debug("No libraries found for {0}".format(dep.name))

        for default_lib_dir in ("lib", "lib64"):
            default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
            if os.path.isdir(default_lib_prefix):
                dep_link_dirs.append(default_lib_prefix)

            for default_lib_dir in ["lib", "lib64"]:
                default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
                if os.path.isdir(default_lib_prefix):
                    dep_link_dirs.append(default_lib_prefix)

        link_dirs[:0] = dep_link_dirs
        if dep.dag_hash() in rpath_hashes:
            rpath_dirs[:0] = dep_link_dirs

            _prepend_all(link_dirs, dep_link_dirs)
            if dep in rpath_deps:
                _prepend_all(rpath_dirs, dep_link_dirs)

        try:
            tty.debug(f"Collecting headers for {dep.name}")
            include_dirs[:0] = query.headers.directories
            tty.debug(f"Headers for {dep.name} have been collected.")
        except NoHeadersError:
            tty.debug(f"No headers found for {dep.name}")

            try:
                _prepend_all(include_dirs, query.headers.directories)
            except NoHeadersError:
                tty.debug("No headers found for {0}".format(dep.name))

    for dspec in pkg.spec.traverse(root=False, order="post"):
        if dspec.external:
            update_compiler_args_for_dep(dspec)

    # Just above, we prepended entries for -L/-rpath for externals. We
    # now do this for non-external packages so that Spack-built packages
    # are searched first for libraries etc.
    for dspec in pkg.spec.traverse(root=False, order="post"):
        if not dspec.external:
            update_compiler_args_for_dep(dspec)

    # The top-level package is always RPATHed. It hasn't been installed yet
    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
    # not be created for the install).
    for libdir in ["lib64", "lib"]:
    # The top-level package is heuristically rpath'ed.
    for libdir in ("lib64", "lib"):
        lib_path = os.path.join(pkg.prefix, libdir)
        rpath_dirs.insert(0, lib_path)

    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
        pkg.compiler.default_dynamic_linker
    )

    # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
    # branch above). link_dirs should be filtered with entries from _parse_link_paths.
    link_dirs = list(dedupe(filter_system_paths(link_dirs)))
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)

    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
    # just this filter.
    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
    if implicit_rpaths:
        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))

    # Spack managed directories include the stage, store and upstream stores. We extend this with
    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -584,13 +617,11 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    """
    module = ModuleChangePropagator(pkg)

    if context == Context.BUILD:
        module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
        module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
        module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
    module.make_jobs = jobs
    if context == Context.BUILD:
        module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
        module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    # TODO: make these build deps that can be installed if not found.
    module.make = MakeExecutable("make", jobs)
@@ -759,21 +790,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


def get_rpaths(pkg):
    """Get a list of all the rpaths for a package."""
    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
    deps = get_rpath_deps(pkg)
    rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
    rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
        if mod_rpath:
            rpaths.append(mod_rpath)
    return list(dedupe(filter_system_paths(rpaths)))


def load_external_modules(pkg):
    """Traverse a package's spec DAG and load any external modules.
@@ -841,10 +857,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

    load_external_modules(pkg)

    implicit_rpaths = pkg.compiler.implicit_rpaths()
    if implicit_rpaths:
        env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))

    # Make sure nothing's strange about the Spack environment.
    validate(env_mods, tty.warn)
    env_mods.apply_modifications()
@@ -1034,6 +1046,12 @@ def set_all_package_py_globals(self):
            # This includes runtime dependencies, also runtime deps of direct build deps.
            set_package_py_globals(pkg, context=Context.RUN)

        # Looping over the set of packages a second time
        # ensures all globals are loaded into the module space prior to
        # any package setup. This guarantees package setup methods have
        # access to expected module level definitions such as "spack_cc".
        for dspec, flag in chain(self.external, self.nonexternal):
            pkg = dspec.package
            for spec in dspec.dependents():
                # Note: some specs have dependents that are unreachable from the root, so avoid
                # setting globals for those.
@@ -1043,6 +1061,15 @@ def set_all_package_py_globals(self):
                pkg.setup_dependent_package(dependent_module, spec)
                dependent_module.propagate_changes_to_mro()

        if self.context == Context.BUILD:
            pkg = self.specs[0].package
            module = ModuleChangePropagator(pkg)
            # std_cmake_args is not sufficiently static to be defined
            # in set_package_py_globals and is deprecated, so it's handled
            # here as a special case.
            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
            module.propagate_changes_to_mro()

    def get_env_modifications(self) -> EnvironmentModifications:
        """Returns the environment variable modifications for the given input specs and context.
        Environment modifications include:
@@ -1112,45 +1139,61 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
            env.prepend_path("PATH", bin_dir)


def get_cmake_prefix_path(pkg):
    # Note that unlike modifications_from_dependencies, this does not include
    # any edits to CMAKE_PREFIX_PATH defined in custom
    # setup_dependent_build_environment implementations of dependency packages
    build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
    build_link_deps = build_deps | link_deps
    spack_built = []
    externals = []
    # modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
    # prepending all externals and then all non-externals
    for dspec in pkg.spec.traverse(root=False, order="post"):
        if dspec in build_link_deps:
            if dspec.external:
                externals.insert(0, dspec)
            else:
                spack_built.insert(0, dspec)

    ordered_build_link_deps = spack_built + externals
    cmake_prefix_path_entries = []
    for spec in ordered_build_link_deps:
        cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)

    return filter_system_paths(cmake_prefix_path_entries)

def _setup_pkg_and_run(
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
    function: Callable,
    kwargs: Dict,
    write_pipe: Connection,
    input_pipe: Optional[Connection],
    jsfd1: Optional[Connection],
    jsfd2: Optional[Connection],
):
    """Main entry point in the child process for Spack builds.

    ``_setup_pkg_and_run`` is called by the child process created in
    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).

    The child process is passed a ``write_pipe``, on which it's expected to send one of
    the following:

    * ``StopPhase``: error raised by a build process indicating it's stopping at a
      particular build phase.

    * ``BaseException``: any exception raised by a child build process, which will be
      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
      raised in the parent.

    * The return value of ``function()``, which can be anything (except an exception).
      This is returned to the caller.

    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
    does not close these file descriptors. Some ``multiprocessing`` backends will close
    them automatically in the child if they are not passed at process creation time.

    Arguments:
        serialized_pkg: Spack package install context object (serialized form of the
            package that we'll build in the child process).
        function: function to call in the child process; serialized_pkg is passed to
            this as the first argument.
        kwargs: additional keyword arguments to pass to ``function()``.
        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
            child *must* send a result (or an error) back.
        input_multiprocess_fd: stdin from the parent (not currently passed on Windows).
        jsfd1: gmake jobserver file descriptor 1.
        jsfd2: gmake jobserver file descriptor 2.
    """

    context: str = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to
        # open(os.devnull) to prevent our process and its parent from
        # simultaneously reading from the original stdin. But, we assume
        # that the parent process is not going to read from it till we
        # are done with the child, so we undo Python's precaution.
        if input_multiprocess_fd is not None:
            sys.stdin = os.fdopen(input_multiprocess_fd.fd)
        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
        # process and its parent from simultaneously reading from the original stdin. But, we
        # assume that the parent process is not going to read from it till we are done with the
        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
        if input_pipe is not None:
            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)

        pkg = serialized_pkg.restore()

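One detail in the new stdin handling above is worth calling out: ``os.fdopen(..., closefd=False)`` leaves ownership of the descriptor with the ``Connection``. A standard-library-only sketch (no Spack involved) of that ownership split:

```python
import multiprocessing
import os

# Wrap one end of a pipe in a file object without transferring ownership
# of the underlying file descriptor, mirroring the child-process stdin
# handling above.
reader, writer = multiprocessing.Pipe(duplex=False)

f = os.fdopen(reader.fileno(), "r", closefd=False)
f.close()                  # closes only the Python file object...
os.fstat(reader.fileno())  # ...the fd is still valid: no OSError here

reader.close()  # the Connection remains the sole owner and closes the fd
writer.close()
```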
@@ -1166,13 +1209,14 @@ def _setup_pkg_and_run(
            # Do not create a full ChildError from this, it's not an error
            # it's a control statement.
            write_pipe.send(e)
    except BaseException:
    except BaseException as e:
        # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()

        # Need to unwind the traceback in the child because traceback
        # objects can't be sent to the parent.
        tb_string = traceback.format_exc()
        exc_type = type(e)
        tb = e.__traceback__
        tb_string = "".join(traceback.format_exception(exc_type, e, tb))

        # build up some context from the offending package so we can
        # show that, too.
@@ -1189,8 +1233,8 @@ def _setup_pkg_and_run(
        elif context == "test":
            logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

        error_msg = str(exc)
        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
        error_msg = str(e)
        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
            process = "test the installation" if context == "test" else "build from sources"
            error_msg = (
                "The '{}' package cannot find an attribute while trying to {}. "
@@ -1200,7 +1244,7 @@ def _setup_pkg_and_run(
                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
            ).format(pkg.name, process, context)
            error_msg = colorize("@*R{{{}}}".format(error_msg))
            error_msg = "{}\n\n{}".format(str(exc), error_msg)
            error_msg = "{}\n\n{}".format(str(e), error_msg)

        # make a pickleable exception to send to parent.
        msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1218,8 +1262,8 @@ def _setup_pkg_and_run(

    finally:
        write_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_pipe is not None:
            input_pipe.close()


def start_build_process(pkg, function, kwargs):
@@ -1246,23 +1290,9 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

    This uses `multiprocessing.Process` to create the child process. The
    mechanism used to create the process differs on different operating
    systems and for different versions of Python. In some cases "fork"
    is used (i.e. the "fork" system call) and in some cases it starts an
    entirely new Python interpreter process (in the docs this is referred
    to as the "spawn" start method). Breaking it down by OS:

    - Linux always uses fork.
    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
    - Windows always uses the "spawn" start method.

    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    input_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None
@@ -1271,14 +1301,13 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
            input_fd = os.dup(sys.stdin.fileno())
            input_multiprocess_fd = MultiProcessFd(input_fd)
            input_fd = Connection(os.dup(sys.stdin.fileno()))
        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
                jobserver_fd2 = MultiProcessFd(int(m.group(2)))
                jobserver_fd1 = Connection(int(m.group(1)))
                jobserver_fd2 = Connection(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
@@ -1287,7 +1316,7 @@ def child_fun():
                function,
                kwargs,
                write_pipe,
                input_multiprocess_fd,
                input_fd,
                jobserver_fd1,
                jobserver_fd2,
            ),
@@ -1307,8 +1336,8 @@ def child_fun():

    finally:
        # Close the input stream in the parent process
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_fd is not None:
            input_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
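The jobserver regex in the hunks above pulls the two descriptor numbers out of GNU make's ``MAKEFLAGS``. A small self-contained sketch of that parse, using a hypothetical ``MAKEFLAGS`` value for illustration:

```python
import re

# Example MAKEFLAGS as GNU make might export it for a sub-make
# (hypothetical value; the real string depends on the make invocation).
makeflags = "-j4 --jobserver-auth=7,8"

m = re.search(r"--jobserver-[^=]*=(\d),(\d)", makeflags)
if m:
    read_fd, write_fd = int(m.group(1)), int(m.group(2))
    # In the code above these integers are wrapped in Connection objects
    # purely so the child process keeps them open.
    print(read_fd, write_fd)  # 7 8
```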
@@ -7,7 +7,6 @@
import llnl.util.filesystem as fs

import spack.directives
import spack.package_base
import spack.util.executable

from .autotools import AutotoolsBuilder, AutotoolsPackage
@@ -10,7 +10,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_environment
import spack.builder

from .cmake import CMakeBuilder, CMakePackage
@@ -297,18 +296,6 @@ def initconfig_hardware_entries(self):
    def std_initconfig_entries(self):
        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
        cmake_rpaths_path = ";".join(cmake_rpaths_env)
        complete_rpath_list = cmake_rpaths_path
        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)

        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)

        return [
            "#------------------{0}".format("-" * 60),
@@ -318,8 +305,6 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
            cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
            cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
        ]
@@ -8,17 +8,19 @@
import platform
import re
import sys
from typing import List, Optional, Tuple
from itertools import chain
from typing import List, Optional, Set, Tuple

import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition

import spack.build_environment
import spack.builder
import spack.deptypes as dt
import spack.error
import spack.package_base
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths

from ._checks import BaseBuilder, execute_build_time_tests
@@ -152,6 +154,24 @@ def _values(x):
    conflicts(f"generator={x}")


def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
    attribute of direct build/test and transitive link dependencies."""
    # Add direct build/test deps
    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
    # Add transitive link deps
    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
    # Separate out externals so they do not shadow Spack prefixes
    externals, spack_built = stable_partition(
        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
        lambda x: x.external,
    )

    return filter_system_paths(
        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
    )
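``stable_partition`` (from ``llnl.util.lang``) splits an iterable by a predicate while preserving relative order, which is what keeps Spack-built prefixes ahead of externals here. A minimal pure-Python sketch of the same behavior, with a toy predicate standing in for ``x.external``:

```python
from typing import Callable, Iterable, List, Tuple, TypeVar

T = TypeVar("T")

def stable_partition(items: Iterable[T], predicate: Callable[[T], bool]) -> Tuple[List[T], List[T]]:
    """Split items into (true_part, false_part), keeping input order in each."""
    true_part: List[T] = []
    false_part: List[T] = []
    for item in items:
        (true_part if predicate(item) else false_part).append(item)
    return true_part, false_part

# Toy stand-in: strings prefixed with "ext-" play the role of externals.
deps = ["zlib", "ext-openssl", "cmake", "ext-python"]
externals, spack_built = stable_partition(deps, lambda d: d.startswith("ext-"))
assert externals == ["ext-openssl", "ext-python"]
assert spack_built == ["zlib", "cmake"]
# chain(spack_built, externals) then yields Spack-built entries first.
```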


class CMakePackage(spack.package_base.PackageBase):
    """Specialized class for packages built using CMake

@@ -358,6 +378,16 @@ def std_args(pkg, generator=None):
            "-G",
            generator,
            define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
            define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
            # only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
            define(
                "CMAKE_INSTALL_RPATH",
                [
                    pathlib.Path(pkg.prefix, "lib").as_posix(),
                    pathlib.Path(pkg.prefix, "lib64").as_posix(),
                ],
            ),
            define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
            define("CMAKE_BUILD_TYPE", build_type),
        ]
@@ -372,15 +402,6 @@ def std_args(pkg, generator=None):
        _conditional_cmake_defaults(pkg, args)
        _maybe_set_python_hints(pkg, args)

        # Set up CMake rpath
        args.extend(
            [
                define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
                define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
                define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
            ]
        )

        return args

    @staticmethod
@@ -541,6 +562,13 @@ def cmake_args(self):

    def cmake(self, pkg, spec, prefix):
        """Runs ``cmake`` in the build directory"""

        # skip cmake phase if it is an incremental develop build
        if spec.is_develop and os.path.isfile(
            os.path.join(self.build_directory, "CMakeCache.txt")
        ):
            return

        options = self.std_cmake_args
        options += self.cmake_args()
        options.append(os.path.abspath(self.root_cmakelists_dir))
@@ -110,8 +110,8 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:

    depends_on("cuda@5.0:10.2", when="cuda_arch=30")
    depends_on("cuda@5.0:10.2", when="cuda_arch=32")
    depends_on("cuda@5.0:", when="cuda_arch=35")
    depends_on("cuda@6.5:", when="cuda_arch=37")
    depends_on("cuda@5.0:11.8", when="cuda_arch=35")
    depends_on("cuda@6.5:11.8", when="cuda_arch=37")

    depends_on("cuda@6.0:", when="cuda_arch=50")
    depends_on("cuda@6.5:", when="cuda_arch=52")
@@ -131,6 +131,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    depends_on("cuda@11.8:", when="cuda_arch=89")

    depends_on("cuda@12.0:", when="cuda_arch=90")
    depends_on("cuda@12.0:", when="cuda_arch=90a")

    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -149,7 +150,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    # minimum supported versions
    conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
    conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
    conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
    conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

    # maximum supported version
@@ -241,6 +241,11 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
    conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")

    # ARM
    # https://github.com/spack/spack/pull/39666#issuecomment-2377609263
    # Might need to be expanded to other gcc versions
    conflicts("%gcc@13.2.0", when="+cuda ^cuda@:12.4 target=aarch64:")

    # XL is mostly relevant for ppc64le Linux
    conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
    conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
@@ -44,16 +44,27 @@ class GoBuilder(BaseBuilder):
    +-----------------------------------------------+--------------------+
    | **Method**                                    | **Purpose**        |
    +===============================================+====================+
    | :py:meth:`~.GoBuilder.build_args`             | Specify arguments  |
    | :py:attr:`~.GoBuilder.build_args`             | Specify arguments  |
    |                                               | to ``go build``    |
    +-----------------------------------------------+--------------------+
    | :py:meth:`~.GoBuilder.check_args`             | Specify arguments  |
    | :py:attr:`~.GoBuilder.check_args`             | Specify arguments  |
    |                                               | to ``go test``     |
    +-----------------------------------------------+--------------------+
    """

    phases = ("build", "install")

    #: Names associated with package methods in the old build-system format
    legacy_methods = ("check", "installcheck")

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = (
        "build_args",
        "check_args",
        "build_directory",
        "install_time_test_callbacks",
    )

    #: Callback names for install-time test
    install_time_test_callbacks = ["check"]
@@ -339,7 +339,7 @@ class PythonPackage(PythonExtension):
    legacy_buildsystem = "python_pip"

    #: Callback names for install-time test
    install_time_test_callbacks = ["test"]
    install_time_test_callbacks = ["test_imports"]

    build_system("python_pip")

@@ -429,7 +429,7 @@ class PythonPipBuilder(BaseBuilder):
    phases = ("install",)

    #: Names associated with package methods in the old build-system format
    legacy_methods = ("test",)
    legacy_methods = ("test_imports",)

    #: Same as legacy_methods, but the signature is different
    legacy_long_methods = ("install_options", "global_options", "config_settings")
@@ -438,7 +438,7 @@ class PythonPipBuilder(BaseBuilder):
    legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")

    #: Callback names for install-time test
    install_time_test_callbacks = ["test"]
    install_time_test_callbacks = ["test_imports"]

    @staticmethod
    def std_args(cls) -> List[str]:
@@ -521,10 +521,6 @@ def stage(self):
    def prefix(self):
        return self.pkg.prefix

    def test(self):
        # Defer tests to virtual and concrete packages
        pass

    def setup_build_environment(self, env):
        """Sets up the build environment for a package.
@@ -74,6 +74,6 @@ def store(self, fetcher, relative_dest):


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
FETCH_CACHE: Union["spack.fetch_strategy.FsCache", llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_fetch_cache)
)
@@ -10,6 +10,7 @@
import os
import re
import shutil
import ssl
import stat
import subprocess
import sys
@@ -19,21 +20,21 @@
from collections import defaultdict, namedtuple
from typing import Dict, List, Optional, Set, Tuple
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
from urllib.parse import quote, urlencode, urlparse
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener

import ruamel.yaml

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.lang import Singleton, memoized
from llnl.util.tty.color import cescape, colorize

import spack
import spack.binary_distribution as bindist
import spack.concretize
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.main
import spack.mirror
import spack.paths
@@ -50,6 +51,31 @@
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp

def _urlopen():
    error_handler = web_util.SpackHTTPDefaultErrorHandler()

    # One opener with HTTPS ssl enabled
    with_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
    )

    # One opener with HTTPS ssl disabled
    without_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
    )

    # And dynamically dispatch based on the config:verify_ssl.
    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
        opener = with_ssl if verify_ssl else without_ssl
        timeout = timeout or spack.config.get("config:connect_timeout", 1)
        return opener.open(fullurl, data, timeout)

    return dispatch_open


_dyn_mapping_urlopener = Singleton(_urlopen)
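The ``Singleton`` wrapper above means the two openers are built once, on first use, and each call then picks one by the ``verify_ssl`` flag. A standard-library-only sketch of the same dispatch pattern (without the Spack error handler or config lookup):

```python
import ssl
from urllib.request import HTTPHandler, HTTPSHandler, build_opener

def make_dispatch_open():
    # Build both openers eagerly; dispatch per call on verify_ssl.
    with_ssl = build_opener(HTTPHandler(), HTTPSHandler(context=ssl.create_default_context()))
    without_ssl = build_opener(HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()))

    def dispatch_open(fullurl, data=None, timeout=10, verify_ssl=True):
        opener = with_ssl if verify_ssl else without_ssl
        return opener.open(fullurl, data, timeout)

    return dispatch_open

# dispatch_open = make_dispatch_open()
# response = dispatch_open("https://example.com", verify_ssl=True)
```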

# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
    # "always",
@@ -69,8 +95,6 @@

TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
)
@@ -175,11 +199,11 @@ def _remove_satisfied_deps(deps, satisfied_list):
    return nodes, edges, stages


def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
def _print_staging_summary(spec_labels, stages, rebuild_decisions):
    if not stages:
        return

    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
    mirrors = spack.mirror.MirrorCollection(binary=True)
    tty.msg("Checked the following mirrors for binaries:")
    for m in mirrors.values():
        tty.msg(f"  {m.fetch_url}")
@@ -226,21 +250,14 @@ def _spec_matches(spec, match_string):
    return spec.intersects(match_string)


def _format_job_needs(
    dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
):
def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions):
    needs_list = []
    for dep_job in dep_jobs:
        dep_spec_key = _spec_ci_label(dep_job)
        rebuild = rebuild_decisions[dep_spec_key].rebuild

        if not prune_dag or rebuild:
            needs_list.append(
                {
                    "job": get_job_name(dep_job, build_group),
                    "artifacts": enable_artifacts_buildcache,
                }
            )
            needs_list.append({"job": get_job_name(dep_job, build_group), "artifacts": False})
    return needs_list

@@ -384,12 +401,6 @@ def __init__(self, ci_config, spec_labels, stages):

        self.ir = {
            "jobs": {},
            "temporary-storage-url-prefix": self.ci_config.get(
                "temporary-storage-url-prefix", None
            ),
            "enable-artifacts-buildcache": self.ci_config.get(
                "enable-artifacts-buildcache", False
            ),
            "rebuild-index": self.ci_config.get("rebuild-index", True),
            "broken-specs-url": self.ci_config.get("broken-specs-url", None),
            "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -405,9 +416,20 @@ def __init__(self, ci_config, spec_labels, stages):
            if name not in ["any", "build"]:
                jobs[name] = self.__init_job("")

    def __init_job(self, spec):
    def __init_job(self, release_spec):
        """Initialize job object"""
        return {"spec": spec, "attributes": {}}
        job_object = {"spec": release_spec, "attributes": {}}
        if release_spec:
            job_vars = job_object["attributes"].setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

        return job_object
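Every real job therefore carries a fixed set of ``SPACK_JOB_SPEC_*`` variables derived from its spec. A toy sketch of the ``setdefault`` pattern, with a hypothetical stand-in for the spec object (the real one is a ``spack.spec.Spec``):

```python
from dataclasses import dataclass

@dataclass
class FakeSpec:
    """Hypothetical stand-in exposing just what __init_job reads."""
    name: str
    version: str

    def dag_hash(self) -> str:
        return "abc1234"

    def format(self, fmt: str) -> str:
        return fmt.replace("{version}", self.version)

job_object = {"spec": FakeSpec("zlib", "1.3"), "attributes": {}}
job_vars = job_object["attributes"].setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = job_object["spec"].dag_hash()
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = job_object["spec"].name
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = job_object["spec"].format("{version}")
assert job_vars["SPACK_JOB_SPEC_PKG_VERSION"] == "1.3"
```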

    def __is_named(self, section):
        """Check if a pipeline-gen configuration section is for a named job,
@@ -500,6 +522,7 @@ def generate_ir(self):
        for section in reversed(pipeline_gen):
            name = self.__is_named(section)
            has_submapping = "submapping" in section
            has_dynmapping = "dynamic-mapping" in section
            section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)

            if name:
@@ -542,6 +565,108 @@ def _apply_section(dest, src):
                    job["attributes"] = self.__apply_submapping(
                        job["attributes"], job["spec"], section
                    )
            elif has_dynmapping:
                mapping = section["dynamic-mapping"]

                dynmap_name = mapping.get("name")

                # Check if this section should be skipped
                dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
                if dynmap_name and dynmap_skip:
                    if re.match(dynmap_skip, dynmap_name):
                        continue

                # Get the endpoint
                endpoint = mapping["endpoint"]
                endpoint_url = urlparse(endpoint)

                # Configure the request header
                header = {"User-Agent": web_util.SPACK_USER_AGENT}
                header.update(mapping.get("header", {}))

                # Expand header environment variables, e.g. when tokens are passed.
                # Rebind the expanded values so the expansion actually takes effect.
                header = {key: os.path.expandvars(value) for key, value in header.items()}

                verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
                timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))

                required = mapping.get("require", [])
                allowed = mapping.get("allow", [])
                ignored = mapping.get("ignore", [])

                # required keys are implicitly allowed
                allowed = sorted(set(allowed + required))
                ignored = sorted(set(ignored))
                required = sorted(set(required))

                # Make sure required things are not also ignored
                assert not any(ikey in required for ikey in ignored)

                def job_query(job):
                    job_vars = job["attributes"]["variables"]
                    query = (
                        "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
                        # The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
                        " {SPACK_JOB_SPEC_VARIANTS}"
                        " arch={SPACK_JOB_SPEC_ARCH}"
                        "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
                    ).format_map(job_vars)
                    return f"spec={quote(query)}"
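The query built by ``job_query`` is just the job's ``SPACK_JOB_SPEC_*`` variables reassembled into a spec string and URL-encoded. A small sketch with hypothetical variable values:

```python
from urllib.parse import quote

# Hypothetical job variables, as __init_job would have populated them.
job_vars = {
    "SPACK_JOB_SPEC_PKG_NAME": "zlib",
    "SPACK_JOB_SPEC_PKG_VERSION": "1.3",
    "SPACK_JOB_SPEC_VARIANTS": "+shared",
    "SPACK_JOB_SPEC_ARCH": "linux-ubuntu22.04-x86_64",
    "SPACK_JOB_SPEC_COMPILER_NAME": "gcc",
    "SPACK_JOB_SPEC_COMPILER_VERSION": "12.3.0",
}

query = (
    "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
    " {SPACK_JOB_SPEC_VARIANTS}"
    " arch={SPACK_JOB_SPEC_ARCH}"
    "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
).format_map(job_vars)

# Spaces, '@', and '%' must survive the trip into a URL query string.
print(f"spec={quote(query)}")
# spec=zlib%401.3%20%2Bshared%20arch%3Dlinux-ubuntu22.04-x86_64%25gcc%4012.3.0
```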

                for job in jobs.values():
                    if not job["spec"]:
                        continue

                    # Create request for this job
                    query = job_query(job)
                    request = Request(
                        endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
                    )
                    try:
                        response = _dyn_mapping_urlopener(
                            request, verify_ssl=verify_ssl, timeout=timeout
                        )
                    except Exception as e:
                        # For now just ignore any errors from dynamic mapping and continue.
                        # This is still experimental, and failures should not stop CI
                        # from running normally.
                        tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
                        tty.warn(f"{e}")
                        continue

                    config = json.load(codecs.getreader("utf-8")(response))

                    # Strip ignored keys
                    if ignored:
                        for key in ignored:
                            if key in config:
                                config.pop(key)

                    # Only keep allowed keys
                    clean_config = {}
                    if allowed:
                        for key in allowed:
                            if key in config:
                                clean_config[key] = config[key]
                    else:
                        clean_config = config

                    # Verify all of the required keys are present
                    if required:
                        missing_keys = []
                        for key in required:
                            if key not in clean_config.keys():
                                missing_keys.append(key)

                        if missing_keys:
                            tty.warn(f"Response missing required keys: {missing_keys}")

                    if clean_config:
                        job["attributes"] = spack.config.merge_yaml(
                            job.get("attributes", {}), clean_config
                        )

        for _, job in jobs.items():
            if job["spec"]:
@@ -558,14 +683,13 @@ def generate_gitlab_ci_yaml(
    prune_dag=False,
    check_index_only=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
    """Generate a gitlab yaml file to run a dynamic child pipeline from
    the spec matrix in the active environment.

    Arguments:
        env (spack.environment.Environment): Activated environment object
            which must contain a gitlab-ci section describing how to map
            which must contain a ci section describing how to map
            specs to runners
        print_summary (bool): Should we print a summary of all the jobs in
            the stages in which they were placed.
@@ -580,39 +704,21 @@ def generate_gitlab_ci_yaml(
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
        remote_mirror_override (str): Typically only needed when one spack.yaml
            is used to populate several mirrors with binaries, based on some
            criteria. Spack protected pipelines populate different mirrors based
            on branch name, facilitated by this option. DEPRECATED
    """
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
            env.concretize()
            env.write()

    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]

    # Get the joined "ci" config with all of the current scopes resolved
    ci_config = cfg.get("ci")

    config_deprecated = False
    if not ci_config:
        tty.warn("Environment does not have a `ci` configuration")
        gitlabci_config = yaml_root.get("gitlab-ci")
        if not gitlabci_config:
            tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")

        tty.warn(
            "The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
            "To update run \n\t$ spack env update /path/to/ci/spack.yaml",
        )
        translate_deprecated_config(gitlabci_config)
        ci_config = gitlabci_config
        config_deprecated = True
        raise SpackCIError("Environment does not have a `ci` configuration")

    # Default target is gitlab...and only target is gitlab
    if not ci_config.get("target", "gitlab") == "gitlab":
        tty.die('Spack CI module only generates target "gitlab"')
        raise SpackCIError('Spack CI module only generates target "gitlab"')

    cdash_config = cfg.get("cdash")
    cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
@@ -673,12 +779,6 @@ def generate_gitlab_ci_yaml(
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

    copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
    if copy_only_pipeline and config_deprecated:
        tty.warn(
            "SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
            "deprecated ci configuration, a no-op pipeline will be generated\n",
            "instead.",
        )

    def ensure_expected_target_path(path):
        """Returns passed paths with all Windows path separators exchanged
@@ -697,38 +797,16 @@ def ensure_expected_target_path(path):
        return path

    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
    if "buildcache-destination" in pipeline_mirrors:
        if remote_mirror_override:
            tty.die(
                "Using the deprecated --buildcache-destination cli option and "
                "having a mirror named 'buildcache-destination' at the same time "
                "is not allowed"
            )
        buildcache_destination = pipeline_mirrors["buildcache-destination"]
    else:
        deprecated_mirror_config = True
        # TODO: This will be an error in Spack 0.23
    if "buildcache-destination" not in pipeline_mirrors:
        raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")

    # TODO: Remove this block in spack 0.23
    remote_mirror_url = None
    if deprecated_mirror_config:
        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
            tty.die("spack ci generate requires an env containing a mirror")

        ci_mirrors = yaml_root["mirrors"]
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]
    buildcache_destination = pipeline_mirrors["buildcache-destination"]

    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copies = {}
        buildcache_copy_src_prefix = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        buildcache_copy_src_prefix = buildcache_destination.fetch_url
        buildcache_copy_dest_prefix = spack_buildcache_copy

    # Check for a list of "known broken" specs that we should not bother
@@ -738,55 +816,10 @@ def ensure_expected_target_path(path):
    if "broken-specs-url" in ci_config:
        broken_specs_url = ci_config["broken-specs-url"]

    enable_artifacts_buildcache = False
    if "enable-artifacts-buildcache" in ci_config:
        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

    rebuild_index_enabled = True
    if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in ci_config:
        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
    # TODO: Remove this block in Spack 0.23
    mirrors_to_check = None
    if deprecated_mirror_config and remote_mirror_override:
        if spack_pipeline_type == "spack_protected_branch":
            # Overriding the main mirror in this case might result
            # in skipping jobs on a release pipeline because specs are
            # up to date in develop. Eventually we want to notice and take
            # advantage of this by scheduling a job to copy the spec from
            # develop to the release, but until we have that, this makes
            # sure we schedule a rebuild job if the spec isn't already in the
            # override mirror.
            mirrors_to_check = {"override": remote_mirror_override}

        # If we have a remote override and we want to generate the pipeline using
        # --check-index-only, then the override mirror needs to be added to
        # the configured mirrors when bindist.update() is run, or else we
        # won't fetch its index and include it in our local cache.
        spack.mirror.add(
            spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
            cfg.default_modify_scope(),
        )

    # TODO: Remove this block in Spack 0.23
    shared_pr_mirror = None
    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
        stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
        shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
        spack.mirror.add(
            spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
            cfg.default_modify_scope(),
        )

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
        proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
@@ -795,9 +828,8 @@ def ensure_expected_target_path(path):
    pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
    concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")

    # Now that we've added the mirrors we know about, they should be properly
    # reflected in the environment manifest file, so copy that into the
    # concrete environment directory, along with the spack.lock file.
    # Copy the environment manifest file into the concrete environment directory,
    # along with the spack.lock file.
    if not os.path.exists(concrete_env_dir):
        os.makedirs(concrete_env_dir)
    shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
@@ -822,18 +854,12 @@ def ensure_expected_target_path(path):
    env_includes.extend(include_scopes)
    env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

    if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
        env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
        translate_deprecated_config(env_yaml_root["spack"]["ci"])

    with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
        fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))

    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    # TODO: Remove this line in Spack 0.23
    local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

    # We communicate relative paths to the downstream jobs to avoid issues in
@@ -847,8 +873,6 @@ def ensure_expected_target_path(path):
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    # TODO: Remove this line in Spack 0.23
    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
@@ -910,7 +934,7 @@ def ensure_expected_target_path(path):
            continue

        up_to_date_mirrors = bindist.get_mirrors_for_spec(
            spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
            spec=release_spec, index_only=check_index_only
        )

        spec_record.rebuild = not up_to_date_mirrors
@@ -952,36 +976,16 @@ def main_script_replacements(cmd):

        job_name = get_job_name(release_spec, build_group)

        job_vars = job_object.setdefault("variables", {})
        job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
        job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
        job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
        job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
        job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
        job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
        job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

        job_object["needs"] = []
        if spec_label in dependencies:
            if enable_artifacts_buildcache:
                # Get dependencies transitively, so they're all
                # available in the artifacts buildcache.
                dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
            else:
                # In this case, "needs" is only used for scheduling
                # purposes, so we only get the direct dependencies.
                dep_jobs = []
                for dep_label in dependencies[spec_label]:
                    dep_jobs.append(spec_labels[dep_label])
            # In this case, "needs" is only used for scheduling
            # purposes, so we only get the direct dependencies.
            dep_jobs = []
            for dep_label in dependencies[spec_label]:
                dep_jobs.append(spec_labels[dep_label])

            job_object["needs"].extend(
                _format_job_needs(
                    dep_jobs,
                    build_group,
                    prune_dag,
                    rebuild_decisions,
                    enable_artifacts_buildcache,
                )
                _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions)
            )

        rebuild_spec = spec_record.rebuild
@@ -1038,6 +1042,7 @@ def main_script_replacements(cmd):

        # Let downstream jobs know whether the spec needed rebuilding, regardless
        # of whether DAG pruning was enabled or not.
        job_vars = job_object["variables"]
        job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)

        if cdash_handler:
@@ -1062,19 +1067,6 @@ def main_script_replacements(cmd):
            },
        )

        # TODO: Remove this block in Spack 0.23
        if enable_artifacts_buildcache:
            bc_root = os.path.join(local_mirror_dir, "build_cache")
            job_object["artifacts"]["paths"].extend(
                [
                    os.path.join(bc_root, p)
                    for p in [
                        bindist.tarball_name(release_spec, ".spec.json"),
                        bindist.tarball_directory_name(release_spec),
                    ]
                ]
            )

        job_object["stage"] = stage_name
        job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
        job_object["interruptible"] = True
@@ -1089,15 +1081,7 @@ def main_script_replacements(cmd):
        job_id += 1

    if print_summary:
        _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

    # Clean up remote mirror override if enabled
    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config:
        if remote_mirror_override:
            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
        if spack_pipeline_type == "spack_pull_request":
            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
        _print_staging_summary(spec_labels, stages, rebuild_decisions)

    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
@@ -1119,7 +1103,7 @@ def main_script_replacements(cmd):
        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
    }

    if copy_only_pipeline and not config_deprecated:
    if copy_only_pipeline:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
        sync_job["stage"] = "copy"
@@ -1129,17 +1113,12 @@ def main_script_replacements(cmd):
        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = buildcache_destination.fetch_url

        if "buildcache-source" in pipeline_mirrors:
            buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        else:
            # TODO: Remove this condition in Spack 0.23
            buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
        if "buildcache-source" not in pipeline_mirrors:
            raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")

        buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
        sync_job["dependencies"] = []
@@ -1147,27 +1126,6 @@ def main_script_replacements(cmd):
        job_id += 1

    if job_id > 0:
        # TODO: Remove this block in Spack 0.23
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
            # associated with this pipeline.
            stage_names.append("cleanup-temp-storage")
            cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])

            cleanup_job["stage"] = "cleanup-temp-storage"
            cleanup_job["when"] = "always"
            cleanup_job["retry"] = service_job_retries
            cleanup_job["interruptible"] = True

            cleanup_job["script"] = _unpack_script(
                cleanup_job["script"],
                op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
            )

            cleanup_job["dependencies"] = []
            output_object["cleanup"] = cleanup_job

        if (
            "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
            and spack_pipeline_type == "spack_protected_branch"
@@ -1184,11 +1142,9 @@ def main_script_replacements(cmd):
|
||||
signing_job["interruptible"] = True
|
||||
if "variables" not in signing_job:
|
||||
signing_job["variables"] = {}
|
||||
signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
|
||||
buildcache_destination.push_url # need the s3 url for aws s3 sync
|
||||
if buildcache_destination
|
||||
else remote_mirror_override or remote_mirror_url
|
||||
)
|
||||
signing_job["variables"][
|
||||
"SPACK_BUILDCACHE_DESTINATION"
|
||||
] = buildcache_destination.push_url
|
||||
signing_job["dependencies"] = []
|
||||
|
||||
output_object["sign-pkgs"] = signing_job
|
||||
@@ -1199,9 +1155,7 @@ def main_script_replacements(cmd):
|
||||
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
|
||||
|
||||
final_job["stage"] = "stage-rebuild-index"
|
||||
target_mirror = remote_mirror_override or remote_mirror_url
|
||||
if buildcache_destination:
|
||||
target_mirror = buildcache_destination.push_url
|
||||
target_mirror = buildcache_destination.push_url
|
||||
final_job["script"] = _unpack_script(
|
||||
final_job["script"],
|
||||
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
|
||||
@@ -1227,17 +1181,11 @@ def main_script_replacements(cmd):
|
||||
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
|
||||
"SPACK_VERSION": spack_version,
|
||||
"SPACK_CHECKOUT_VERSION": version_to_clone,
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
"SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
|
||||
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
|
||||
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
|
||||
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
|
||||
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
|
||||
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
|
||||
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
|
||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
|
||||
@@ -1246,10 +1194,6 @@ def main_script_replacements(cmd):
|
||||
for item, val in output_vars.items():
|
||||
output_vars[item] = ensure_expected_target_path(val)
|
||||
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if deprecated_mirror_config and remote_mirror_override:
|
||||
(output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override
|
||||
|
||||
spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
|
||||
if spack_stack_name:
|
||||
output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
|
||||
@@ -1272,17 +1216,12 @@ def main_script_replacements(cmd):
|
||||
else:
|
||||
# No jobs were generated
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
noop_job["retry"] = service_job_retries
|
||||
# If this job fails ignore the status and carry on
|
||||
noop_job["retry"] = 0
|
||||
noop_job["allow_failure"] = True
|
||||
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.debug("Generating no-op job as copy-only is unsupported here.")
|
||||
noop_job["script"] = [
|
||||
'echo "copy-only pipelines are not supported with deprecated ci configs"'
|
||||
]
|
||||
output_object = {"unsupported-copy": noop_job}
|
||||
else:
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
output_object = {"no-specs-to-rebuild": noop_job}
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
output_object = {"no-specs-to-rebuild": noop_job}
|
||||
|
||||
# Ensure the child pipeline always runs
|
||||
output_object["workflow"] = {"rules": [{"when": "always"}]}
|
||||
@@ -2320,83 +2259,6 @@ def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optiona
reporter.test_skipped_report(report_dir, spec, reason)


def translate_deprecated_config(config):
    # Remove all deprecated keys from config
    mappings = config.pop("mappings", [])
    match_behavior = config.pop("match_behavior", "first")

    build_job = {}
    if "image" in config:
        build_job["image"] = config.pop("image")
    if "tags" in config:
        build_job["tags"] = config.pop("tags")
    if "variables" in config:
        build_job["variables"] = config.pop("variables")

    # Scripts always override in old CI
    if "before_script" in config:
        build_job["before_script:"] = config.pop("before_script")
    if "script" in config:
        build_job["script:"] = config.pop("script")
    if "after_script" in config:
        build_job["after_script:"] = config.pop("after_script")

    signing_job = None
    if "signing-job-attributes" in config:
        signing_job = {"signing-job": config.pop("signing-job-attributes")}

    service_job_attributes = None
    if "service-job-attributes" in config:
        service_job_attributes = config.pop("service-job-attributes")

    # If this config already has pipeline-gen, there is nothing more to do
    if "pipeline-gen" in config:
        return True if mappings or build_job or signing_job or service_job_attributes else False

    config["target"] = "gitlab"

    config["pipeline-gen"] = []
    pipeline_gen = config["pipeline-gen"]

    # Build Job
    submapping = []
    for section in mappings:
        submapping_section = {"match": section["match"]}
        if "runner-attributes" in section:
            remapped_attributes = {}
            if match_behavior == "first":
                for key, value in section["runner-attributes"].items():
                    # Scripts always override in old CI
                    if key == "script":
                        remapped_attributes["script:"] = value
                    elif key == "before_script":
                        remapped_attributes["before_script:"] = value
                    elif key == "after_script":
                        remapped_attributes["after_script:"] = value
                    else:
                        remapped_attributes[key] = value
            else:
                # Handle "merge" behavior by allowing scripts to merge in submapping section
                remapped_attributes = section["runner-attributes"]
            submapping_section["build-job"] = remapped_attributes

        if "remove-attributes" in section:
            # Old format only allowed tags in this section, so no extra checks are needed
            submapping_section["build-job-remove"] = section["remove-attributes"]
        submapping.append(submapping_section)
    pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})

    if build_job:
        pipeline_gen.append({"build-job": build_job})

    # Signing Job
    if signing_job:
        pipeline_gen.append(signing_job)

    # Service Jobs
    if service_job_attributes:
        pipeline_gen.append({"reindex-job": service_job_attributes})
        pipeline_gen.append({"noop-job": service_job_attributes})
        pipeline_gen.append({"cleanup-job": service_job_attributes})

    return True
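
For reference, a minimal sketch of what the (now removed) translation did; the input dict below is hypothetical, the function is the one shown above:

    config = {
        "mappings": [
            {
                "match": ["os=ubuntu20.04"],
                "runner-attributes": {"tags": ["x86_64"], "script": ["spack ci rebuild"]},
            }
        ],
        "service-job-attributes": {"tags": ["service"]},
    }
    translate_deprecated_config(config)  # -> True (something was translated)
    # Afterwards: config["target"] == "gitlab", and config["pipeline-gen"] holds a
    # "submapping" entry (with "script:" overriding) plus reindex/noop/cleanup jobs
    # carrying the old service-job attributes.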
class SpackCIError(spack.error.SpackError):
    def __init__(self, msg):
        super().__init__(msg)

@@ -19,12 +19,23 @@


def setup_parser(subparser):
    # DEPRECATED: equivalent to --generic --target
    subparser.add_argument(
        "-g", "--generic-target", action="store_true", help="show the best generic target"
        "-g",
        "--generic-target",
        action="store_true",
        help="show the best generic target (deprecated)",
    )
    subparser.add_argument(
        "--known-targets", action="store_true", help="show a list of all known targets and exit"
    )
    target_type = subparser.add_mutually_exclusive_group()
    target_type.add_argument(
        "--family", action="store_true", help="print generic ISA (x86_64, aarch64, ppc64le, ...)"
    )
    target_type.add_argument(
        "--generic", action="store_true", help="print feature level (x86_64_v3, armv8.4a, ...)"
    )
    parts = subparser.add_mutually_exclusive_group()
    parts2 = subparser.add_mutually_exclusive_group()
    parts.add_argument(

@@ -80,6 +91,7 @@ def display_target_group(header, target_group):


def arch(parser, args):
    if args.generic_target:
        # TODO: add deprecation warning in 0.24
        print(archspec.cpu.host().generic)
        return

@@ -96,6 +108,10 @@ def arch(parser, args):
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system(os_args)
    host_target = host_platform.target(target_args)
    if args.family:
        host_target = host_target.family
    elif args.generic:
        host_target = host_target.generic
    architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))

    if args.platform:

@@ -62,13 +62,6 @@ def setup_parser(subparser):
    "path to the file where generated jobs file should be written. "
    "default is .gitlab-ci.yml in the root of the repository",
)
generate.add_argument(
    "--copy-to",
    default=None,
    help="path to additional directory for job files\n\n"
    "this option provides an absolute path to a directory where the generated "
    "jobs yaml file should be copied. default is not to copy",
)
generate.add_argument(
    "--optimize",
    action="store_true",

@@ -83,12 +76,6 @@ def setup_parser(subparser):
    default=False,
    help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
)
generate.add_argument(
    "--buildcache-destination",
    default=None,
    help="override the mirror configured in the environment\n\n"
    "allows for pushing binaries from the generated pipeline to a different location",
)
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
    "--prune-dag",

@@ -214,20 +201,10 @@ def ci_generate(args):

env = spack.cmd.require_active_env(cmd_name="ci generate")

if args.copy_to:
    tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")

if args.buildcache_destination:
    tty.warn(
        "The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
    )

output_file = args.output_file
copy_yaml_to = args.copy_to
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination

if not output_file:
    output_file = os.path.abspath(".gitlab-ci.yml")

@@ -245,15 +222,8 @@ def ci_generate(args):
    prune_dag=prune_dag,
    check_index_only=index_only,
    artifacts_root=artifacts_root,
    remote_mirror_override=buildcache_destination,
)

if copy_yaml_to:
    copy_to_dir = os.path.dirname(copy_yaml_to)
    if not os.path.exists(copy_to_dir):
        os.makedirs(copy_to_dir)
    shutil.copyfile(output_file, copy_yaml_to)


def ci_reindex(args):
    """rebuild the buildcache index for the remote mirror

@@ -298,22 +268,13 @@ def ci_rebuild(args):
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
# TODO: Remove this in Spack 0.23
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
ci_job_name = os.environ.get("CI_JOB_NAME")
signing_key = os.environ.get("SPACK_SIGNING_KEY")
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
# TODO: Remove this in Spack 0.23
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
# TODO: Remove this in Spack 0.23
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
# TODO: Remove this in Spack 0.23
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")

@@ -333,12 +294,10 @@ def ci_rebuild(args):
job_log_dir = os.path.join(ci_project_dir, job_log_dir)
job_test_dir = os.path.join(ci_project_dir, job_test_dir)
repro_dir = os.path.join(ci_project_dir, repro_dir)
local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)

# Debug print some of the key environment variables we should have received
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))

# Query the environment manifest to find out whether we're reporting to a

@@ -370,51 +329,11 @@ def ci_rebuild(args):
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False

pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None
if "buildcache-destination" in pipeline_mirrors:
    buildcache_destination = pipeline_mirrors["buildcache-destination"]
else:
    deprecated_mirror_config = True
    # TODO: This will be an error in Spack 0.23
if "buildcache-destination" not in pipeline_mirrors:
    tty.die("spack ci rebuild requires a mirror named 'buildcache-destination'")

# If no override url exists, then just push binary package to the
# normal remote mirror url.
# TODO: Remove in Spack 0.23
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
if buildcache_destination:
    buildcache_mirror_url = buildcache_destination.push_url

# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
# artifacts from upstream to downstream jobs.
# TODO: Remove this in Spack 0.23
pipeline_mirror_url = None

# TODO: Remove this in Spack 0.23
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in ci_config:
    temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
    pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)

# TODO: Remove this in Spack 0.23
enable_artifacts_mirror = False
if "enable-artifacts-buildcache" in ci_config:
    enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
if enable_artifacts_mirror or (
    spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
):
    # If you explicitly enabled the artifacts buildcache feature, or
    # if this is a PR pipeline but you did not enable either of the
    # per-pipeline temporary storage features, we force the use of
    # artifacts buildcache. Otherwise jobs will not have binary
    # dependencies from previous stages available since we do not
    # allow pushing binaries to the remote mirror during PR pipelines.
    enable_artifacts_mirror = True
    pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
    mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
    tty.debug(mirror_msg)
buildcache_destination = pipeline_mirrors["buildcache-destination"]

# Get the concrete spec to be built by this job.
try:

@@ -489,48 +408,7 @@ def ci_rebuild(args):
fd.write(spack_info.encode("utf8"))
fd.write(b"\n")

pipeline_mirrors = []

# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
    mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
    spack.mirror.add(mirror, cfg.default_modify_scope())
    pipeline_mirrors.append(pipeline_mirror_url)

# Check configured mirrors for a built spec with a matching hash
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if remote_mirror_override:
    if spack_pipeline_type == "spack_protected_branch":
        # Passing "mirrors_to_check" below means we *only* look in the override
        # mirror to see if we should skip building, which is what we want.
        mirrors_to_check = {"override": remote_mirror_override}

    # Adding this mirror to the list of configured mirrors means dependencies
    # could be installed from either the override mirror or any other configured
    # mirror (e.g. remote_mirror_url which is defined in the environment or
    # pipeline_mirror_url), which is also what we want.
    spack.mirror.add(
        spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
        cfg.default_modify_scope(),
    )
    pipeline_mirrors.append(remote_mirror_override)

# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
    if shared_pr_mirror_url != "None":
        pipeline_mirrors.append(shared_pr_mirror_url)

matches = (
    None
    if full_rebuild
    else bindist.get_mirrors_for_spec(
        job_spec, mirrors_to_check=mirrors_to_check, index_only=False
    )
)
matches = None if full_rebuild else bindist.get_mirrors_for_spec(job_spec, index_only=False)

if matches:
    # Got a hash match on at least one configured mirror. All

@@ -542,25 +420,10 @@ def ci_rebuild(args):
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches:
    tty.msg(" {0}".format(match["mirror_url"]))
# TODO: Remove this block in Spack 0.23
if enable_artifacts_mirror:
    matching_mirror = matches[0]["mirror_url"]
    build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
    tty.debug("Getting {0} buildcache from {1}".format(job_spec_pkg_name, matching_mirror))
    tty.debug("Downloading to {0}".format(build_cache_dir))
    bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)

# Now we are done and successful
return 0

# Before beginning the install, if this is a "rebuild everything" pipeline, we
# only want to keep the mirror being used by the current pipeline as its binary
# package destination. This ensures that when we rebuild everything, we only
# consume binary dependencies built in this pipeline.
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and full_rebuild:
    spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())

# No hash match anywhere means we need to rebuild spec

# Start with spack arguments

@@ -681,17 +544,11 @@ def ci_rebuild(args):
cdash_handler.copy_test_results(reports_dir, job_test_dir)

if install_exit_code == 0:
    # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
    # If the install succeeded, push it to the buildcache destination. Failure to push
    # will result in a non-zero exit code. Pushing is best-effort.
    mirror_urls = [buildcache_mirror_url]

    # TODO: Remove this block in Spack 0.23
    if pipeline_mirror_url:
        mirror_urls.append(pipeline_mirror_url)

    for result in spack_ci.create_buildcache(
        input_spec=job_spec,
        destination_mirror_urls=mirror_urls,
        destination_mirror_urls=[buildcache_destination.push_url],
        sign_binaries=spack_ci.can_sign_binaries(),
    ):
        if not result.success:

@@ -660,34 +660,32 @@ def mirror_name_or_url(m):
# accidentally to a dir in the current working directory.

# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m:
if "/" in m or "\\" in m or m in (".", ".."):
    return spack.mirror.Mirror(m)

# Otherwise, the named mirror is required to exist.
try:
    return spack.mirror.require_mirror_name(m)
except ValueError as e:
    raise argparse.ArgumentTypeError(
        str(e) + ". Did you mean {}?".format(os.path.join(".", m))
    )
    raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e


def mirror_url(url):
    try:
        return spack.mirror.Mirror.from_url(url)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
        raise argparse.ArgumentTypeError(str(e)) from e


def mirror_directory(path):
    try:
        return spack.mirror.Mirror.from_local_path(path)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
        raise argparse.ArgumentTypeError(str(e)) from e


def mirror_name(name):
    try:
        return spack.mirror.require_mirror_name(name)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e))
        raise argparse.ArgumentTypeError(str(e)) from e
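
The `from e` chaining added above keeps the original ValueError attached as `__cause__`, so the argparse error can be traced back to the underlying failure. A standalone sketch of the pattern (the names here are illustrative, not part of the diff):

    import argparse

    def positive_int(value: str) -> int:
        try:
            number = int(value)
        except ValueError as e:
            # Chaining preserves the original parse error in the traceback
            raise argparse.ArgumentTypeError(f"{value!r} is not an integer") from e
        if number <= 0:
            raise argparse.ArgumentTypeError(f"{number} is not positive")
        return number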
@@ -99,5 +99,5 @@ def deconcretize(parser, args):
    " Use `spack deconcretize --all` to deconcretize ALL specs.",
)

specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
deconcretize_specs(args, specs)

@@ -85,8 +85,14 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:


def develop(parser, args):
    # Note: we could put develop specs in any scope, but I assume
    # users would only ever want to do this for either (a) an active
    # env or (b) a specified config file (e.g. that is included by
    # an environment)
    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
    # an active env is not required if a scope is specified
    env = spack.cmd.require_active_env(cmd_name="develop")
    if not args.spec:
        env = spack.cmd.require_active_env(cmd_name="develop")
        if args.clone is False:
            raise SpackError("No spec provided to spack develop command")

@@ -116,16 +122,18 @@ def develop(parser, args):
        raise SpackError("spack develop requires at most one named spec")

    spec = specs[0]

    version = spec.versions.concrete_range_as_version
    if not version:
        raise SpackError("Packages to develop must have a concrete version")
        # look up the maximum version so infinity versions are preferred for develop
        version = max(spec.package_class.versions.keys())
        tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
    spec.versions = spack.version.VersionList([version])

    # If user does not specify --path, we choose to create a directory in the
    # active environment's directory, named after the spec
    path = args.path or spec.name
    if not os.path.isabs(path):
        env = spack.cmd.require_active_env(cmd_name="develop")
        abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
    else:
        abspath = path

@@ -149,13 +157,6 @@ def develop(parser, args):

    _retrieve_develop_source(spec, abspath)

    # Note: we could put develop specs in any scope, but I assume
    # users would only ever want to do this for either (a) an active
    # env or (b) a specified config file (e.g. that is included by
    # an environment)
    # TODO: when https://github.com/spack/spack/pull/35307 is merged,
    # an active env is not required if a scope is specified
    env = spack.cmd.require_active_env(cmd_name="develop")
    tty.debug("Updating develop config for {0} transactionally".format(env.name))
    with env.write_transaction():
        if args.build_directory is not None:

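
The `max(...)` default above relies on Spack's version ordering, where "infinity" versions such as develop compare greater than any numeric release; a small sketch of that assumption:

    from spack.version import Version

    # Infinity versions (develop, main, master, ...) sort above numeric releases,
    # so max() over a package's known versions prefers them when one is defined.
    assert Version("develop") > Version("2.1.0")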
@@ -57,35 +57,41 @@
# env create
#
def env_create_setup_parser(subparser):
    """create a new environment"""
    subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
    """create a new environment

    create a new environment or, optionally, copy an existing environment

    a manifest file results in a new abstract environment while a lock file
    creates a new concrete environment
    """
    subparser.add_argument(
        "env_name", metavar="env", help="name or directory of the new environment"
    )
    subparser.add_argument(
        "-d", "--dir", action="store_true", help="create an environment in a specific directory"
    )
    subparser.add_argument(
        "--keep-relative",
        action="store_true",
        help="copy relative develop paths verbatim into the new environment"
        " when initializing from envfile",
        help="copy envfile's relative develop paths verbatim",
    )
    view_opts = subparser.add_mutually_exclusive_group()
    view_opts.add_argument(
        "--without-view", action="store_true", help="do not maintain a view for this environment"
    )
    view_opts.add_argument(
        "--with-view",
        help="specify that this environment should maintain a view at the"
        " specified path (by default the view is maintained in the"
        " environment directory)",
        "--with-view", help="maintain view at WITH_VIEW (vs. environment's directory)"
    )
    subparser.add_argument(
        "envfile",
        nargs="?",
        default=None,
        help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
        help="manifest or lock file (ends with '.json' or '.lock')",
    )
    subparser.add_argument(
        "--include-concrete", action="append", help="name of old environment to copy specs from"
        "--include-concrete",
        action="append",
        help="copy concrete specs from INCLUDE_CONCRETE's environment",
    )


@@ -173,7 +179,7 @@ def _env_create(
# env activate
#
def env_activate_setup_parser(subparser):
    """set the current environment"""
    """set the active environment"""
    shells = subparser.add_mutually_exclusive_group()
    shells.add_argument(
        "--sh",

@@ -213,14 +219,14 @@ def env_activate_setup_parser(subparser):

    view_options = subparser.add_mutually_exclusive_group()
    view_options.add_argument(
        "--with-view",
        "-v",
        "--with-view",
        metavar="name",
        help="set runtime environment variables for specific view",
        help="set runtime environment variables for the named view",
    )
    view_options.add_argument(
        "--without-view",
        "-V",
        "--without-view",
        action="store_true",
        help="do not set runtime environment variables for any view",
    )

@@ -230,14 +236,14 @@ def env_activate_setup_parser(subparser):
        "--prompt",
        action="store_true",
        default=False,
        help="decorate the command line prompt when activating",
        help="add the active environment to the command line prompt",
    )

    subparser.add_argument(
        "--temp",
        action="store_true",
        default=False,
        help="create and activate an environment in a temporary directory",
        help="create and activate in a temporary directory",
    )
    subparser.add_argument(
        "--create",

@@ -249,13 +255,12 @@ def env_activate_setup_parser(subparser):
        "--envfile",
        nargs="?",
        default=None,
        help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
        help="manifest or lock file (ends with '.json' or '.lock')",
    )
    subparser.add_argument(
        "--keep-relative",
        action="store_true",
        help="copy relative develop paths verbatim into the new environment"
        " when initializing from envfile",
        help="copy envfile's relative develop paths verbatim when creating",
    )
    subparser.add_argument(
        "-d",

@@ -269,10 +274,7 @@ def env_activate_setup_parser(subparser):
        dest="env_name",
        nargs="?",
        default=None,
        help=(
            "name of managed environment or directory of the anonymous env"
            " (when using --dir/-d) to activate"
        ),
        help=("name or directory of the environment being activated"),
    )

@@ -385,7 +387,7 @@ def env_activate(args):
# env deactivate
#
def env_deactivate_setup_parser(subparser):
    """deactivate any active environment in the shell"""
    """deactivate the active environment"""
    shells = subparser.add_mutually_exclusive_group()
    shells.add_argument(
        "--sh",

@@ -448,23 +450,27 @@ def env_deactivate(args):
# env remove
#
def env_remove_setup_parser(subparser):
    """remove an existing environment"""
    subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
    """remove managed environment(s)

    remove existing environment(s) managed by Spack

    directory environments and manifests embedded in repositories must be
    removed manually
    """
    subparser.add_argument(
        "rm_env", metavar="env", nargs="+", help="name(s) of the environment(s) being removed"
    )
    arguments.add_common_arguments(subparser, ["yes_to_all"])
    subparser.add_argument(
        "-f",
        "--force",
        action="store_true",
        help="remove the environment even if it is included in another environment",
        help="force removal even when included in other environment(s)",
    )


def env_remove(args):
    """Remove a *named* environment.

    This removes an environment managed by Spack. Directory environments
    and manifests embedded in repositories should be removed manually.
    """
    """remove existing environment(s)"""
    remove_envs = []
    valid_envs = []
    bad_envs = []

@@ -519,29 +525,32 @@ def env_remove(args):
# env rename
#
def env_rename_setup_parser(subparser):
    """rename an existing environment"""
    """rename an existing environment

    rename a managed environment or move an independent/directory environment

    operation cannot be performed to or from an active environment
    """
    subparser.add_argument(
        "mv_from", metavar="from", help="name (or path) of existing environment"
    )
    subparser.add_argument(
        "mv_to", metavar="to", help="new name (or path) for existing environment"
        "mv_from", metavar="from", help="current name or directory of the environment"
    )
    subparser.add_argument("mv_to", metavar="to", help="new name or directory for the environment")
    subparser.add_argument(
        "-d",
        "--dir",
        action="store_true",
        help="the specified arguments correspond to directory paths",
        help="positional arguments are environment directory paths",
    )
    subparser.add_argument(
        "-f", "--force", action="store_true", help="allow overwriting of an existing environment"
        "-f",
        "--force",
        action="store_true",
        help="force renaming even if overwriting an existing environment",
    )


def env_rename(args):
    """Rename an environment.

    This renames a managed environment or moves an anonymous environment.
    """
    """rename or move an existing environment"""

    # Directory option has been specified
    if args.dir:

@@ -590,7 +599,7 @@ def env_rename(args):
# env list
#
def env_list_setup_parser(subparser):
    """list managed environments"""
    """list all managed environments"""


def env_list(args):

@@ -626,13 +635,14 @@ def actions():
# env view
#
def env_view_setup_parser(subparser):
    """manage a view associated with the environment"""
    """manage the environment's view

    provide the path when enabling a view with a non-default path
    """
    subparser.add_argument(
        "action", choices=ViewAction.actions(), help="action to take for the environment's view"
    )
    subparser.add_argument(
        "view_path", nargs="?", help="when enabling a view, optionally set the path manually"
    )
    subparser.add_argument("view_path", nargs="?", help="view's non-default path when enabling it")

def env_view(args):

@@ -660,7 +670,7 @@ def env_view(args):
# env status
#
def env_status_setup_parser(subparser):
    """print whether there is an active environment"""
    """print active environment status"""


def env_status(args):

@@ -720,14 +730,22 @@ def env_loads(args):


def env_update_setup_parser(subparser):
    """update environments to the latest format"""
    """update the environment manifest to the latest schema format

    update the environment to the latest schema format, which may not be
    readable by older versions of spack

    a backup copy of the manifest is retained in case there is a need to revert
    this operation
    """
    subparser.add_argument(
        metavar="env", dest="update_env", help="name or directory of the environment to activate"
        metavar="env", dest="update_env", help="name or directory of the environment"
    )
    spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_update(args):
    """update the manifest to the latest format"""
    manifest_file = ev.manifest_file(args.update_env)
    backup_file = manifest_file + ".bkp"

@@ -757,14 +775,22 @@ def env_update(args):


def env_revert_setup_parser(subparser):
    """restore environments to their state before update"""
    """restore the environment manifest to its previous format

    revert the environment's manifest to the schema format from its last
    'spack env update'

    the current manifest will be overwritten by the backup copy and the backup
    copy will be removed
    """
    subparser.add_argument(
        metavar="env", dest="revert_env", help="name or directory of the environment to activate"
        metavar="env", dest="revert_env", help="name or directory of the environment"
    )
    spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_revert(args):
    """restore the environment manifest to its previous format"""
    manifest_file = ev.manifest_file(args.revert_env)
    backup_file = manifest_file + ".bkp"

@@ -796,15 +822,19 @@ def env_revert(args):


def env_depfile_setup_parser(subparser):
    """generate a depfile from the concrete environment specs"""
    """generate a depfile to exploit parallel builds across specs

    requires the active environment to be concrete
    """
    subparser.add_argument(
        "--make-prefix",
        "--make-target-prefix",
        default=None,
        metavar="TARGET",
        help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
        "the absolute path to the directory makedeps under the environment metadata dir is "
        "used. can be set to an empty string --make-prefix ''",
        help="prefix Makefile targets/variables with <TARGET>/<name>,\n"
        "which can be an empty string (--make-prefix '')\n"
        "defaults to the absolute path of the environment's makedeps\n"
        "environment metadata dir\n",
    )
    subparser.add_argument(
        "--make-disable-jobserver",

@@ -819,8 +849,8 @@ def env_depfile_setup_parser(subparser):
        type=arguments.use_buildcache,
        default="package:auto,dependencies:auto",
        metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
        help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
        "this flag is passed on to the generated spack install commands",
        help="use `only` to prune redundant build dependencies\n"
        "option is also passed to generated spack install commands",
    )
    subparser.add_argument(
        "-o",

@@ -834,14 +864,14 @@ def env_depfile_setup_parser(subparser):
        "--generator",
        default="make",
        choices=("make",),
        help="specify the depfile type\n\ncurrently only make is supported",
        help="specify the depfile type (only supports `make`)",
    )
    subparser.add_argument(
        metavar="specs",
        dest="specs",
        nargs=argparse.REMAINDER,
        default=None,
        help="generate a depfile only for matching specs in the environment",
        help="limit the generated file to matching specs",
    )


@@ -910,7 +940,12 @@ def setup_parser(subparser):
    setup_parser_cmd_name = "env_%s_setup_parser" % name
    setup_parser_cmd = globals()[setup_parser_cmd_name]

    subsubparser = sp.add_parser(name, aliases=aliases, help=setup_parser_cmd.__doc__)
    subsubparser = sp.add_parser(
        name,
        aliases=aliases,
        description=setup_parser_cmd.__doc__,
        help=spack.cmd.first_line(setup_parser_cmd.__doc__),
    )
    setup_parser_cmd(subsubparser)

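
The `add_parser` change above splits each subcommand docstring into a one-line `help` (for `spack env --help`) and a full `description` (for `spack env <cmd> --help`). A plain-argparse sketch of the same idea, with `splitlines()` standing in for `spack.cmd.first_line`:

    import argparse

    doc = """create a new environment

    a manifest file results in a new abstract environment while a lock file
    creates a new concrete environment
    """
    parser = argparse.ArgumentParser(prog="spack env")
    subparsers = parser.add_subparsers()
    # Short summary in the command list, full text on the subcommand's own help page
    subparsers.add_parser("create", description=doc, help=doc.strip().splitlines()[0])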
@@ -174,17 +174,17 @@ def query_arguments(args):
if (args.missing or args.only_missing) and not args.only_deprecated:
    installed.append(InstallStatuses.MISSING)

known = any
predicate_fn = None
if args.unknown:
    known = False
    predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)

explicit = any
explicit = None
if args.explicit:
    explicit = True
if args.implicit:
    explicit = False

q_args = {"installed": installed, "known": known, "explicit": explicit}
q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}

install_tree = args.install_tree
upstreams = spack.config.get("upstreams", {})
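
With the `known` flag gone, arbitrary filters now travel to the database as `predicate_fn`, a callable over install records. A sketch of how such a query could be issued directly (using the `query_local` signature shown later in this diff):

    # Keep only records whose package recipe no longer exists in any repo
    predicate_fn = lambda rec: not spack.repo.PATH.exists(rec.spec.name)
    specs = spack.store.STORE.db.query_local(installed=True, predicate_fn=predicate_fn)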
@@ -41,7 +41,7 @@ def setup_parser(subparser):
    help="do not remove installed build-only dependencies of roots\n"
    "(default is to keep only link & run dependencies)",
)
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all", "constraint"])


def roots_from_environments(args, active_env):

@@ -97,6 +97,12 @@ def gc(parser, args):
    root_hashes = None

specs = spack.store.STORE.db.unused_specs(root_hashes=root_hashes, deptype=deptype)

# limit search to constraint specs if provided
if args.constraint:
    hashes = set(spec.dag_hash() for spec in args.specs())
    specs = [spec for spec in specs if spec.dag_hash() in hashes]

if not specs:
    tty.msg("There are no unused specs. Spack's store is clean.")
    return

@@ -80,8 +80,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
        has_errors = True

    # No installed package matches the query
    if len(matching) == 0 and spec is not any:
        tty.die("{0} does not match any installed packages.".format(spec))
    if len(matching) == 0 and spec is not None:
        tty.die(f"{spec} does not match any installed packages.")

    specs_from_cli.extend(matching)

@@ -116,6 +116,6 @@ def mark(parser, args):
    " Use `spack mark --all` to mark ALL packages.",
)

# [any] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
# [None] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
mark_specs(args, specs)
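
Worth noting for this and the similar deconcretize/uninstall/test hunks: the old wildcard sentinel was the Python builtin `any`, so checks like `spec is not any` were easy to misread as "any spec"; `None` is the conventional "no constraint" value:

    # Old: [any] used the builtin function object as a "match everything" marker
    # New: [None] signals "no constraint" explicitly
    specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]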
@@ -378,7 +378,10 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
    # Qualifiers to be used when querying the db for specs
    constraint_qualifiers = {
        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
        "refresh": {
            "installed": True,
            "predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
        }
    }
    query_args = constraint_qualifiers.get(args.subparser_name, {})

@@ -165,7 +165,7 @@ def test_run(args):
if args.fail_fast:
    spack.config.set("config:fail_fast", True, scope="command_line")

explicit = args.explicit or any
explicit = args.explicit or None
explicit_str = "explicitly " if args.explicit else ""

# Get specs to test

@@ -90,6 +90,7 @@ def find_matching_specs(
        env: optional active environment
        specs: list of specs to be matched against installed packages
        allow_multiple_matches: if True multiple matches are admitted
        origin: origin of the spec

    Return:
        list: list of specs

@@ -98,7 +99,7 @@ def find_matching_specs(
    hashes = env.all_hashes() if env else None

    # List of specs that match expressions given via command line
    specs_from_cli = []
    specs_from_cli: List["spack.spec.Spec"] = []
    has_errors = False
    for spec in specs:
        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]

@@ -116,7 +117,7 @@ def find_matching_specs(
            has_errors = True

        # No installed package matches the query
        if len(matching) == 0 and spec is not any:
        if len(matching) == 0 and spec is not None:
            if env:
                pkg_type = "packages in environment '%s'" % env.name
            else:

@@ -213,7 +214,7 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi

    # Gets the list of installed specs that match the ones given via cli
    # args.all takes care of the case where '-a' is given in the cli
    matching_specs = find_matching_specs(env, specs, args.all)
    matching_specs = find_matching_specs(env, specs, args.all, origin=args.origin)
    dependent_specs = installed_dependents(matching_specs)
    all_uninstall_specs = matching_specs + dependent_specs if args.dependents else matching_specs
    other_dependent_envs = dependent_environments(all_uninstall_specs, current_env=env)

@@ -301,6 +302,6 @@ def uninstall(parser, args):
    " Use `spack uninstall --all` to uninstall ALL packages.",
)

# [any] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
# [None] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
uninstall_specs(args, specs)

@@ -33,6 +33,8 @@
YamlFilesystemView.

"""
import sys

import llnl.util.tty as tty
from llnl.util.link_tree import MergeConflictError

@@ -178,7 +180,12 @@ def setup_parser(sp):


def view(parser, args):
    "Produce a view of a set of packages."
    """Produce a view of a set of packages."""

    if sys.platform == "win32" and args.action in ("hardlink", "hard"):
        # Hard-linked views are not yet allowed on Windows.
        # See https://github.com/spack/spack/pull/46335#discussion_r1757411915
        tty.die("Hard linking is not supported on Windows. Please use symlinks or copy methods.")

    specs = spack.cmd.parse_specs(args.specs)
    path = args.path[0]

@@ -275,7 +275,7 @@ def __init__(
    operating_system,
    target,
    paths,
    modules=None,
    modules: Optional[List[str]] = None,
    alias=None,
    environment=None,
    extra_rpaths=None,

@@ -415,14 +415,19 @@ def implicit_rpaths(self) -> List[str]:
    return list(paths_containing_libs(link_dirs, all_required_libs))

@property
def default_libc(self) -> Optional["spack.spec.Spec"]:
    """Determine libc targeted by the compiler from link line"""
def default_dynamic_linker(self) -> Optional[str]:
    """Determine default dynamic linker from compiler link line"""
    output = self.compiler_verbose_output

    if not output:
        return None

    dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
    return spack.util.libc.parse_dynamic_linker(output)

@property
def default_libc(self) -> Optional["spack.spec.Spec"]:
    """Determine libc targeted by the compiler from link line"""
    dynamic_linker = self.default_dynamic_linker

    if not dynamic_linker:
        return None
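
The refactor above splits linker detection out of `default_libc` so each concern is its own property. A reduced sketch of the resulting layering (the final `libc_from_dynamic_linker` call is an assumption about how the truncated body continues):

    class CompilerSketch:
        @property
        def default_dynamic_linker(self):
            output = self.compiler_verbose_output
            if not output:
                return None
            return spack.util.libc.parse_dynamic_linker(output)

        @property
        def default_libc(self):
            dynamic_linker = self.default_dynamic_linker
            if not dynamic_linker:
                return None
            return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)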
|
@@ -92,6 +92,14 @@ def c11_flag(self):
|
||||
else:
|
||||
return "-std=c1x"
|
||||
|
||||
@property
|
||||
def c18_flag(self):
|
||||
# c18 supported since oneapi 2022, which is classic version 2021.5.0
|
||||
if self.real_version < Version("21.5.0"):
|
||||
raise UnsupportedCompilerFlag(self, "the C18 standard", "c18_flag", "< 21.5.0")
|
||||
else:
|
||||
return "-std=c18"
|
||||
|
||||
@property
|
||||
def cc_pic_flag(self):
|
||||
return "-fPIC"
|
||||
@@ -116,9 +124,8 @@ def setup_custom_environment(self, pkg, env):
|
||||
# Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
|
||||
# Always pass flags to disable deprecation warnings, since these warnings can
|
||||
# confuse tools that parse the output of compiler commands (e.g. version checks).
|
||||
if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
|
||||
if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
|
||||
env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
|
||||
if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
|
||||
if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
|
||||
if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
|
||||
env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")
|
||||
|
@@ -293,6 +293,17 @@ def platform_toolset_ver(self):
    vs22_toolset = Version(toolset_ver) > Version("142")
    return toolset_ver if not vs22_toolset else "143"

@property
def visual_studio_version(self):
    """The four digit Visual Studio version (i.e. 2019 or 2022)

    Note: This differs from the msvc version or toolset version as
    those properties track the compiler and build tools version
    respectively, whereas this tracks the VS release associated
    with a given MSVC compiler.
    """
    return re.search(r"[0-9]{4}", self.cc).group(0)

def _compiler_version(self, compiler):
    """Returns version object for given compiler"""
    # ignore_errors below is true here due to ifx's

@@ -7,7 +7,9 @@
from os.path import dirname, join

from llnl.util import tty
from llnl.util.filesystem import ancestor

import spack.util.executable
from spack.compiler import Compiler
from spack.version import Version

@@ -116,6 +118,24 @@ def fc_pic_flag(self):
def stdcxx_libs(self):
    return ("-cxxlib",)

@property
def prefix(self):
    # OneAPI reports its install prefix when running ``--version``
    # on the line ``InstalledDir: <prefix>/bin/compiler``.
    cc = spack.util.executable.Executable(self.cc)
    with self.compiler_environment():
        oneapi_output = cc("--version", output=str, error=str)

    for line in oneapi_output.splitlines():
        if line.startswith("InstalledDir:"):
            oneapi_prefix = line.split(":")[1].strip()
            # Go from <prefix>/bin/compiler to <prefix>
            return ancestor(oneapi_prefix, 2)

    raise RuntimeError(
        "could not find install prefix of OneAPI from output:\n\t{}".format(oneapi_output)
    )

def setup_custom_environment(self, pkg, env):
    # workaround bug in icpx driver where it requires sycl-post-link is on the PATH
    # It is located in the same directory as the driver. Error message:

@@ -131,11 +151,14 @@ def setup_custom_environment(self, pkg, env):
    # Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
    # Always pass flags to disable deprecation warnings, since these warnings can
    # confuse tools that parse the output of compiler commands (e.g. version checks).
    if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
    # This is really only needed for Fortran, since oneapi@ should be using either
    # icx+icpx+ifx or icx+icpx+ifort. But to be on the safe side (some users may
    # want to try to swap icpx against icpc, for example), and since the Intel LLVM
    # compilers accept these diag-disable flags, we apply them for all compilers.
    if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
        env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
    if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
        env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
    if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
    if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
        env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")

    # 2024 release bumped the libsycl version because of an ABI
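
For context, the `InstalledDir:` scan in the new `prefix` property expects `icx --version` output of roughly this shape (the exact banner and path are illustrative):

    Intel(R) oneAPI DPC++/C++ Compiler 2024.0.0 (2024.0.0.20231017)
    Target: x86_64-unknown-linux-gnu
    InstalledDir: /opt/intel/oneapi/compiler/2024.0/bin/compiler

    # ancestor(oneapi_prefix, 2) then maps .../2024.0/bin/compiler to .../2024.0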
@@ -32,6 +32,7 @@
    Container,
    Dict,
    Generator,
    Iterable,
    List,
    NamedTuple,
    Optional,

@@ -290,55 +291,6 @@ def __reduce__(self):
    return ForbiddenLock, tuple()


_QUERY_DOCSTRING = """

    Args:
        query_spec: queries iterate through specs in the database and
            return those that satisfy the supplied ``query_spec``. If
            query_spec is `any`, this will match all specs in the
            database. If it is a spec, we'll evaluate
            ``spec.satisfies(query_spec)``

        known (bool or None): Specs that are "known" are those
            for which Spack can locate a ``package.py`` file -- i.e.,
            Spack "knows" how to install them. Specs that are unknown may
            represent packages that existed in a previous version of
            Spack, but have since either changed their name or
            been removed

        installed (bool or InstallStatus or typing.Iterable or None):
            if ``True``, includes only installed
            specs in the search; if ``False`` only missing specs, and if
            ``any``, all specs in database. If an InstallStatus or iterable
            of InstallStatus, returns specs whose install status
            (installed, deprecated, or missing) matches (one of) the
            InstallStatus. (default: True)

        explicit (bool or None): A spec that was installed
            following a specific user request is marked as explicit. If
            instead it was pulled-in as a dependency of a user requested
            spec it's considered implicit.

        start_date (datetime.datetime or None): filters the query
            discarding specs that have been installed before ``start_date``.

        end_date (datetime.datetime or None): filters the query discarding
            specs that have been installed after ``end_date``.

        hashes (Container): list or set of hashes that we can use to
            restrict the search

        in_buildcache (bool or None): Specs that are marked in
            this database as part of an associated binary cache are
            ``in_buildcache``. All other specs are not. This field is used
            for querying mirror indices. Default is ``any``.

    Returns:
        list of specs that match the query

"""


class LockConfiguration(NamedTuple):
    """Data class to configure locks in Database objects

@@ -604,6 +556,9 @@ def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
    return self.dir / f"{spec.name}-{spec.dag_hash()}"


SelectType = Callable[[InstallRecord], bool]


class Database:
    #: Fields written for each install record
    record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -1245,7 +1200,7 @@ def _add(
    self._data[key].explicit = explicit

@_autospec
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False, allow_missing=False) -> None:
    """Add spec at path to database, locking and reading DB to sync.

    ``add()`` will lock and read from the DB on disk.

@@ -1254,7 +1209,7 @@ def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
    # TODO: ensure that spec is concrete?
    # Entire add is transactional.
    with self.write_transaction():
        self._add(spec, explicit=explicit)
        self._add(spec, explicit=explicit, allow_missing=allow_missing)

def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
    """Get the exact spec OR get a single spec that matches."""
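
A sketch of the new keyword on `add()`; the call is hypothetical, and the reading of `allow_missing` (tolerate dependencies that are not present in the database) is inferred from the parameter name rather than stated in this diff:

    # Record a concrete spec even if one of its dependencies is absent from the DB
    db.add(concrete_spec, explicit=True, allow_missing=True)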

@@ -1525,62 +1480,51 @@ def get_by_hash(self, dag_hash, default=None, installed=any):

def _query(
    self,
    query_spec=any,
    known=any,
    installed=True,
    explicit=any,
    start_date=None,
    end_date=None,
    hashes=None,
    in_buildcache=any,
    origin=None,
):
    """Run a query on the database."""
    query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
    *,
    predicate_fn: Optional[SelectType] = None,
    installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
    explicit: Optional[bool] = None,
    start_date: Optional[datetime.datetime] = None,
    end_date: Optional[datetime.datetime] = None,
    hashes: Optional[Iterable[str]] = None,
    in_buildcache: Optional[bool] = None,
    origin: Optional[str] = None,
) -> List["spack.spec.Spec"]:

    # TODO: Specs are a lot like queries. Should there be a
    # TODO: wildcard spec object, and should specs have attributes
    # TODO: like installed and known that can be queried? Or are
    # TODO: these really special cases that only belong here?
    # Restrict the set of records over which we iterate first
    matching_hashes = self._data
    if hashes is not None:
        matching_hashes = {h: self._data[h] for h in hashes if h in self._data}

    if query_spec is not any:
        if not isinstance(query_spec, spack.spec.Spec):
            query_spec = spack.spec.Spec(query_spec)
    if isinstance(query_spec, str):
        query_spec = spack.spec.Spec(query_spec)

        # Just look up concrete specs with hashes; no fancy search.
        if query_spec.concrete:
            # TODO: handling of hashes restriction is not particularly elegant.
            hash_key = query_spec.dag_hash()
            if hash_key in self._data and (not hashes or hash_key in hashes):
                return [self._data[hash_key].spec]
            else:
                return []
    if query_spec is not None and query_spec.concrete:
        hash_key = query_spec.dag_hash()
        if hash_key not in matching_hashes:
            return []
        matching_hashes = {hash_key: matching_hashes[hash_key]}

    # Abstract specs require more work -- currently we test
    # against everything.
    results = []
    start_date = start_date or datetime.datetime.min
    end_date = end_date or datetime.datetime.max

    # save specs whose name doesn't match for last, to avoid a virtual check
    deferred = []

    for key, rec in self._data.items():
        if hashes is not None and rec.spec.dag_hash() not in hashes:
            continue

    for rec in matching_hashes.values():
        if origin and not (origin == rec.origin):
            continue

        if not rec.install_type_matches(installed):
            continue

        if in_buildcache is not any and rec.in_buildcache != in_buildcache:
        if in_buildcache is not None and rec.in_buildcache != in_buildcache:
            continue

        if explicit is not any and rec.explicit != explicit:
        if explicit is not None and rec.explicit != explicit:
            continue

        if known is not any and known(rec.spec.name):
        if predicate_fn is not None and not predicate_fn(rec):
            continue

        if start_date or end_date:

@@ -1588,7 +1532,7 @@ def _query(
            if not (start_date < inst_date < end_date):
                continue

        if query_spec is any:
        if query_spec is None or query_spec.concrete:
            results.append(rec.spec)
            continue

@@ -1606,36 +1550,118 @@ def _query(
    # If we did find something, the query spec can't be virtual b/c we matched an actual
    # package installation, so skip the virtual check entirely. If we *didn't* find anything,
    # check all the deferred specs *if* the query is virtual.
    if not results and query_spec is not any and deferred and query_spec.virtual:
    if not results and query_spec is not None and deferred and query_spec.virtual:
        results = [spec for spec in deferred if spec.satisfies(query_spec)]

    return results

if _query.__doc__ is None:
    _query.__doc__ = ""
_query.__doc__ += _QUERY_DOCSTRING
def query_local(
    self,
    query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
    *,
    predicate_fn: Optional[SelectType] = None,
    installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
    explicit: Optional[bool] = None,
    start_date: Optional[datetime.datetime] = None,
    end_date: Optional[datetime.datetime] = None,
    hashes: Optional[List[str]] = None,
    in_buildcache: Optional[bool] = None,
    origin: Optional[str] = None,
) -> List["spack.spec.Spec"]:
    """Queries the local Spack database.

def query_local(self, *args, **kwargs):
    """Query only the local Spack database.
    This function doesn't guarantee any sorting of the returned data for performance reasons,
    since comparing specs for __lt__ may be an expensive operation.

    This function doesn't guarantee any sorting of the returned
    data for performance reasons, since comparing specs for __lt__
    may be an expensive operation.
    Args:
        query_spec: if query_spec is ``None``, match all specs in the database.
|
||||
If it is a spec, return all specs matching ``spec.satisfies(query_spec)``.
|
||||
|
||||
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||
whether that record is selected for the query. It can be used to craft criteria
|
||||
that need some data for selection not provided by the Database itself.
|
||||
|
||||
installed: if ``True``, includes only installed specs in the search. If ``False`` only
|
||||
missing specs, and if ``any``, all specs in database. If an InstallStatus or
|
||||
iterable of InstallStatus, returns specs whose install status matches at least
|
||||
one of the InstallStatus.
|
||||
|
||||
explicit: a spec that was installed following a specific user request is marked as
|
||||
explicit. If instead it was pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
start_date: if set considers only specs installed from the starting date.
|
||||
|
||||
end_date: if set considers only specs installed until the ending date.
|
||||
|
||||
in_buildcache: specs that are marked in this database as part of an associated binary
|
||||
cache are ``in_buildcache``. All other specs are not. This field is used for
|
||||
querying mirror indices. By default, it does not check this status.
|
||||
|
||||
hashes: list of hashes used to restrict the search
|
||||
|
||||
origin: origin of the spec
|
||||
"""
|
||||
with self.read_transaction():
|
||||
return self._query(*args, **kwargs)
|
||||
return self._query(
|
||||
query_spec,
|
||||
predicate_fn=predicate_fn,
|
||||
installed=installed,
|
||||
explicit=explicit,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
hashes=hashes,
|
||||
in_buildcache=in_buildcache,
|
||||
origin=origin,
|
||||
)
|
||||
|
||||
if query_local.__doc__ is None:
|
||||
query_local.__doc__ = ""
|
||||
query_local.__doc__ += _QUERY_DOCSTRING
|
||||
def query(
|
||||
self,
|
||||
query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
|
||||
*,
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
|
||||
explicit: Optional[bool] = None,
|
||||
start_date: Optional[datetime.datetime] = None,
|
||||
end_date: Optional[datetime.datetime] = None,
|
||||
in_buildcache: Optional[bool] = None,
|
||||
hashes: Optional[List[str]] = None,
|
||||
origin: Optional[str] = None,
|
||||
install_tree: str = "all",
|
||||
):
|
||||
"""Queries the Spack database including all upstream databases.
|
||||
|
||||
def query(self, *args, **kwargs):
|
||||
"""Query the Spack database including all upstream databases.
|
||||
Args:
|
||||
query_spec: if query_spec is ``None``, match all specs in the database.
|
||||
If it is a spec, return all specs matching ``spec.satisfies(query_spec)``.
|
||||
|
||||
Additional Arguments:
|
||||
install_tree (str): query 'all' (default), 'local', 'upstream', or upstream path
|
||||
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||
whether that record is selected for the query. It can be used to craft criteria
|
||||
that need some data for selection not provided by the Database itself.
|
||||
|
||||
installed: if ``True``, includes only installed specs in the search. If ``False`` only
|
||||
missing specs, and if ``any``, all specs in database. If an InstallStatus or
|
||||
iterable of InstallStatus, returns specs whose install status matches at least
|
||||
one of the InstallStatus.
|
||||
|
||||
explicit: a spec that was installed following a specific user request is marked as
|
||||
explicit. If instead it was pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
start_date: if set considers only specs installed from the starting date.
|
||||
|
||||
end_date: if set considers only specs installed until the ending date.
|
||||
|
||||
in_buildcache: specs that are marked in this database as part of an associated binary
|
||||
cache are ``in_buildcache``. All other specs are not. This field is used for
|
||||
querying mirror indices. By default, it does not check this status.
|
||||
|
||||
hashes: list of hashes used to restrict the search
|
||||
|
||||
install_tree: query 'all' (default), 'local', 'upstream', or upstream path
|
||||
|
||||
origin: origin of the spec
|
||||
"""
|
||||
install_tree = kwargs.pop("install_tree", "all")
|
||||
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
|
||||
if install_tree not in valid_trees:
|
||||
msg = "Invalid install_tree argument to Database.query()\n"
|
||||
@@ -1651,28 +1677,54 @@ def query(self, *args, **kwargs):
|
||||
# queries for upstream DBs need to *not* lock - we may not
|
||||
# have permissions to do this and the upstream DBs won't know about
|
||||
# us anyway (so e.g. they should never uninstall specs)
|
||||
upstream_results.extend(upstream_db._query(*args, **kwargs) or [])
|
||||
upstream_results.extend(
|
||||
upstream_db._query(
|
||||
query_spec,
|
||||
predicate_fn=predicate_fn,
|
||||
installed=installed,
|
||||
explicit=explicit,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
hashes=hashes,
|
||||
in_buildcache=in_buildcache,
|
||||
origin=origin,
|
||||
)
|
||||
or []
|
||||
)
|
||||
|
||||
local_results = []
|
||||
local_results: Set["spack.spec.Spec"] = set()
|
||||
if install_tree in ("all", "local") or self.root == install_tree:
|
||||
local_results = set(self.query_local(*args, **kwargs))
|
||||
local_results = set(
|
||||
self.query_local(
|
||||
query_spec,
|
||||
predicate_fn=predicate_fn,
|
||||
installed=installed,
|
||||
explicit=explicit,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
hashes=hashes,
|
||||
in_buildcache=in_buildcache,
|
||||
origin=origin,
|
||||
)
|
||||
)
|
||||
|
||||
results = list(local_results) + list(x for x in upstream_results if x not in local_results)
|
||||
|
||||
return sorted(results)
|
||||
|
||||
if query.__doc__ is None:
|
||||
query.__doc__ = ""
|
||||
query.__doc__ += _QUERY_DOCSTRING
|
||||
|
||||
def query_one(self, query_spec, known=any, installed=True):
|
||||
def query_one(
|
||||
self,
|
||||
query_spec: Optional[Union[str, "spack.spec.Spec"]],
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
|
||||
) -> Optional["spack.spec.Spec"]:
|
||||
"""Query for exactly one spec that matches the query spec.
|
||||
|
||||
Raises an assertion error if more than one spec matches the
|
||||
query. Returns None if no installed package matches.
|
||||
Returns None if no installed package matches.
|
||||
|
||||
Raises:
|
||||
AssertionError: if more than one spec matches the query.
|
||||
"""
|
||||
concrete_specs = self.query(query_spec, known=known, installed=installed)
|
||||
concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
|
||||
assert len(concrete_specs) <= 1
|
||||
return concrete_specs[0] if concrete_specs else None
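
The hunks above replace the old ``any``-sentinel arguments with explicit keyword-only
parameters: ``None`` now means "do not filter on this field", and ``predicate_fn``
supersedes the old ``known`` callable. A minimal usage sketch under the new signatures
(the spec names are illustrative, and a working Spack store is assumed):

    import spack.store

    db = spack.store.STORE.db

    # A string query is promoted to a Spec; None (the default) matches everything.
    installed_zlibs = db.query("zlib")

    # Keyword-only filters: True/False filter explicitly, None skips the check.
    explicitly_installed = db.query_local("zlib", explicit=True)

    # predicate_fn receives the whole InstallRecord, not just a spec name.
    not_in_cache = db.query(predicate_fn=lambda rec: not rec.in_buildcache)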
@@ -11,6 +11,7 @@
import os.path
import re
import sys
import traceback
import warnings
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type

@@ -18,6 +19,7 @@
import llnl.util.lang
import llnl.util.tty

import spack.error
import spack.spec
import spack.util.elf as elf_utils
import spack.util.environment

@@ -66,6 +68,21 @@ def file_identifier(path):
    return s.st_dev, s.st_ino


def dedupe_paths(paths: List[str]) -> List[str]:
    """Deduplicate paths based on inode and device number. In case the list contains first a
    symlink and then the directory it points to, the symlink is replaced with the directory path.
    This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
    the former."""
    seen: Dict[Tuple[int, int], str] = {}
    for path in paths:
        identifier = file_identifier(path)
        if identifier not in seen:
            seen[identifier] = path
        elif not os.path.islink(path):
            seen[identifier] = path
    return list(seen.values())
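
A self-contained sketch of the intended behavior on a merged-/usr system, where
``/bin`` is a symlink to ``/usr/bin`` (the logic mirrors ``dedupe_paths`` above;
the output depends on the host's filesystem layout):

    import os
    from typing import Dict, List, Tuple

    def file_identifier(path: str) -> Tuple[int, int]:
        s = os.stat(path)  # follows symlinks, so a link and its target compare equal
        return s.st_dev, s.st_ino

    def dedupe_paths(paths: List[str]) -> List[str]:
        # last non-symlink wins for a given (device, inode) pair
        seen: Dict[Tuple[int, int], str] = {}
        for path in paths:
            identifier = file_identifier(path)
            if identifier not in seen or not os.path.islink(path):
                seen[identifier] = path
        return list(seen.values())

    # On a distro where /bin -> /usr/bin this prints ['/usr/bin']; a purely
    # order-based dedupe would have kept '/bin' instead.
    print(dedupe_paths(["/bin", "/usr/bin"]))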

def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
    """Get the paths of all executables available from the current PATH.

@@ -82,8 +99,7 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
    """
    search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
    # Make sure we don't doubly list /usr/lib and /lib etc
    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
    return path_to_dict(search_paths)
    return path_to_dict(dedupe_paths(search_paths))


def accept_elf(path, host_compat):

@@ -144,7 +160,7 @@ def libraries_in_ld_and_system_library_path(
    search_paths = list(filter(os.path.isdir, search_paths))

    # Make sure we don't doubly list /usr/lib and /lib etc
    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
    search_paths = dedupe_paths(search_paths)

    try:
        host_compat = elf_utils.get_elf_compat(sys.executable)

@@ -260,8 +276,12 @@ def detect_specs(
            )
        except Exception as e:
            specs = []
            if spack.error.SHOW_BACKTRACE:
                details = traceback.format_exc()
            else:
                details = f"[{e.__class__.__name__}: {e}]"
            warnings.warn(
                f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
                f'error detecting "{pkg.name}" from prefix {candidate_path}: {details}'
            )

        if not specs:

@@ -435,9 +455,9 @@ def by_path(
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
            )
        except Exception as e:
        except Exception:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}"
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: {traceback.format_exc()}"
            )

    return result
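
Both detection hunks route error reporting through the module-level backtrace switch.
A standalone sketch of that formatting choice, with SHOW_BACKTRACE standing in for
``spack.error.SHOW_BACKTRACE`` (enabled by ``--backtrace``):

    import traceback

    SHOW_BACKTRACE = False  # stand-in for spack.error.SHOW_BACKTRACE

    def error_details(exc: Exception) -> str:
        # Full stack when backtraces are requested, compact summary otherwise.
        if SHOW_BACKTRACE:
            return traceback.format_exc()
        return f"[{exc.__class__.__name__}: {exc}]"

    try:
        raise ValueError("not a valid prefix")
    except Exception as e:
        print(error_details(e))  # -> [ValueError: not a valid prefix]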
@@ -9,11 +9,13 @@

import os
import re
import shlex
from enum import Enum
from typing import List, Optional

import spack.deptypes as dt
import spack.environment.environment as ev
import spack.paths
import spack.spec
import spack.traverse as traverse

@@ -226,6 +228,7 @@ def to_dict(self):
            "install_deps_target": self._target("install-deps"),
            "any_hash_target": self._target("%"),
            "jobserver_support": self.jobserver_support,
            "spack_script": shlex.quote(spack.paths.spack_script),
            "adjacency_list": self.make_adjacency_list,
            "phony_convenience_targets": " ".join(self.phony_convenience_targets),
            "pkg_ids_variable": self.pkg_identifier_variable,
@@ -1159,6 +1159,8 @@ def clear(self, re_read=False):
        # things that cannot be recreated from file
        self.new_specs = []  # write packages for these on write()

        self.manifest.clear()

    @property
    def active(self):
        """True if this environment is currently active."""

@@ -1954,17 +1956,16 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
        specs = specs if specs is not None else roots

        # Extend the set of specs to overwrite with modified dev specs and their parents
        overwrite: Set[str] = set()
        overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
        install_args["overwrite"] = overwrite
        install_args["overwrite"] = {
            *install_args.get("overwrite", ()),
            *self._dev_specs_that_need_overwrite(),
        }

        explicit: Set[str] = set()
        explicit.update(
            install_args.get("explicit", []),
            (s.dag_hash() for s in specs),
            (s.dag_hash() for s in roots),
        )
        install_args["explicit"] = explicit
        # Only environment roots are marked explicit
        install_args["explicit"] = {
            *install_args.get("explicit", ()),
            *(s.dag_hash() for s in roots),
        }
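
The rewrite collapses build-mutate-assign into a single set display with unpacking.
An equivalent before/after on toy data (the ids are placeholders):

    install_args = {"overwrite": ["abc123"]}
    dev_specs_needing_overwrite = {"def456"}  # stand-in for the helper's result

    # Old style: build a set, mutate it, then assign.
    overwrite = set()
    overwrite.update(install_args.get("overwrite", []), dev_specs_needing_overwrite)
    install_args["overwrite"] = overwrite

    # New style: one expression, same result.
    install_args["overwrite"] = {
        *install_args.get("overwrite", ()),
        *dev_specs_needing_overwrite,
    }
    assert install_args["overwrite"] == {"abc123", "def456"}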

        PackageInstaller([spec.package for spec in specs], **install_args).install()

@@ -2163,6 +2164,13 @@ def _concrete_specs_dict(self):
            # Assumes no legacy formats, since this was just created.
            spec_dict[ht.dag_hash.name] = s.dag_hash()
            concrete_specs[s.dag_hash()] = spec_dict

            if s.build_spec is not s:
                for d in s.build_spec.traverse():
                    build_spec_dict = d.node_dict_with_hashes(hash=ht.dag_hash)
                    build_spec_dict[ht.dag_hash.name] = d.dag_hash()
                    concrete_specs[d.dag_hash()] = build_spec_dict

        return concrete_specs

    def _concrete_roots_dict(self):

@@ -2322,7 +2330,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
            specs_by_hash[lockfile_key] = spec

        # Second pass: For each spec, get its dependencies from the node dict
        # and add them to the spec
        # and add them to the spec, including build specs
        for lockfile_key, node_dict in json_specs_by_hash.items():
            name, data = reader.name_and_data(node_dict)
            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):

@@ -2330,6 +2338,10 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
                    specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
                )

            if "build_spec" in node_dict:
                _, bhash, _ = reader.extract_build_spec_info_from_node_dict(node_dict)
                specs_by_hash[lockfile_key]._build_spec = specs_by_hash[bhash]

        # Traverse the root specs one at a time in the order they appear.
        # The first time we see each DAG hash, that's the one we want to
        # keep. This is only required as long as we support older lockfile

@@ -2789,6 +2801,11 @@ def remove_user_spec(self, user_spec: str) -> None:
            raise SpackEnvironmentError(msg) from e
        self.changed = True

    def clear(self) -> None:
        """Clear all user specs from the list of root specs"""
        self.configuration["specs"] = []
        self.changed = True

    def override_user_spec(self, user_spec: str, idx: int) -> None:
        """Overrides the user spec at index idx with the one passed as input.
@@ -48,8 +48,6 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
        cmds += 'set "SPACK_ENV=%s"\n' % env.path
        if view:
            cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "$Env:SPACK_ENV='%s'\n" % env.path
        if view:
@@ -12,6 +12,9 @@
#: this is module-scoped because it needs to be set very early
debug = 0

#: whether to show a backtrace when an error is printed, enabled with --backtrace.
SHOW_BACKTRACE = False


class SpackError(Exception):
    """This is the superclass for all Spack errors.
@@ -33,7 +33,7 @@
import urllib.parse
import urllib.request
from pathlib import PurePath
from typing import List, Optional
from typing import Dict, List, Optional

import llnl.url
import llnl.util

@@ -49,6 +49,7 @@
import spack.util.archive
import spack.util.crypto as crypto
import spack.util.git
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
import spack.version

@@ -110,6 +111,28 @@ def __init__(self, **kwargs):

        self.package = None

    def source_provenance(self) -> Dict:
        """Create a metadata dictionary that describes the artifacts fetched by this FetchStrategy.

        The returned dictionary is added to the content used to determine the full hash
        for a package. It should be serializable as JSON.

        It should include data like sha256 hashes for archives, commits for source
        repositories, and any information needed to describe exactly what artifacts went
        into a build.

        If a package has no source artifacts, it should return an empty dictionary.
        """
        attrs = syaml.syaml_dict()
        if self.url_attr:
            attrs["type"] = "archive" if self.url_attr == "url" else self.url_attr
        for attr in self.optional_attrs:
            value = getattr(self, attr, None)
            if value:
                attrs[attr] = value
        return attrs
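
For orientation, a hedged sketch of the kind of dictionary this base implementation
yields for an archive fetcher; the exact keys depend on ``url_attr``, the optional
attributes, and the digest (the values below are made up):

    import json

    # Hypothetical provenance for an archive fetched by URL with a sha256 digest:
    # "type" is derived from url_attr, the digest key from the hash algorithm.
    provenance = {"type": "archive", "sha256": "9f1a...0b2e"}  # placeholder digest
    json.dumps(provenance)  # must stay JSON-serializable to feed the full hash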

    def set_package(self, package):
        self.package = package

@@ -152,17 +175,6 @@ def cachable(self):
            bool: True if can cache, False otherwise.
        """

    def source_id(self):
        """A unique ID for the source.

        It is intended that a human could easily generate this themselves using
        the information available to them in the Spack package.

        The returned value is added to the content which determines the full
        hash for a package using `str()`.
        """
        raise NotImplementedError

    def mirror_id(self):
        """This is a unique ID for a source that is intended to help identify
        reuse of resources across packages.

@@ -213,9 +225,9 @@ def cachable(self):
        """Report False as there is no code to cache."""
        return False

    def source_id(self):
        """BundlePackages don't have a source id."""
        return ""
    def source_provenance(self) -> Dict:
        """BundlePackages don't have a source of their own."""
        return {}

    def mirror_id(self):
        """BundlePackages don't have a mirror id."""

@@ -260,8 +272,15 @@ def curl(self) -> Executable:
            self._curl = web_util.require_curl()
        return self._curl

    def source_id(self):
        return self.digest
    def source_provenance(self) -> Dict:
        attrs = super().source_provenance()
        if self.digest:
            try:
                hash_type = spack.util.crypto.hash_algo_for_digest(self.digest)
            except ValueError:
                hash_type = "digest"
            attrs[hash_type] = self.digest
        return attrs

    def mirror_id(self):
        if not self.digest:

@@ -772,9 +791,15 @@ def git(self):
    def cachable(self):
        return self.cache_enabled and bool(self.commit)

    def source_id(self):
        # TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
        return self.commit
    def source_provenance(self) -> Dict:
        attrs = super().source_provenance()

        # need to fully resolve submodule callbacks for node dicts
        submodules = attrs.get("submodules", None)
        if submodules and callable(submodules):
            attrs["submodules"] = submodules(self.package)

        return attrs

    def mirror_id(self):
        if self.commit:

@@ -1084,17 +1109,6 @@ def cvs(self):
    def cachable(self):
        return self.cache_enabled and (bool(self.branch) or bool(self.date))

    def source_id(self):
        if not (self.branch or self.date):
            # We need a branch or a date to make a checkout reproducible
            return None
        id = "id"
        if self.branch:
            id += "-branch=" + self.branch
        if self.date:
            id += "-date=" + self.date
        return id

    def mirror_id(self):
        if not (self.branch or self.date):
            # We need a branch or a date to make a checkout reproducible

@@ -1197,9 +1211,6 @@ def svn(self):
    def cachable(self):
        return self.cache_enabled and bool(self.revision)

    def source_id(self):
        return self.revision

    def mirror_id(self):
        if self.revision:
            repo_path = urllib.parse.urlparse(self.url).path

@@ -1307,9 +1318,6 @@ def hg(self):
    def cachable(self):
        return self.cache_enabled and bool(self.revision)

    def source_id(self):
        return self.revision

    def mirror_id(self):
        if self.revision:
            repo_path = urllib.parse.urlparse(self.url).path
@@ -33,6 +33,7 @@
from llnl.util.tty.color import colorize

import spack.config
import spack.directory_layout
import spack.paths
import spack.projections
import spack.relocate

@@ -50,7 +51,7 @@
_projections_path = ".spack/projections.yaml"


LinkCallbackType = Callable[[str, str, "FilesystemView", Optional["spack.spec.Spec"]], None]
LinkCallbackType = Callable[[str, str, "FilesystemView", Optional[spack.spec.Spec]], None]


def view_symlink(src: str, dst: str, *args, **kwargs) -> None:

@@ -62,7 +63,7 @@ def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:


def view_copy(
    src: str, dst: str, view: "FilesystemView", spec: Optional["spack.spec.Spec"] = None
    src: str, dst: str, view: "FilesystemView", spec: Optional[spack.spec.Spec] = None
) -> None:
    """
    Copy a file from src to dst.

@@ -100,10 +101,12 @@ def view_copy(

    spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)

    try:
        os.chown(dst, src_stat.st_uid, src_stat.st_gid)
    except OSError:
        tty.debug(f"Can't change the permissions for {dst}")
    # The os module on Windows does not have a chown function.
    if sys.platform != "win32":
        try:
            os.chown(dst, src_stat.st_uid, src_stat.st_gid)
        except OSError:
            tty.debug(f"Can't change the permissions for {dst}")


#: supported string values for `link_type` in an env, mapped to canonical values

@@ -158,7 +161,7 @@ class FilesystemView:
    def __init__(
        self,
        root: str,
        layout: "spack.directory_layout.DirectoryLayout",
        layout: spack.directory_layout.DirectoryLayout,
        *,
        projections: Optional[Dict] = None,
        ignore_conflicts: bool = False,

@@ -180,7 +183,10 @@ def __init__(

        # Setup link function to include view
        self.link_type = link_type
        self.link = ft.partial(function_for_link_type(link_type), view=self)
        self._link = function_for_link_type(link_type)

    def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
        self._link(src, dst, self, spec)
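
Because the callback is now dispatched through a real method instead of a
``functools.partial`` bound at construction time, the view passes itself as the third
argument on every call. A standalone sketch of the shape (the names are stand-ins, not
Spack's classes):

    from typing import Callable, Optional

    def view_symlink(src: str, dst: str, view: "ViewSketch", spec: Optional[object]) -> None:
        print(f"symlink {src} -> {dst} in view at {view.root}")

    class ViewSketch:
        def __init__(self, root: str, link_fn: Callable) -> None:
            self.root = root
            self._link = link_fn  # stored once; no partial application needed

        def link(self, src: str, dst: str, spec: Optional[object] = None) -> None:
            self._link(src, dst, self, spec)

    ViewSketch("/views/default", view_symlink).link("/prefix/bin/tool", "/views/default/bin/tool")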

    def add_specs(self, *specs, **kwargs):
        """

@@ -281,7 +287,7 @@ class YamlFilesystemView(FilesystemView):
    def __init__(
        self,
        root: str,
        layout: "spack.directory_layout.DirectoryLayout",
        layout: spack.directory_layout.DirectoryLayout,
        *,
        projections: Optional[Dict] = None,
        ignore_conflicts: bool = False,
@@ -5,7 +5,6 @@
"""Definitions that control how Spack creates Spec hashes."""

import spack.deptypes as dt
import spack.repo

hashes = []

@@ -13,20 +12,17 @@
class SpecHashDescriptor:
    """This class defines how hashes are generated on Spec objects.

    Spec hashes in Spack are generated from a serialized (e.g., with
    YAML) representation of the Spec graph. The representation may only
    include certain dependency types, and it may optionally include a
    canonicalized hash of the package.py for each node in the graph.
    Spec hashes in Spack are generated from a serialized JSON representation of the DAG.
    The representation may only include certain dependency types, and it may optionally
    include a canonicalized hash of the ``package.py`` for each node in the graph.

    We currently use different hashes for different use cases."""
    """

    def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
    def __init__(self, depflag: dt.DepFlag, package_hash, name):
        self.depflag = depflag
        self.package_hash = package_hash
        self.name = name
        hashes.append(self)
        # Allow spec hashes to have an alternate computation method
        self.override = override

    @property
    def attr(self):

@@ -54,18 +50,6 @@ def __repr__(self):
        )


def _content_hash_override(spec):
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)
    return pkg.content_hash()


#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
    depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
)


# Deprecated hash types, no longer used, but needed to understand old serialized
# spec formats
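
With the ``override`` hook removed, every descriptor computes its hash uniformly from
the serialized node dictionaries. A standalone sketch of the slimmed-down constructor
(DepFlag here is a stand-in for ``spack.deptypes.DepFlag``):

    import enum

    class DepFlag(enum.IntFlag):  # stand-in for spack.deptypes.DepFlag
        BUILD = 1
        LINK = 2
        RUN = 4

    hashes = []

    class SpecHashDescriptor:
        def __init__(self, depflag: DepFlag, package_hash: bool, name: str):
            self.depflag = depflag
            self.package_hash = package_hash
            self.name = name
            hashes.append(self)  # registration side effect, as in the module above

    dag_hash = SpecHashDescriptor(DepFlag.BUILD | DepFlag.LINK | DepFlag.RUN, True, "hash")
    print([d.name for d in hashes])  # -> ['hash']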
@@ -372,8 +372,7 @@ def phase_tests(
    builder.pkg.test_suite.current_test_spec = builder.pkg.spec
    builder.pkg.test_suite.current_base_spec = builder.pkg.spec

    # TODO (post-34236): "test"->"test_" once remove deprecated methods
    have_tests = any(name.startswith("test") for name in method_names)
    have_tests = any(name.startswith("test_") for name in method_names)
    if have_tests:
        copy_test_files(builder.pkg, builder.pkg.spec)

@@ -477,16 +476,9 @@ def write_tested_status(self):
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
    wdir = "." if work_dir is None else work_dir
    tester = pkg.tester
    # TODO (post-34236): "test"->"test_" once remove deprecated methods
    assert test_name and test_name.startswith(
        "test"
    ), f"Test name must start with 'test' but {test_name} was provided"

    if test_name == "test":
        tty.warn(
            "{}: the 'test' method is deprecated. Convert stand-alone "
            "test(s) to methods with names starting 'test_'.".format(pkg.name)
        )
        "test_"
    ), f"Test name must start with 'test_' but {test_name} was provided"

    title = "test: {}: {}".format(test_name, purpose or "unspecified purpose")
    with fs.working_dir(wdir, create=True):

@@ -646,28 +638,11 @@ def test_functions(
    except spack.repo.UnknownPackageError:
        tty.debug(f"{vname}: virtual does not appear to have a package file")

    # TODO (post-34236): Remove if removing empty test method check
    def skip(line):
        # This should match the lines in the deprecated test() method
        ln = line.strip()
        return ln.startswith("#") or ("warn" in ln and "deprecated" in ln)

    doc_regex = r'\s+("""[\w\s\(\)\-\,\;\:]+""")'
    tests = []
    for clss in classes:
        methods = inspect.getmembers(clss, predicate=lambda x: inspect.isfunction(x))
        for name, test_fn in methods:
            # TODO (post-34236): "test"->"test_" once remove deprecated methods
            if not name.startswith("test"):
                continue

            # TODO (post-34236): Could remove empty method check once remove
            # TODO (post-34236): deprecated methods though some use cases,
            # TODO (post-34236): such as checking packages have actual, non-
            # TODO (post-34236): empty tests, may want this check to remain.
            source = re.sub(doc_regex, r"", inspect.getsource(test_fn)).splitlines()[1:]
            lines = [ln.strip() for ln in source if not skip(ln)]
            if not lines:
            if not name.startswith("test_"):
                continue

            tests.append((clss.__name__, test_fn))  # type: ignore[union-attr]
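
After this cleanup only methods literally named ``test_*`` are collected, and the
deprecated bare ``test()`` escape hatch (plus its empty-body heuristics) is gone. A
hedged sketch of a conforming stand-alone test in a package recipe (the package and
executable names are invented):

    from spack.package import *  # brings in Package, which(), etc.

    class Example(Package):  # hypothetical package, for illustration only
        """Minimal recipe showing the post-deprecation test naming."""

        def test_version_flag(self):
            """installed binary reports the expected version"""
            example = which(self.prefix.bin.example)
            out = example("--version", output=str.split, error=str.split)
            assert str(self.spec.version) in out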
@@ -2,8 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This module encapsulates package installation functionality.
"""This module encapsulates package installation functionality.

The PackageInstaller coordinates concurrent builds of packages for the same
Spack instance by leveraging the dependency DAG and file system locks. It

@@ -17,16 +16,18 @@
File system locks enable coordination such that no two processes attempt to
build the same or a failed dependency package.

Failures to install dependency packages result in removal of their dependents'
build tasks from the current process. A failure file is also written (and
locked) so that other processes can detect the failure and adjust their build
tasks accordingly.
If a dependency package fails to install, its dependents' tasks will be
removed from the installing process's queue. A failure file is also written
and locked. Other processes use this file to detect the failure and dequeue
its dependents.

This module supports the coordination of local and distributed concurrent
installations of packages in a Spack instance.
"""

import copy
import enum
import glob
import heapq
import io

@@ -35,6 +36,7 @@
import shutil
import sys
import time
import traceback
from collections import defaultdict
from gzip import GzipFile
from typing import Dict, Iterator, List, Optional, Set, Tuple, Union

@@ -42,6 +44,7 @@
import llnl.util.filesystem as fs
import llnl.util.lock as lk
import llnl.util.tty as tty
from llnl.string import ordinal
from llnl.util.lang import pretty_seconds
from llnl.util.tty.color import colorize
from llnl.util.tty.log import log_output

@@ -57,6 +60,7 @@
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
import spack.rewiring
import spack.spec
import spack.store
import spack.util.executable

@@ -70,25 +74,32 @@
#: were added (see https://docs.python.org/2/library/heapq.html).
_counter = itertools.count(0)

#: Build status indicating task has been added.
STATUS_ADDED = "queued"

#: Build status indicating the spec failed to install
STATUS_FAILED = "failed"
class BuildStatus(enum.Enum):
    """Different build (task) states."""

#: Build status indicating the spec is being installed (possibly by another
#: process)
STATUS_INSTALLING = "installing"
    #: Build status indicating task has been added/queued.
    QUEUED = enum.auto()

#: Build status indicating the spec was successfully installed
STATUS_INSTALLED = "installed"
    #: Build status indicating the spec failed to install
    FAILED = enum.auto()

#: Build status indicating the task has been popped from the queue
STATUS_DEQUEUED = "dequeued"
    #: Build status indicating the spec is being installed (possibly by another
    #: process)
    INSTALLING = enum.auto()

#: Build status indicating task has been removed (to maintain priority
#: queue invariants).
STATUS_REMOVED = "removed"
    #: Build status indicating the spec was successfully installed
    INSTALLED = enum.auto()

    #: Build status indicating the task has been popped from the queue
    DEQUEUED = enum.auto()

    #: Build status indicating task has been removed (to maintain priority
    #: queue invariants).
    REMOVED = enum.auto()

    def __str__(self):
        return f"{self.name.lower()}"
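
The string constants become a proper Enum whose ``__str__`` reproduces the old
lowercase values, so existing log messages keep their wording. A standalone check of
that equivalence:

    import enum

    class BuildStatus(enum.Enum):
        QUEUED = enum.auto()
        FAILED = enum.auto()
        INSTALLING = enum.auto()
        INSTALLED = enum.auto()
        DEQUEUED = enum.auto()
        REMOVED = enum.auto()

        def __str__(self):
            return f"{self.name.lower()}"

    # str() matches the removed constants, e.g. STATUS_ADDED == "queued"
    assert str(BuildStatus.QUEUED) == "queued"
    assert str(BuildStatus.INSTALLED) == "installed"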


def _write_timer_json(pkg, timer, cache):

@@ -101,13 +112,22 @@ def _write_timer_json(pkg, timer, cache):
        return


class InstallAction:
class ExecuteResult(enum.Enum):
    # Task succeeded
    SUCCESS = enum.auto()
    # Task failed
    FAILED = enum.auto()
    # Task is missing build spec and will be requeued
    MISSING_BUILD_SPEC = enum.auto()


class InstallAction(enum.Enum):
    #: Don't perform an install
    NONE = 0
    NONE = enum.auto()
    #: Do a standard install
    INSTALL = 1
    INSTALL = enum.auto()
    #: Do an overwrite install
    OVERWRITE = 2
    OVERWRITE = enum.auto()


class InstallStatus:

@@ -431,7 +451,7 @@ def _process_binary_cache_tarball(
    """
    with timer.measure("fetch"):
        download_result = binary_distribution.download_tarball(
            pkg.spec, unsigned, mirrors_for_spec
            pkg.spec.build_spec, unsigned, mirrors_for_spec
        )

    if download_result is None:

@@ -442,6 +462,11 @@ def _process_binary_cache_tarball(
    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)

        if pkg.spec.spliced:  # overwrite old metadata with new
            spack.store.STORE.layout.write_spec(
                pkg.spec, spack.store.STORE.layout.spec_file_path(pkg.spec)
            )

        if hasattr(pkg, "_post_buildcache_install_hook"):
            pkg._post_buildcache_install_hook()

@@ -677,7 +702,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
def package_id(spec: "spack.spec.Spec") -> str:
    """A "unique" package identifier for installation purposes

    The identifier is used to track build tasks, locks, install, and
    The identifier is used to track tasks, locks, install, and
    failure statuses.

    The identifier needs to distinguish between combinations of compilers

@@ -736,14 +761,14 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
        )

    def __repr__(self) -> str:
        """Returns a formal representation of the build request."""
        """Return a formal representation of the build request."""
        rep = f"{self.__class__.__name__}("
        for attr, value in self.__dict__.items():
            rep += f"{attr}={value.__repr__()}, "
        return f"{rep.strip(', ')})"

    def __str__(self) -> str:
        """Returns a printable version of the build request."""
        """Return a printable version of the build request."""
        return f"package={self.pkg.name}, install_args={self.install_args}"

    def _add_default_args(self) -> None:

@@ -840,37 +865,42 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
            yield dep


class BuildTask:
    """Class for representing the build task for a package."""
class Task:
    """Base class for representing a task for a package."""

    def __init__(
        self,
        pkg: "spack.package_base.PackageBase",
        request: Optional[BuildRequest],
        compiler: bool,
        start: float,
        attempts: int,
        status: str,
        installed: Set[str],
        request: BuildRequest,
        *,
        compiler: bool = False,
        start: float = 0.0,
        attempts: int = 0,
        status: BuildStatus = BuildStatus.QUEUED,
        installed: Set[str] = set(),
    ):
        """
        Instantiate a build task for a package.
        Instantiate a task for a package.

        Args:
            pkg: the package to be built and installed
            request: the associated install request where ``None`` can be
                used to indicate the package was explicitly requested by the user
            compiler: whether task is for a bootstrap compiler
            request: the associated install request
            start: the initial start time for the package, in seconds
            attempts: the number of attempts to install the package
            attempts: the number of attempts to install the package, which
                should be 0 when the task is initially instantiated
            status: the installation status
            installed: the identifiers of packages that have
            installed: the (string) identifiers of packages that have
                been installed so far

        Raises:
            ``InstallError`` if the build status is incompatible with the task
            ``TypeError`` if provided an argument of the wrong type
            ``ValueError`` if provided an argument with the wrong value or state
        """

        # Ensure dealing with a package that has a concrete spec
        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError(f"{str(pkg)} must be a package")
            raise TypeError(f"{str(pkg)} must be a package")

        self.pkg = pkg
        if not self.pkg.spec.concrete:

@@ -881,26 +911,34 @@ def __init__(

        # The explicit build request associated with the package
        if not isinstance(request, BuildRequest):
            raise ValueError(f"{str(pkg)} must have a build request")

            raise TypeError(f"{request} is not a valid build request")
        self.request = request

        # Initialize the status to an active state. The status is used to
        # ensure priority queue invariants when tasks are "removed" from the
        # queue.
        if status == STATUS_REMOVED:
            raise spack.error.InstallError(
                f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg
            )
        if not isinstance(status, BuildStatus):
            raise TypeError(f"{status} is not a valid build status")

        # The initial build task cannot have status "removed".
        if attempts == 0 and status == BuildStatus.REMOVED:
            raise spack.error.InstallError(
                f"Cannot create a task for {self.pkg_id} with status '{status}'", pkg=pkg
            )
        self.status = status

        # Package is associated with a bootstrap compiler
        self.compiler = compiler
        # cache the PID, which is used for distributed build messages in self.execute
        self.pid = os.getpid()

        # The initial start time for processing the spec
        self.start = start

        if not isinstance(installed, set):
            raise TypeError(
                f"BuildTask constructor requires 'installed' be a 'set', "
                f"not '{installed.__class__.__name__}'."
            )

        # Set of dependents, which needs to include the requesting package
        # to support tracking of parallel, multi-spec, environment installs.
        self.dependents = set(get_dependent_ids(self.pkg.spec))

@@ -921,16 +959,22 @@ def __init__(
        )

        # List of uninstalled dependencies, which is used to establish
        # the priority of the build task.
        #
        # the priority of the task.
        self.uninstalled_deps = set(
            pkg_id for pkg_id in self.dependencies if pkg_id not in installed
        )

        # Ensure key sequence-related properties are updated accordingly.
        self.attempts = 0
        self.attempts = attempts
        self._update()

    def execute(self, install_status: InstallStatus) -> ExecuteResult:
        """Execute the work of this task.

        The ``install_status`` is an ``InstallStatus`` object used to format progress reporting for
        this task in the context of the full ``BuildRequest``."""
        raise NotImplementedError

    def __eq__(self, other):
        return self.key == other.key

@@ -950,14 +994,14 @@ def __ne__(self, other):
        return self.key != other.key

    def __repr__(self) -> str:
        """Returns a formal representation of the build task."""
        """Returns a formal representation of the task."""
        rep = f"{self.__class__.__name__}("
        for attr, value in self.__dict__.items():
            rep += f"{attr}={value.__repr__()}, "
        return f"{rep.strip(', ')})"

    def __str__(self) -> str:
        """Returns a printable version of the build task."""
        """Returns a printable version of the task."""
        dependencies = f"#dependencies={len(self.dependencies)}"
        return "priority={0}, status={1}, start={2}, {3}".format(
            self.priority, self.status, self.start, dependencies

@@ -974,8 +1018,7 @@ def _update(self) -> None:

    def add_dependent(self, pkg_id: str) -> None:
        """
        Ensure the dependent package id is in the task's list so it will be
        properly updated when this package is installed.
        Ensure the package is in this task's ``dependents`` list.

        Args:
            pkg_id: package identifier of the dependent package

@@ -984,6 +1027,20 @@ def add_dependent(self, pkg_id: str) -> None:
            tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}")
        self.dependents.add(pkg_id)

    def add_dependency(self, pkg_id, installed=False):
        """
        Ensure the package is in this task's ``dependencies`` list.

        Args:
            pkg_id (str): package identifier of the dependency package
            installed (bool): install status of the dependency package
        """
        if pkg_id != self.pkg_id and pkg_id not in self.dependencies:
            tty.debug(f"Adding {pkg_id} as a dependency of {self.pkg_id}")
            self.dependencies.add(pkg_id)
            if not installed:
                self.uninstalled_deps.add(pkg_id)
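
``add_dependency`` is the mirror image of ``add_dependent``; the splice requeue logic
further down uses the pair to wire a build spec into a deployed spec's task. A runnable
sketch with a minimal stand-in class (the package ids are hypothetical):

    from typing import Set

    class TaskSketch:
        """Stand-in for Task, modeling just the dependency bookkeeping."""

        def __init__(self, pkg_id: str) -> None:
            self.pkg_id = pkg_id
            self.dependents: Set[str] = set()
            self.dependencies: Set[str] = set()
            self.uninstalled_deps: Set[str] = set()

        def add_dependent(self, pkg_id: str) -> None:
            if pkg_id != self.pkg_id:
                self.dependents.add(pkg_id)

        def add_dependency(self, pkg_id: str, installed: bool = False) -> None:
            if pkg_id != self.pkg_id and pkg_id not in self.dependencies:
                self.dependencies.add(pkg_id)
                if not installed:
                    self.uninstalled_deps.add(pkg_id)

        @property
        def priority(self) -> int:
            return len(self.uninstalled_deps)

    build_task = TaskSketch("gcc-13.2.0-abcdef")    # builds the build_spec
    spliced_task = TaskSketch("zlib-1.3-012345")    # deploys the spliced spec
    build_task.add_dependent(spliced_task.pkg_id)
    spliced_task.add_dependency(build_task.pkg_id)  # not installed yet
    assert spliced_task.priority == 1               # waits for the build spec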

    def flag_installed(self, installed: List[str]) -> None:
        """
        Ensure the dependency is not considered to still be uninstalled.

@@ -1000,6 +1057,39 @@ def flag_installed(self, installed: List[str]) -> None:
                level=2,
            )

    def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
        """
        Create and ensure proper access controls for the install directory.
        Write a small metadata file with the current spack environment.

        Args:
            pkg: the package to be built and installed
        """
        # Move to a module level method.
        if not os.path.exists(pkg.spec.prefix):
            path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
            tty.debug(f"Creating the installation directory {path}")
            spack.store.STORE.layout.create_install_directory(pkg.spec)
        else:
            # Set the proper group for the prefix
            group = prefs.get_package_group(pkg.spec)
            if group:
                fs.chgrp(pkg.spec.prefix, group)

            # Set the proper permissions.
            # This has to be done after group because changing groups blows
            # away the sticky group bit on the directory
            mode = os.stat(pkg.spec.prefix).st_mode
            perms = prefs.get_package_dir_permissions(pkg.spec)
            if mode != perms:
                os.chmod(pkg.spec.prefix, perms)

            # Ensure the metadata path exists as well
            fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)

        # Always write host environment - we assume this can change
        spack.store.STORE.layout.write_host_environment(pkg.spec)

    @property
    def explicit(self) -> bool:
        return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])

@@ -1030,7 +1120,7 @@ def key(self) -> Tuple[int, int]:
        """The key is the tuple (# uninstalled dependencies, sequence)."""
        return (self.priority, self.sequence)

    def next_attempt(self, installed) -> "BuildTask":
    def next_attempt(self, installed) -> "Task":
        """Create a new, updated task for the next installation attempt."""
        task = copy.copy(self)
        task._update()

@@ -1044,6 +1134,100 @@ def priority(self):
        return len(self.uninstalled_deps)


class BuildTask(Task):
    """Class for representing a build task for a package."""

    def execute(self, install_status):
        """
        Perform the installation of the requested spec and/or dependency
        represented by the build task.
        """
        install_args = self.request.install_args
        tests = install_args.get("tests")
        unsigned = install_args.get("unsigned")

        pkg, pkg_id = self.pkg, self.pkg_id

        tty.msg(install_msg(pkg_id, self.pid, install_status))
        self.start = self.start or time.time()
        self.status = BuildStatus.INSTALLING

        # Use the binary cache if requested
        if self.use_cache:
            if _install_from_cache(pkg, self.explicit, unsigned):
                return ExecuteResult.SUCCESS
            elif self.cache_only:
                raise spack.error.InstallError(
                    "No binary found when cache-only was specified", pkg=pkg
                )
            else:
                tty.msg(f"No binary for {pkg_id} found: installing from source")

        pkg.run_tests = tests is True or tests and pkg.name in tests

        # hook that allows tests to inspect the Package before installation
        # see unit_test_check() docs.
        if not pkg.unit_test_check():
            return ExecuteResult.FAILED

        try:
            # Create stage object now and let it be serialized for the child process. That
            # way monkeypatch in tests works correctly.
            pkg.stage

            self._setup_install_dir(pkg)

            # Create a child process to do the actual installation.
            # Preserve verbosity settings across installs.
            spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
                pkg, build_process, install_args
            )

            # Note: PARENT of the build process adds the new package to
            # the database, so that we don't need to re-read from file.
            spack.store.STORE.db.add(pkg.spec, explicit=self.explicit)
        except spack.error.StopPhase as e:
            # A StopPhase exception means that do_install was asked to
            # stop early from clients, and is not an error at this point
            pid = f"{self.pid}: " if tty.show_pid() else ""
            tty.debug(f"{pid}{str(e)}")
            tty.debug(f"Package stage directory: {pkg.stage.source_path}")
            return ExecuteResult.SUCCESS


class RewireTask(Task):
    """Class for representing a rewire task for a package."""

    def execute(self, install_status):
        """Execute rewire task.

        Rewire tasks are executed either by rewiring an already installed
        self.pkg.spec.build_spec, or by downloading and rewiring a binary for it.

        If the build spec is neither installed nor available as a binary, return
        ExecuteResult.MISSING_BUILD_SPEC. This will prompt the Installer to requeue
        the task with a dependency on the BuildTask to install self.pkg.spec.build_spec.
        """
        oldstatus = self.status
        self.status = BuildStatus.INSTALLING
        tty.msg(install_msg(self.pkg_id, self.pid, install_status))
        self.start = self.start or time.time()
        if not self.pkg.spec.build_spec.installed:
            try:
                install_args = self.request.install_args
                unsigned = install_args.get("unsigned")
                _process_binary_cache_tarball(self.pkg, explicit=self.explicit, unsigned=unsigned)
                _print_installed_pkg(self.pkg.prefix)
                return ExecuteResult.SUCCESS
            except BaseException as e:
                tty.error(f"Failed to rewire {self.pkg.spec} from binary. {e}")
                self.status = oldstatus
                return ExecuteResult.MISSING_BUILD_SPEC
        spack.rewiring.rewire_node(self.pkg.spec, self.explicit)
        _print_installed_pkg(self.pkg.prefix)
        return ExecuteResult.SUCCESS
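
The three-valued ``ExecuteResult`` lets the installer react differently when a rewire
cannot find its build spec. A simplified, standalone sketch of the dispatch; the real
logic lives in ``PackageInstaller``:

    import enum

    class ExecuteResult(enum.Enum):
        SUCCESS = enum.auto()
        FAILED = enum.auto()
        MISSING_BUILD_SPEC = enum.auto()

    def react(result: ExecuteResult) -> str:
        # Mirrors the behavior described in the docstring above.
        if result is ExecuteResult.SUCCESS:
            return "mark installed and flag dependents"
        if result is ExecuteResult.MISSING_BUILD_SPEC:
            return "requeue with a dependency on the build spec's BuildTask"
        return "record failure and dequeue dependents"

    print(react(ExecuteResult.MISSING_BUILD_SPEC))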


class PackageInstaller:
    """
    Class for managing the install process for a Spack instance based on a bottom-up DAG approach.

@@ -1137,11 +1321,11 @@ def __init__(
        # List of build requests
        self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]

        # Priority queue of build tasks
        self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
        # Priority queue of tasks
        self.build_pq: List[Tuple[Tuple[int, int], Task]] = []

        # Mapping of unique package ids to build task
        self.build_tasks: Dict[str, BuildTask] = {}
        # Mapping of unique package ids to task
        self.build_tasks: Dict[str, Task] = {}

        # Cache of package locks for failed packages, keyed on package's ids
        self.failed: Dict[str, Optional[lk.Lock]] = {}

@@ -1162,6 +1346,9 @@ def __init__(
        # fast then that option applies to all build requests.
        self.fail_fast = False

        # Initializing all_dependencies to empty. This will be set later in _init_queue.
        self.all_dependencies: Dict[str, Set[str]] = {}

    def __repr__(self) -> str:
        """Returns a formal representation of the package installer."""
        rep = f"{self.__class__.__name__}("

@@ -1180,23 +1367,19 @@ def __str__(self) -> str:
    def _add_init_task(
        self,
        pkg: "spack.package_base.PackageBase",
        request: Optional[BuildRequest],
        is_compiler: bool,
        request: BuildRequest,
        all_deps: Dict[str, Set[str]],
    ) -> None:
        """
        Creates and queus the initial build task for the package.
        Creates and queues the initial task for the package.

        Args:
            pkg: the package to be built and installed
            request (BuildRequest or None): the associated install request
                where ``None`` can be used to indicate the package was
                explicitly requested by the user
            is_compiler (bool): whether task is for a bootstrap compiler
            all_deps (defaultdict(set)): dictionary of all dependencies and
                associated dependents
            request: the associated install request
            all_deps: dictionary of all dependencies and associated dependents
        """
        task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
        cls = RewireTask if pkg.spec.spliced else BuildTask
        task = cls(pkg, request=request, status=BuildStatus.QUEUED, installed=self.installed)
        for dep_id in task.dependencies:
            all_deps[dep_id].add(package_id(pkg.spec))

@@ -1270,7 +1453,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
            else:
                lock.release_read()

    def _prepare_for_install(self, task: BuildTask) -> None:
    def _prepare_for_install(self, task: Task) -> None:
        """
        Check the database and leftover installation directories/files and
        prepare for a new install attempt for an uninstalled package.

@@ -1278,7 +1461,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
        and ensuring the database is up-to-date.

        Args:
            task (BuildTask): the build task whose associated package is
            task: the task whose associated package is
                being checked
        """
        install_args = task.request.install_args

@@ -1329,7 +1512,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
            spack.store.STORE.db.update_explicit(task.pkg.spec, True)

    def _cleanup_all_tasks(self) -> None:
        """Cleanup all build tasks to include releasing their locks."""
        """Cleanup all tasks to include releasing their locks."""
        for pkg_id in self.locks:
            self._release_lock(pkg_id)

@@ -1361,7 +1544,7 @@ def _cleanup_failed(self, pkg_id: str) -> None:

    def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
        """
        Cleanup the build task for the spec
        Cleanup the task for the spec

        Args:
            pkg: the package being installed

@@ -1433,7 +1616,7 @@ def _ensure_locked(

        if lock_type == "read":
            # Wait until the other process finishes if there are no more
            # build tasks with priority 0 (i.e., with no uninstalled
            # tasks with priority 0 (i.e., with no uninstalled
            # dependencies).
            no_p0 = len(self.build_tasks) == 0 or not self._next_is_pri0()
            timeout = None if no_p0 else 3.0

@@ -1485,6 +1668,33 @@ def _ensure_locked(
        self.locks[pkg_id] = (lock_type, lock)
        return self.locks[pkg_id]

    def _requeue_with_build_spec_tasks(self, task):
        """Requeue the task and its missing build spec dependencies"""
        # Full install of the build_spec is necessary because it didn't already exist somewhere
        spec = task.pkg.spec
        for dep in spec.build_spec.traverse():
            dep_pkg = dep.package

            dep_id = package_id(dep)
            if dep_id not in self.build_tasks:
                self._add_init_task(dep_pkg, task.request, self.all_dependencies)

            # Clear any persistent failure markings _unless_ they are
            # associated with another process in this parallel build
            # of the spec.
            spack.store.STORE.failure_tracker.clear(dep, force=False)

        # Queue the build spec.
        build_pkg_id = package_id(spec.build_spec)
        build_spec_task = self.build_tasks[build_pkg_id]
        spec_pkg_id = package_id(spec)
        spec_task = task.next_attempt(self.installed)
        spec_task.status = BuildStatus.QUEUED
        # Convey a build spec as a dependency of a deployed spec.
        build_spec_task.add_dependent(spec_pkg_id)
        spec_task.add_dependency(build_pkg_id)
        self._push_task(spec_task)

    def _add_tasks(self, request: BuildRequest, all_deps):
        """Add tasks to the priority queue for the given build request.

@@ -1514,7 +1724,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):

            dep_id = package_id(dep)
            if dep_id not in self.build_tasks:
                self._add_init_task(dep_pkg, request, False, all_deps)
                self._add_init_task(dep_pkg, request, all_deps=all_deps)

            # Clear any persistent failure markings _unless_ they are
            # associated with another process in this parallel build

@@ -1532,80 +1742,29 @@ def _add_tasks(self, request: BuildRequest, all_deps):
        self._check_deps_status(request)

        # Now add the package itself, if appropriate
        self._add_init_task(request.pkg, request, False, all_deps)
        self._add_init_task(request.pkg, request, all_deps=all_deps)

        # Ensure if one request is to fail fast then all requests will.
        fail_fast = bool(request.install_args.get("fail_fast"))
        self.fail_fast = self.fail_fast or fail_fast

    def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
    def _install_task(self, task: Task, install_status: InstallStatus) -> None:
        """
        Perform the installation of the requested spec and/or dependency
        represented by the build task.
        represented by the task.

        Args:
            task: the installation build task for a package
            task: the installation task for a package
            install_status: the installation status for the package"""

        explicit = task.explicit
        install_args = task.request.install_args
        cache_only = task.cache_only
        use_cache = task.use_cache
        tests = install_args.get("tests", False)
        assert isinstance(tests, (bool, list))  # make mypy happy.
        unsigned: Optional[bool] = install_args.get("unsigned")

        pkg, pkg_id = task.pkg, task.pkg_id

        tty.msg(install_msg(pkg_id, self.pid, install_status))
        task.start = task.start or time.time()
        task.status = STATUS_INSTALLING

        # Use the binary cache if requested
        if use_cache:
            if _install_from_cache(pkg, explicit, unsigned):
                self._update_installed(task)
                return
            elif cache_only:
                raise spack.error.InstallError(
                    "No binary found when cache-only was specified", pkg=pkg
                )
            else:
                tty.msg(f"No binary for {pkg_id} found: installing from source")

        pkg.run_tests = tests if isinstance(tests, bool) else pkg.name in tests

        # hook that allows tests to inspect the Package before installation
        # see unit_test_check() docs.
        if not pkg.unit_test_check():
            return

        try:
            self._setup_install_dir(pkg)

            # Create stage object now and let it be serialized for the child process. That
            # way monkeypatch in tests works correctly.
            pkg.stage

            # Create a child process to do the actual installation.
            # Preserve verbosity settings across installs.
            spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
                pkg, build_process, install_args
            )
            # Note: PARENT of the build process adds the new package to
            # the database, so that we don't need to re-read from file.
            spack.store.STORE.db.add(pkg.spec, explicit=explicit)
|
||||
|
||||
except spack.error.StopPhase as e:
|
||||
# A StopPhase exception means that the installer was asked to stop early from clients,
|
||||
# and is not an error at this point
|
||||
pid = f"{self.pid}: " if tty.show_pid() else ""
|
||||
tty.debug(f"{pid}{str(e)}")
|
||||
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
|
||||
rc = task.execute(install_status)
|
||||
if rc == ExecuteResult.MISSING_BUILD_SPEC:
|
||||
self._requeue_with_build_spec_tasks(task)
|
||||
else: # if rc == ExecuteResult.SUCCESS or rc == ExecuteResult.FAILED
|
||||
self._update_installed(task)
|
||||
|
||||
def _next_is_pri0(self) -> bool:
|
||||
"""
|
||||
Determine if the next build task has priority 0
|
||||
Determine if the next task has priority 0
|
||||
|
||||
Return:
|
||||
True if it does, False otherwise
|
||||
@@ -1615,31 +1774,31 @@ def _next_is_pri0(self) -> bool:
|
||||
task = self.build_pq[0][1]
|
||||
return task.priority == 0
|
||||
|
||||
def _pop_task(self) -> Optional[BuildTask]:
|
||||
def _pop_task(self) -> Optional[Task]:
|
||||
"""
|
||||
Remove and return the lowest priority build task.
|
||||
Remove and return the lowest priority task.
|
||||
|
||||
Source: Variant of function at docs.python.org/2/library/heapq.html
|
||||
"""
|
||||
while self.build_pq:
|
||||
task = heapq.heappop(self.build_pq)[1]
|
||||
if task.status != STATUS_REMOVED:
|
||||
if task.status != BuildStatus.REMOVED:
|
||||
del self.build_tasks[task.pkg_id]
|
||||
task.status = STATUS_DEQUEUED
|
||||
task.status = BuildStatus.DEQUEUED
|
||||
return task
|
||||
return None
|
||||
|
||||
def _push_task(self, task: BuildTask) -> None:
|
||||
def _push_task(self, task: Task) -> None:
|
||||
"""
|
||||
Push (or queue) the specified build task for the package.
|
||||
Push (or queue) the specified task for the package.
|
||||
|
||||
Source: Customization of "add_task" function at
|
||||
docs.python.org/2/library/heapq.html
|
||||
|
||||
Args:
|
||||
task: the installation build task for a package
|
||||
task: the installation task for a package
|
||||
"""
|
||||
msg = "{0} a build task for {1} with status '{2}'"
|
||||
msg = "{0} a task for {1} with status '{2}'"
|
||||
skip = "Skipping requeue of task for {0}: {1}"
|
||||
|
||||
# Ensure do not (re-)queue installed or failed packages whose status
|
||||
@@ -1652,9 +1811,11 @@ def _push_task(self, task: BuildTask) -> None:
|
||||
tty.debug(skip.format(task.pkg_id, "failed"))
|
||||
return
|
||||
|
||||
# Remove any associated build task since its sequence will change
|
||||
# Remove any associated task since its sequence will change
|
||||
self._remove_task(task.pkg_id)
|
||||
desc = "Queueing" if task.attempts == 0 else "Requeueing"
|
||||
desc = (
|
||||
"Queueing" if task.attempts == 1 else f"Requeueing ({ordinal(task.attempts)} attempt)"
|
||||
)
|
||||
tty.debug(msg.format(desc, task.pkg_id, task.status))
|
||||
|
||||
# Now add the new task to the queue with a new sequence number to
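The new "Requeueing" message relies on an ordinal() helper to render the attempt count ("2nd attempt", "3rd attempt", ...). A minimal sketch of such a helper, assuming standard English suffix rules (the helper itself is imported elsewhere in Spack and is not shown in this diff):

    def ordinal(n: int) -> str:
        # Hypothetical stand-in for the ordinal() used above: 1 -> "1st",
        # 2 -> "2nd", 3 -> "3rd", with the 11/12/13 exceptions handled.
        if 10 <= n % 100 <= 13:
            suffix = "th"
        else:
            suffix = {1: "st", 2: "nd", 3: "rd"}.get(n % 10, "th")
        return f"{n}{suffix}"

    assert ordinal(2) == "2nd" and ordinal(12) == "12th" and ordinal(23) == "23rd"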
@@ -1685,9 +1846,9 @@ def _release_lock(self, pkg_id: str) -> None:
except Exception as exc:
tty.warn(err.format(exc.__class__.__name__, ltype, pkg_id, str(exc)))

def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
def _remove_task(self, pkg_id: str) -> Optional[Task]:
"""
Mark the existing package build task as being removed and return it.
Mark the existing package task as being removed and return it.
Raises KeyError if not found.

Source: Variant of function at docs.python.org/2/library/heapq.html
@@ -1696,71 +1857,39 @@ def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
pkg_id: identifier for the package to be removed
"""
if pkg_id in self.build_tasks:
tty.debug(f"Removing build task for {pkg_id} from list")
tty.debug(f"Removing task for {pkg_id} from list")
task = self.build_tasks.pop(pkg_id)
task.status = STATUS_REMOVED
task.status = BuildStatus.REMOVED
return task
else:
return None
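Both _pop_task and _remove_task above follow the lazy-deletion recipe from the Python heapq documentation they cite: an entry is never deleted from the middle of the heap, only flagged, and stale entries are skipped at pop time. A self-contained sketch of that pattern, independent of Spack's Task classes:

    import heapq
    import itertools

    REMOVED = "<removed>"          # sentinel marking a stale heap entry
    counter = itertools.count()    # sequence number breaks priority ties

    pq = []        # the heap: [priority, sequence, item] entries
    entries = {}   # item -> live heap entry, for O(1) removal

    def push(item, priority):
        if item in entries:        # re-queueing gets a fresh sequence number
            remove(item)
        entry = [priority, next(counter), item]
        entries[item] = entry
        heapq.heappush(pq, entry)

    def remove(item):
        entries.pop(item)[2] = REMOVED   # leave the stale entry in the heap

    def pop():
        while pq:
            _, _, item = heapq.heappop(pq)
            if item is not REMOVED:      # skip entries flagged as removed
                del entries[item]
                return item
        return None

    push("zlib", 0)
    push("cmake", 1)
    remove("cmake")
    assert pop() == "zlib" and pop() is None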
def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None:
def _requeue_task(self, task: Task, install_status: InstallStatus) -> None:
"""
Requeues a task that appears to be in progress by another process.

Args:
task (BuildTask): the installation build task for a package
task (Task): the installation task for a package
"""
if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]:
if task.status not in [BuildStatus.INSTALLED, BuildStatus.INSTALLING]:
tty.debug(
f"{install_msg(task.pkg_id, self.pid, install_status)} "
"in progress by another process"
)

new_task = task.next_attempt(self.installed)
new_task.status = STATUS_INSTALLING
new_task.status = BuildStatus.INSTALLING
self._push_task(new_task)

def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
"""
Create and ensure proper access controls for the install directory.
Write a small metadata file with the current spack environment.

Args:
pkg: the package to be built and installed
"""
if not os.path.exists(pkg.spec.prefix):
path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
tty.debug(f"Creating the installation directory {path}")
spack.store.STORE.layout.create_install_directory(pkg.spec)
else:
# Set the proper group for the prefix
group = prefs.get_package_group(pkg.spec)
if group:
fs.chgrp(pkg.spec.prefix, group)

# Set the proper permissions.
# This has to be done after group because changing groups blows
# away the sticky group bit on the directory
mode = os.stat(pkg.spec.prefix).st_mode
perms = prefs.get_package_dir_permissions(pkg.spec)
if mode != perms:
os.chmod(pkg.spec.prefix, perms)

# Ensure the metadata path exists as well
fs.mkdirp(spack.store.STORE.layout.metadata_path(pkg.spec), mode=perms)

# Always write host environment - we assume this can change
spack.store.STORE.layout.write_host_environment(pkg.spec)

def _update_failed(
self, task: BuildTask, mark: bool = False, exc: Optional[BaseException] = None
self, task: Task, mark: bool = False, exc: Optional[BaseException] = None
) -> None:
"""
Update the task and transitive dependents as failed; optionally mark
externally as failed; and remove associated build tasks.
externally as failed; and remove associated tasks.

Args:
task: the build task for the failed package
task: the task for the failed package
mark: ``True`` if the package and its dependencies are to
be marked as "failed", otherwise, ``False``
exc: optional exception if associated with the failure
@@ -1772,34 +1901,34 @@ def _update_failed(
self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
else:
self.failed[pkg_id] = None
task.status = STATUS_FAILED
task.status = BuildStatus.FAILED

for dep_id in task.dependents:
if dep_id in self.build_tasks:
tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed")
# Ensure the dependent's uninstalled dependents are
# up-to-date and their build tasks removed.
# up-to-date and their tasks removed.
dep_task = self.build_tasks[dep_id]
self._update_failed(dep_task, mark)
self._remove_task(dep_id)
else:
tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed")
tty.debug(f"No task for {dep_id} to skip since {pkg_id} failed")

def _update_installed(self, task: BuildTask) -> None:
def _update_installed(self, task: Task) -> None:
"""
Mark the task as installed and ensure dependent build tasks are aware.
Mark the task as installed and ensure dependent tasks are aware.

Args:
task (BuildTask): the build task for the installed package
task: the task for the installed package
"""
task.status = STATUS_INSTALLED
task.status = BuildStatus.INSTALLED
self._flag_installed(task.pkg, task.dependents)

def _flag_installed(
self, pkg: "spack.package_base.PackageBase", dependent_ids: Optional[Set[str]] = None
) -> None:
"""
Flag the package as installed and ensure known by all build tasks of
Flag the package as installed and ensure known by all tasks of
known dependents.

Args:
@@ -1827,7 +1956,7 @@ def _flag_installed(
dep_task = self.build_tasks[dep_id]
self._push_task(dep_task.next_attempt(self.installed))
else:
tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success")
tty.debug(f"{dep_id} has no task to update for {pkg_id}'s success")

def _init_queue(self) -> None:
"""Initialize the build queue from the list of build requests."""
@@ -1846,8 +1975,9 @@ def _init_queue(self) -> None:
task = self.build_tasks[dep_id]
for dependent_id in dependents.difference(task.dependents):
task.add_dependent(dependent_id)
self.all_dependencies = all_dependencies

def _install_action(self, task: BuildTask) -> int:
def _install_action(self, task: Task) -> InstallAction:
"""
Determine whether the installation should be overwritten (if it already
exists) or skipped (if has been handled by another process).
@@ -1995,7 +2125,6 @@ def install(self) -> None:
self._update_installed(task)
path = spack.util.path.debug_padded_filter(pkg.prefix)
_print_installed_pkg(path)

else:
# At this point we've failed to get a write or a read
# lock, which means another process has taken a write
@@ -2035,8 +2164,6 @@ def install(self) -> None:
# wrapper -- silence mypy
OverwriteInstall(self, spack.store.STORE.db, task, install_status).install()  # type: ignore[arg-type] # noqa: E501

self._update_installed(task)

# If we installed then we should keep the prefix
stop_before_phase = getattr(pkg, "stop_before_phase", None)
last_phase = getattr(pkg, "last_phase", None)
@@ -2080,13 +2207,15 @@ def install(self) -> None:
)
# Terminate if requested to do so on the first failure.
if self.fail_fast:
raise spack.error.InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
raise spack.error.InstallError(
f"{fail_fast_err}: {str(exc)}", pkg=pkg
) from exc

# Terminate when a single build request has failed, or summarize errors later.
if task.is_build_request:
if single_requested_spec:
raise
failed_build_requests.append((pkg, pkg_id, str(exc)))
failed_build_requests.append((pkg, pkg_id, exc))

finally:
# Remove the install prefix if anything went wrong during
@@ -2096,7 +2225,8 @@ def install(self) -> None:

# Perform basic task cleanup for the installed spec to
# include downgrading the write to a read lock
self._cleanup_task(pkg)
if pkg.spec.installed:
self._cleanup_task(pkg)

# Cleanup, which includes releasing all of the read locks
self._cleanup_all_tasks()
@@ -2112,6 +2242,9 @@ def install(self) -> None:
if failed_build_requests or missing:
for _, pkg_id, err in failed_build_requests:
tty.error(f"{pkg_id}: {err}")
if spack.error.debug:
# note: in python 3.10+ this can just be print_exception(err)
traceback.print_exception(type(err), err, err.__traceback__)

for _, pkg_id in missing:
tty.error(f"{pkg_id}: Package was not installed")
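The note about Python 3.10 refers to traceback.print_exception() accepting a bare exception object from 3.10 onward; older interpreters need the (type, value, traceback) triple. A small version-guarded sketch of the compatible call:

    import sys
    import traceback

    def print_exception_compat(err: BaseException) -> None:
        if sys.version_info >= (3, 10):
            traceback.print_exception(err)  # 3.10+: exception instance only
        else:
            traceback.print_exception(type(err), err, err.__traceback__)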
@@ -2365,6 +2498,15 @@ def build_process(pkg: "spack.package_base.PackageBase", install_args: dict) ->

def deprecate(spec: "spack.spec.Spec", deprecator: "spack.spec.Spec", link_fn) -> None:
"""Deprecate this package in favor of deprecator spec"""
# Here we assume we don't deprecate across different stores, and that same hash
# means same binary artifacts
if spec.dag_hash() == deprecator.dag_hash():
return

# We can't really have control over external specs, and cannot link anything in their place
if spec.external:
return

# Install deprecator if it isn't installed already
if not spack.store.STORE.db.query(deprecator):
PackageInstaller([deprecator.package], explicit=True).install()
@@ -2395,7 +2537,7 @@ def __init__(
self,
installer: PackageInstaller,
database: spack.database.Database,
task: BuildTask,
task: Task,
install_status: InstallStatus,
):
self.installer = installer

@@ -102,9 +102,6 @@

spack_ld_library_path = os.environ.get("LD_LIBRARY_PATH", "")

#: Whether to print backtraces on error
SHOW_BACKTRACE = False


def add_all_commands(parser):
"""Add all spack subcommands to the parser."""
@@ -492,6 +489,7 @@ def make_argument_parser(**kwargs):
help="add stacktraces to all printed statements",
)
parser.add_argument(
"-t",
"--backtrace",
action="store_true",
default="SPACK_BACKTRACE" in os.environ,
@@ -527,8 +525,7 @@ def setup_main_options(args):

if args.debug or args.backtrace:
spack.error.debug = True
global SHOW_BACKTRACE
SHOW_BACKTRACE = True
spack.error.SHOW_BACKTRACE = True

if args.debug:
spack.util.debug.register_interrupt_handler()
@@ -1021,19 +1018,19 @@ def main(argv=None):
e.die()  # gracefully die on any SpackErrors

except KeyboardInterrupt:
if spack.config.get("config:debug") or SHOW_BACKTRACE:
if spack.config.get("config:debug") or spack.error.SHOW_BACKTRACE:
raise
sys.stderr.write("\n")
tty.error("Keyboard interrupt.")
return signal.SIGINT.value

except SystemExit as e:
if spack.config.get("config:debug") or SHOW_BACKTRACE:
if spack.config.get("config:debug") or spack.error.SHOW_BACKTRACE:
traceback.print_exc()
return e.code

except Exception as e:
if spack.config.get("config:debug") or SHOW_BACKTRACE:
if spack.config.get("config:debug") or spack.error.SHOW_BACKTRACE:
raise
tty.error(e)
return 3

@@ -29,7 +29,6 @@
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
@@ -89,9 +88,8 @@ def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
'"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
url, ", ".join(supported_url_schemes)
)
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."
)
return Mirror(url)
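The rewritten check is a standard urllib.parse scheme validation. A self-contained sketch, with an illustrative scheme set standing in for the module's supported_url_schemes:

    import urllib.parse

    # Illustrative only; the real supported_url_schemes is defined in this module.
    SUPPORTED_SCHEMES = ("file", "http", "https", "ftp", "s3", "gs", "oci")

    def validate_mirror_url(url: str) -> str:
        scheme = urllib.parse.urlparse(url).scheme
        if scheme not in SUPPORTED_SCHEMES:
            raise ValueError(
                f'"{url}" is not a valid mirror URL. '
                f"Scheme must be one of {SUPPORTED_SCHEMES}."
            )
        return url

    validate_mirror_url("s3://my-bucket/mirror")  # passes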
@@ -757,9 +755,9 @@ def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):

def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = spack.mirror.MirrorCollection().get(mirror_name)
mirror = MirrorCollection().get(mirror_name)
if not mirror:
raise ValueError('no mirror named "{0}"'.format(mirror_name))
raise ValueError(f'no mirror named "{mirror_name}"')
return mirror

@@ -527,7 +527,8 @@ def use_name(self):
parts = name.split("/")
name = os.path.join(*parts)
# Add optional suffixes based on constraints
path_elements = [name] + self.conf.suffixes
path_elements = [name]
path_elements.extend(map(self.spec.format, self.conf.suffixes))
return "-".join(path_elements)

@property

@@ -9,12 +9,10 @@
packages.
"""

import base64
import collections
import copy
import functools
import glob
import hashlib
import importlib
import io
import os
@@ -32,7 +30,6 @@
from llnl.util.lang import classproperty, memoized
from llnl.util.link_tree import LinkTree

import spack.build_environment
import spack.builder
import spack.compilers
import spack.config
@@ -50,23 +47,15 @@
import spack.store
import spack.url
import spack.util.environment
import spack.util.executable
import spack.util.package_hash as ph
import spack.util.path
import spack.util.spack_yaml as syaml
import spack.util.web
from spack.error import InstallError, NoURLError, PackageError
from spack.filesystem_view import YamlFilesystemView
from spack.install_test import (
PackageTest,
TestFailure,
TestStatus,
TestSuite,
cache_extra_test_sources,
install_test_root,
)
from spack.install_test import PackageTest, TestSuite
from spack.solver.version_order import concretization_version_order
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.version import GitVersion, StandardVersion

FLAG_HANDLER_RETURN_TYPE = Tuple[
@@ -1355,18 +1344,6 @@ def install_configure_args_path(self):
"""Return the configure args file path on successful installation."""
return os.path.join(self.metadata_dir, _spack_configure_argsfile)

# TODO (post-34236): Update tests and all packages that use this as a
# TODO (post-34236): package method to the function already available
# TODO (post-34236): to packages. Once done, remove this property.
@property
def install_test_root(self):
"""Return the install test root directory."""
tty.warn(
"The 'pkg.install_test_root' property is deprecated with removal "
"expected v0.23. Use 'install_test_root(pkg)' instead."
)
return install_test_root(self)

def archive_install_test_log(self):
"""Archive the install-phase test log, if present."""
if getattr(self, "tester", None):
@@ -1776,65 +1753,78 @@ def all_patches(cls):

return patches

def content_hash(self, content=None):
"""Create a hash based on the artifacts and patches used to build this package.
def artifact_hashes(self, content=None):
"""Create a dictionary of hashes of artifacts used in the build of this package.

This includes:
* source artifacts (tarballs, repositories) used to build;
* content hashes (``sha256``'s) of all patches applied by Spack; and
* canonicalized contents the ``package.py`` recipe used to build.

This hash is only included in Spack's DAG hash for concrete specs, but if it
happens to be called on a package with an abstract spec, only applicable (i.e.,
determinable) portions of the hash will be included.
Example::

{
"package_hash": "qovi2hm2n2qsatng2r4n55yzjlhnwflx",
"sources": [
{
"sha256": "fc5fd69bb8736323f026672b1b7235da613d7177e72558893a0bdcd320466d60",
"type": "archive"
},
{
"sha256": "56ab9b90f5acbc42eb7a94cf482e6c058a63e8a1effdf572b8b2a6323a06d923",
"type": "archive"
}
}

All hashes are added to concrete specs at the end of concretization. If this
method is called on an abstract spec, only hashes that can be known from the
abstract spec will be included.

"""
# list of components to make up the hash
hash_content = []
hashes = syaml.syaml_dict()

# source artifacts/repositories
# TODO: resources
if self.spec.versions.concrete:
sources = []
try:
source_id = fs.for_package_version(self).source_id()
fetcher = fs.for_package_version(self)
provenance_dict = fetcher.source_provenance()
if provenance_dict:
sources.append(provenance_dict)

except (fs.ExtrapolationError, fs.InvalidArgsError):
# ExtrapolationError happens if the package has no fetchers defined.
# InvalidArgsError happens when there are version directives with args,
# but none of them identifies an actual fetcher.
source_id = None

if not source_id:
# TODO? in cases where a digest or source_id isn't available,
# should this attempt to download the source and set one? This
# probably only happens for source repositories which are
# referenced by branch name rather than tag or commit ID.
# if this is a develop spec, say so
from_local_sources = "dev_path" in self.spec.variants

# don't bother setting a hash if none is available, but warn if
# it seems like there should be one.
if self.has_code and not self.spec.external and not from_local_sources:
message = "Missing a source id for {s.name}@{s.version}"
message = "Missing a hash for {s.name}@{s.version}"
tty.debug(message.format(s=self))
hash_content.append("".encode("utf-8"))
else:
hash_content.append(source_id.encode("utf-8"))

for resource in self._get_needed_resources():
sources.append(resource.fetcher.source_provenance())

if sources:
hashes["sources"] = sources

# patch sha256's
# Only include these if they've been assigned by the concretizer.
# We check spec._patches_assigned instead of spec.concrete because
# we have to call package_hash *before* marking specs concrete
if self.spec._patches_assigned():
hash_content.extend(
":".join((p.sha256, str(p.level))).encode("utf-8") for p in self.spec.patches
)
hashes["patches"] = [
{"sha256": patch.sha256, "level": patch.level} for patch in self.spec.patches
]

# package.py contents
hash_content.append(package_hash(self.spec, source=content).encode("utf-8"))
hashes["package_hash"] = ph.package_hash(self.spec, source=content)

# put it all together and encode as base32
b32_hash = base64.b32encode(
hashlib.sha256(bytes().join(sorted(hash_content))).digest()
).lower()
b32_hash = b32_hash.decode("utf-8")

return b32_hash
return hashes
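The change replaces one flattened base32 digest with the structured dictionary shown in the docstring example. A rough sketch of both shapes using hashlib directly (the byte strings are placeholders, not real artifacts):

    import base64
    import hashlib

    source = b"...tarball bytes..."             # placeholder source artifact
    recipe = b"...canonicalized package.py..."  # placeholder recipe contents

    # New shape: a dictionary, with each artifact hashed separately.
    hashes = {
        "sources": [{"sha256": hashlib.sha256(source).hexdigest(), "type": "archive"}],
        "package_hash": base64.b32encode(
            hashlib.sha256(recipe).digest()
        ).lower().decode("utf-8"),
    }

    # Old shape: every component joined (sorted) into a single base32 digest.
    combined = hashlib.sha256(b"".join(sorted([source, recipe]))).digest()
    b32_hash = base64.b32encode(combined).lower().decode("utf-8")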
@property
def cmake_prefix_paths(self):
@@ -1875,13 +1865,22 @@ def _has_make_target(self, target):
#
# BSD Make:
# make: don't know how to make test. Stop
#
# Note: "Stop." is not printed when running a Make jobserver (spack env depfile) that runs
# with `make -k/--keep-going`
missing_target_msgs = [
"No rule to make target `{0}'. Stop.",
"No rule to make target '{0}'. Stop.",
"don't know how to make {0}. Stop",
"No rule to make target `{0}'.",
"No rule to make target '{0}'.",
"don't know how to make {0}.",
]

kwargs = {"fail_on_error": False, "output": os.devnull, "error": str}
kwargs = {
"fail_on_error": False,
"output": os.devnull,
"error": str,
# Remove MAKEFLAGS to avoid inherited flags from Make jobserver (spack env depfile)
"extra_env": {"MAKEFLAGS": ""},
}

stderr = make("-n", target, **kwargs)
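The probe dry-runs make and matches stderr against the GNU and BSD "missing target" diagnostics, with MAKEFLAGS cleared so an outer jobserver cannot change the wording. A standalone approximation using subprocess (assumes a make binary on PATH; Spack's own Executable wrapper handles more cases):

    import os
    import subprocess

    MISSING_TARGET_MSGS = [
        "No rule to make target `{0}'.",  # GNU make, old-style quoting
        "No rule to make target '{0}'.",  # GNU make, new-style quoting
        "don't know how to make {0}.",    # BSD make
    ]

    def has_make_target(target: str, cwd: str = ".") -> bool:
        env = dict(os.environ, MAKEFLAGS="")  # drop inherited jobserver flags
        proc = subprocess.run(
            ["make", "-n", target],
            cwd=cwd,
            env=env,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
            text=True,
        )
        return not any(m.format(target) in proc.stderr for m in MISSING_TARGET_MSGS)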
@@ -1959,31 +1958,6 @@ def _resource_stage(self, resource):
resource_stage_folder = "-".join(pieces)
return resource_stage_folder

# TODO (post-34236): Update tests and all packages that use this as a
# TODO (post-34236): package method to the routine made available to
# TODO (post-34236): packages. Once done, remove this method.
def cache_extra_test_sources(self, srcs):
"""Copy relative source paths to the corresponding install test subdir

This method is intended as an optional install test setup helper for
grabbing source files/directories during the installation process and
copying them to the installation test subdirectory for subsequent use
during install testing.

Args:
srcs (str or list): relative path for files and or
subdirectories located in the staged source path that are to
be copied to the corresponding location(s) under the install
testing directory.
"""
msg = (
"'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
"expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
"instead."
)
warnings.warn(msg)
cache_extra_test_sources(self, srcs)

def do_test(self, dirty=False, externals=False):
if self.test_requires_compiler:
compilers = spack.compilers.compilers_for_spec(
@@ -2007,178 +1981,6 @@ def do_test(self, dirty=False, externals=False):

self.tester.stand_alone_tests(kwargs)

# TODO (post-34236): Remove this deprecated method when eliminate test,
# TODO (post-34236): run_test, etc.
@property
def _test_deprecated_warning(self):
alt = f"Use any name starting with 'test_' instead in {self.spec.name}."
return f"The 'test' method is deprecated. {alt}"

# TODO (post-34236): Remove this deprecated method when eliminate test,
# TODO (post-34236): run_test, etc.
def test(self):
# Defer tests to virtual and concrete packages
warnings.warn(self._test_deprecated_warning)

# TODO (post-34236): Remove this deprecated method when eliminate test,
# TODO (post-34236): run_test, etc.
def run_test(
self,
exe,
options=[],
expected=[],
status=0,
installed=False,
purpose=None,
skip_missing=False,
work_dir=None,
):
"""Run the test and confirm the expected results are obtained

Log any failures and continue, they will be re-raised later

Args:
exe (str): the name of the executable
options (str or list): list of options to pass to the runner
expected (str or list): list of expected output strings.
Each string is a regex expected to match part of the output.
status (int or list): possible passing status values
with 0 meaning the test is expected to succeed
installed (bool): if ``True``, the executable must be in the
install prefix
purpose (str): message to display before running test
skip_missing (bool): skip the test if the executable is not
in the install prefix bin directory or the provided work_dir
work_dir (str or None): path to the smoke test directory
"""

def test_title(purpose, test_name):
if not purpose:
return f"test: {test_name}: execute {test_name}"

match = re.search(r"test: ([^:]*): (.*)", purpose)
if match:
# The test title has all the expected parts
return purpose

match = re.search(r"test: (.*)", purpose)
if match:
reason = match.group(1)
return f"test: {test_name}: {reason}"

return f"test: {test_name}: {purpose}"

base_exe = os.path.basename(exe)
alternate = f"Use 'test_part' instead for {self.spec.name} to process {base_exe}."
warnings.warn(f"The 'run_test' method is deprecated. {alternate}")

extra = re.compile(r"[\s,\- ]")
details = (
[extra.sub("", options)]
if isinstance(options, str)
else [extra.sub("", os.path.basename(opt)) for opt in options]
)
details = "_".join([""] + details) if details else ""
test_name = f"test_{base_exe}{details}"
tty.info(test_title(purpose, test_name), format="g")

wdir = "." if work_dir is None else work_dir
with fsys.working_dir(wdir, create=True):
try:
runner = which(exe)
if runner is None and skip_missing:
self.tester.status(test_name, TestStatus.SKIPPED, f"{exe} is missing")
return
assert runner is not None, f"Failed to find executable '{exe}'"

self._run_test_helper(runner, options, expected, status, installed, purpose)
self.tester.status(test_name, TestStatus.PASSED, None)
return True
except (AssertionError, BaseException) as e:
# print a summary of the error to the log file
# so that cdash and junit reporters know about it
exc_type, _, tb = sys.exc_info()

self.tester.status(test_name, TestStatus.FAILED, str(e))

import traceback

# remove the current call frame to exclude the extract_stack
# call from the error
stack = traceback.extract_stack()[:-1]

# Package files have a line added at import time, so we re-read
# the file to make line numbers match. We have to subtract two
# from the line number because the original line number is
# inflated once by the import statement and the lines are
# displaced one by the import statement.
for i, entry in enumerate(stack):
filename, lineno, function, text = entry
if spack.repo.is_package_file(filename):
with open(filename, "r") as f:
lines = f.readlines()
new_lineno = lineno - 2
text = lines[new_lineno]
stack[i] = (filename, new_lineno, function, text)

# Format the stack to print and print it
out = traceback.format_list(stack)
for line in out:
print(line.rstrip("\n"))

if exc_type is spack.util.executable.ProcessError:
out = io.StringIO()
spack.build_environment.write_log_summary(
out, "test", self.tester.test_log_file, last=1
)
m = out.getvalue()
else:
# We're below the package context, so get context from
# stack instead of from traceback.
# The traceback is truncated here, so we can't use it to
# traverse the stack.
context = spack.build_environment.get_package_context(tb)
m = "\n".join(context) if context else ""

exc = e  # e is deleted after this block

# If we fail fast, raise another error
if spack.config.get("config:fail_fast", False):
raise TestFailure([(exc, m)])
else:
self.tester.add_failure(exc, m)
return False

# TODO (post-34236): Remove this deprecated method when eliminate test,
# TODO (post-34236): run_test, etc.
def _run_test_helper(self, runner, options, expected, status, installed, purpose):
status = [status] if isinstance(status, int) else status
expected = [expected] if isinstance(expected, str) else expected
options = [options] if isinstance(options, str) else options

if installed:
msg = f"Executable '{runner.name}' expected in prefix, "
msg += f"found in {runner.path} instead"
assert runner.path.startswith(self.spec.prefix), msg

tty.msg(f"Expecting return code in {status}")

try:
output = runner(*options, output=str.split, error=str.split)

assert 0 in status, f"Expected {runner.name} execution to fail"
except ProcessError as err:
output = str(err)
match = re.search(r"exited with status ([0-9]+)", output)
if not (match and int(match.group(1)) in status):
raise

for check in expected:
cmd = " ".join([runner.name] + options)
msg = f"Expected '{check}' to match output of `{cmd}`"
msg += f"\n\nOutput: {output}"
assert re.search(check, output), msg

def unit_test_check(self):
"""Hook for unit tests to assert things about package internals.

@@ -205,23 +205,33 @@ def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefi
paths_to_paths dictionary which maps all of the old paths to new paths
"""
paths_to_paths = dict()
# Sort from longest path to shortest, to ensure we try /foo/bar/baz before /foo/bar
prefix_iteration_order = sorted(prefix_to_prefix, key=len, reverse=True)
for orig_rpath in orig_rpaths:
if orig_rpath.startswith(old_layout_root):
for old_prefix, new_prefix in prefix_to_prefix.items():
for old_prefix in prefix_iteration_order:
new_prefix = prefix_to_prefix[old_prefix]
if orig_rpath.startswith(old_prefix):
new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
paths_to_paths[orig_rpath] = new_rpath
break
else:
paths_to_paths[orig_rpath] = orig_rpath

if idpath:
for old_prefix, new_prefix in prefix_to_prefix.items():
for old_prefix in prefix_iteration_order:
new_prefix = prefix_to_prefix[old_prefix]
if idpath.startswith(old_prefix):
paths_to_paths[idpath] = re.sub(re.escape(old_prefix), new_prefix, idpath)
break

for dep in deps:
for old_prefix, new_prefix in prefix_to_prefix.items():
for old_prefix in prefix_iteration_order:
new_prefix = prefix_to_prefix[old_prefix]
if dep.startswith(old_prefix):
paths_to_paths[dep] = re.sub(re.escape(old_prefix), new_prefix, dep)
break

if dep.startswith("@"):
paths_to_paths[dep] = dep
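Trying prefixes longest-first is what keeps nested prefixes correct: /foo/bar/baz must match before /foo/bar, or the shorter mapping would win. The core idea reduced to a standalone helper (string slicing here stands in for the re.sub call above):

    def remap_path(path: str, prefix_to_prefix: dict) -> str:
        # Longest prefix first, so /foo/bar/baz is tried before /foo/bar.
        for old in sorted(prefix_to_prefix, key=len, reverse=True):
            if path.startswith(old):
                return prefix_to_prefix[old] + path[len(old):]
        return path  # no known prefix: leave the path untouched

    mapping = {"/old": "/new", "/old/gcc": "/stow/gcc"}
    assert remap_path("/old/gcc/lib", mapping) == "/stow/gcc/lib"
    assert remap_path("/old/zlib/lib", mapping) == "/new/zlib/lib"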
@@ -270,40 +280,14 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
install_name_tool = executable.Executable("install_name_tool")
install_name_tool(*args)

return


def modify_object_macholib(cur_path, paths_to_paths):
"""
This function is used when install machO buildcaches on linux by
rewriting mach-o loader commands for dependency library paths of
mach-o binaries and the id path for mach-o libraries.
Rewritting of rpaths is handled by replace_prefix_bin.
Inputs
mach-o binary to be modified
dictionary mapping paths in old install layout to new install layout
"""

dll = macholib.MachO.MachO(cur_path)
dll.rewriteLoadCommands(paths_to_paths.get)

try:
f = open(dll.filename, "rb+")
for header in dll.headers:
f.seek(0)
dll.write(f)
f.seek(0, 2)
f.flush()
f.close()
except Exception:
pass

return


def macholib_get_paths(cur_path):
"""Get rpaths, dependent libraries, and library id of mach-o objects."""
headers = macholib.MachO.MachO(cur_path).headers
headers = []
try:
headers = macholib.MachO.MachO(cur_path).headers
except ValueError:
pass
if not headers:
tty.warn("Failed to read Mach-O headers: {0}".format(cur_path))
commands = []
@@ -415,10 +399,7 @@ def relocate_macho_binaries(
# normalized paths
rel_to_orig = macho_make_paths_normal(orig_path_name, rpaths, deps, idpath)
# replace the relativized paths with normalized paths
if sys.platform == "darwin":
modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
else:
modify_object_macholib(path_name, rel_to_orig)
modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
# get the normalized paths in the mach-o binary
rpaths, deps, idpath = macholib_get_paths(path_name)
# get the mapping of paths in old prefix to path in new prefix
@@ -426,10 +407,7 @@ def relocate_macho_binaries(
rpaths, deps, idpath, old_layout_root, prefix_to_prefix
)
# replace the old paths with new paths
if sys.platform == "darwin":
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
modify_object_macholib(path_name, paths_to_paths)
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
# get the new normalized path in the mach-o binary
rpaths, deps, idpath = macholib_get_paths(path_name)
# get the mapping of paths to relative paths in the new prefix
@@ -437,10 +415,7 @@ def relocate_macho_binaries(
path_name, new_layout_root, rpaths, deps, idpath
)
# replace the new paths with relativized paths in the new prefix
if sys.platform == "darwin":
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
modify_object_macholib(path_name, paths_to_paths)
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
# get the paths in the old prefix
rpaths, deps, idpath = macholib_get_paths(path_name)
@@ -449,10 +424,7 @@ def relocate_macho_binaries(
rpaths, deps, idpath, old_layout_root, prefix_to_prefix
)
# replace the old paths with new paths
if sys.platform == "darwin":
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
modify_object_macholib(path_name, paths_to_paths)
modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)


def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):

@@ -39,7 +39,6 @@
import spack.error
import spack.patch
import spack.provider_index
import spack.repo
import spack.spec
import spack.tag
import spack.util.git
@@ -216,9 +215,9 @@ def compute_loader(self, fullname):
def packages_path():
"""Get the test repo if it is active, otherwise the builtin repo."""
try:
return spack.repo.PATH.get_repo("builtin.mock").packages_path
except spack.repo.UnknownNamespaceError:
return spack.repo.PATH.get_repo("builtin").packages_path
return PATH.get_repo("builtin.mock").packages_path
except UnknownNamespaceError:
return PATH.get_repo("builtin").packages_path


class GitExe:
@@ -314,7 +313,7 @@ def add_package_to_git_stage(packages):
git = GitExe()

for pkg_name in packages:
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
filename = PATH.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)

@@ -806,7 +805,7 @@ def tag_index(self) -> spack.tag.TagIndex:
return self._tag_index

@property
def patch_index(self) -> spack.patch.PatchCache:
def patch_index(self) -> "spack.patch.PatchCache":
"""Merged PatchIndex from all Repos in the RepoPath."""
if self._patch_index is None:
self._patch_index = spack.patch.PatchCache(repository=self)
@@ -1159,7 +1158,7 @@ def tag_index(self) -> spack.tag.TagIndex:
return self.index["tags"]

@property
def patch_index(self) -> spack.patch.PatchCache:
def patch_index(self) -> "spack.patch.PatchCache":
"""Index of patches and packages they're defined on."""
return self.index["patches"]

@@ -1585,7 +1584,7 @@ def __init__(self, name, repo=None):
long_msg = "Use 'spack create' to create a new package."

if not repo:
repo = spack.repo.PATH
repo = PATH

# We need to compare the base package name
pkg_name = name.rsplit(".", 1)[-1]

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import xml.sax.saxutils
from datetime import datetime
@@ -42,17 +41,6 @@ def elapsed(current, previous):
return diff.total_seconds()


# TODO (post-34236): Should remove with deprecated test methods since don't
# TODO (post-34236): have an XFAIL mechanism with the new test_part() approach.
def expected_failure(line):
if not line:
return False

match = returns_regexp.search(line)
xfail = "0" not in match.group(1) if match else False
return xfail


def new_part():
return {
"command": None,
@@ -66,14 +54,6 @@ def new_part():
}


# TODO (post-34236): Remove this when remove deprecated methods
def part_name(source):
elements = []
for e in source.replace("'", "").split(" "):
elements.append(os.path.basename(e) if os.sep in e else e)
return "_".join(elements)


def process_part_end(part, curr_time, last_time):
if part:
if not part["elapsed"]:
@@ -81,11 +61,7 @@ def process_part_end(part, curr_time, last_time):

stat = part["status"]
if stat in completed:
# TODO (post-34236): remove the expected failure mapping when
# TODO (post-34236): remove deprecated test methods.
if stat == "passed" and expected_failure(part["desc"]):
part["completed"] = "Expected to fail"
elif part["completed"] == "Unknown":
if part["completed"] == "Unknown":
part["completed"] = completed[stat]
elif stat is None or stat == "unknown":
part["status"] = "passed"
@@ -153,14 +129,6 @@ def extract_test_parts(default_name, outputs):
if msg.startswith("Installing"):
continue

# TODO (post-34236): Remove this check when remove run_test(),
# TODO (post-34236): etc. since no longer supporting expected
# TODO (post-34236): failures.
if msg.startswith("Expecting return code"):
if part:
part["desc"] += f"; {msg}"
continue

# Terminate without further parsing if no more test messages
if "Completed testing" in msg:
# Process last lingering part IF it didn't generate status

@@ -12,6 +12,7 @@
from llnl.util.symlink import readlink, symlink

import spack.binary_distribution as bindist
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.platforms
@@ -52,6 +53,7 @@ def rewire_node(spec, explicit):
its subgraph. Binaries, text, and links are all changed in accordance with
the splice. The resulting package is then 'installed.'"""
tempdir = tempfile.mkdtemp()

# copy anything installed to a temporary directory
shutil.copytree(spec.build_spec.prefix, os.path.join(tempdir, spec.dag_hash()))

@@ -59,8 +61,21 @@ def rewire_node(spec, explicit):
# compute prefix-to-prefix for every node from the build spec to the spliced
# spec
prefix_to_prefix = OrderedDict({spec.build_spec.prefix: spec.prefix})
for build_dep in spec.build_spec.traverse(root=False):
prefix_to_prefix[build_dep.prefix] = spec[build_dep.name].prefix
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
for s in bindist.deps_to_relocate(spec):
analog = s
if id(s) not in build_spec_ids:
analogs = [
d
for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
]
if analogs:
# Prefer same-name analogs and prefer higher versions
# This matches the preferences in Spec.splice, so we will find same node
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))

prefix_to_prefix[analog.prefix] = s.prefix

manifest = bindist.get_buildfile_manifest(spec.build_spec)
platform = spack.platforms.by_name(spec.platform)
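The analog selection leans on Python's tuple ordering in max(): booleans compare False < True, so any same-name candidate outranks a different-name one, and the version only breaks ties within a group. A toy illustration of that key function (plain tuples stand in for Spack's rich Version objects):

    from dataclasses import dataclass

    @dataclass
    class Node:
        name: str
        version: tuple  # e.g. (1, 2); Spack uses Version objects instead

    target = Node("zlib", (1, 3))
    candidates = [Node("zlib", (1, 2)), Node("zlib-ng", (2, 0)), Node("zlib", (1, 3))]

    # Same-name first (True > False), then highest version.
    best = max(candidates, key=lambda a: (a.name == target.name, a.version))
    assert best == Node("zlib", (1, 3))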
@@ -11,8 +11,6 @@

from llnl.util.lang import union_dicts

import spack.schema.gitlab_ci

# Schema for script fields
# List of lists and/or strings
# This is similar to what is allowed in
@@ -47,7 +45,7 @@
"tags": {"type": "array", "items": {"type": "string"}},
"variables": {
"type": "object",
"patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}},
"patternProperties": {r"[\w\d\-_\.]+": {"type": ["string", "number"]}},
},
"before_script": script_schema,
"script": script_schema,
@@ -77,58 +75,54 @@
},
}

named_attributes_schema = {
"oneOf": [
{
dynamic_mapping_schema = {
"type": "object",
"additionalProperties": False,
"required": ["dynamic-mapping"],
"properties": {
"dynamic-mapping": {
"type": "object",
"additionalProperties": False,
"properties": {"noop-job": attributes_schema, "noop-job-remove": attributes_schema},
},
{
"type": "object",
"additionalProperties": False,
"properties": {"build-job": attributes_schema, "build-job-remove": attributes_schema},
},
{
"type": "object",
"additionalProperties": False,
"properties": {"copy-job": attributes_schema, "copy-job-remove": attributes_schema},
},
{
"type": "object",
"additionalProperties": False,
"required": ["endpoint"],
"properties": {
"reindex-job": attributes_schema,
"reindex-job-remove": attributes_schema,
"name": {"type": "string"},
# "endpoint" cannot have http patternProperties constaint as it is a required field
# Constrain is applied in code
"endpoint": {"type": "string"},
"timeout": {"type": "integer", "minimum": 0},
"verify_ssl": {"type": "boolean", "default": False},
"header": {"type": "object", "additionalProperties": False},
"allow": {"type": "array", "items": {"type": "string"}},
"require": {"type": "array", "items": {"type": "string"}},
"ignore": {"type": "array", "items": {"type": "string"}},
},
},
{
"type": "object",
"additionalProperties": False,
"properties": {
"signing-job": attributes_schema,
"signing-job-remove": attributes_schema,
},
},
{
"type": "object",
"additionalProperties": False,
"properties": {
"cleanup-job": attributes_schema,
"cleanup-job-remove": attributes_schema,
},
},
{
"type": "object",
"additionalProperties": False,
"properties": {"any-job": attributes_schema, "any-job-remove": attributes_schema},
},
]
}
},
}
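For reference, a pipeline-gen entry satisfying the new dynamic_mapping_schema could look like the following literal (the service name, endpoint URL, and key lists are illustrative, not taken from the diff):

    dynamic_mapping_entry = {
        "dynamic-mapping": {
            "name": "resource-mapper",                    # hypothetical service
            "endpoint": "https://ci.example.com/mapper",  # the one required key
            "timeout": 30,
            "verify_ssl": True,
            "allow": ["tags", "variables"],  # attributes the service may set
            "ignore": ["image"],             # attributes to discard if returned
        }
    }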
def job_schema(name: str):
return {
"type": "object",
"additionalProperties": False,
"properties": {f"{name}-job": attributes_schema, f"{name}-job-remove": attributes_schema},
}


pipeline_gen_schema = {
"type": "array",
"items": {"oneOf": [submapping_schema, named_attributes_schema]},
"items": {
"oneOf": [
submapping_schema,
dynamic_mapping_schema,
job_schema("any"),
job_schema("build"),
job_schema("cleanup"),
job_schema("copy"),
job_schema("noop"),
job_schema("reindex"),
job_schema("signing"),
]
},
}
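The job_schema() factory collapses what were seven near-identical oneOf branches into one parameterized constructor. A quick check of the shape it produces, validated with the jsonschema package (attributes_schema is stubbed here; the real one is defined earlier in this file):

    import jsonschema

    attributes_schema = {"type": "object"}  # simplified stand-in

    def job_schema(name: str):
        return {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                f"{name}-job": attributes_schema,
                f"{name}-job-remove": attributes_schema,
            },
        }

    entry = {"build-job": {"tags": ["x86_64"], "variables": {"SPACK_VERBOSE": "1"}}}
    jsonschema.validate(entry, job_schema("build"))  # passes

    try:
        jsonschema.validate({"oops": {}}, job_schema("build"))
    except jsonschema.ValidationError:
        pass  # unknown keys are rejected by additionalProperties: False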
core_shared_properties = union_dicts(
@@ -141,39 +135,8 @@
}
)

# TODO: Remove in Spack 0.23
ci_properties = {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
# "required": ["mappings"],
"properties": union_dicts(
core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
),
},
{
"type": "object",
"additionalProperties": False,
# "required": ["mappings"],
"properties": union_dicts(
core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
),
},
]
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"ci": {
"oneOf": [
# TODO: Replace with core-shared-properties in Spack 0.23
ci_properties,
# Allow legacy format under `ci` for `config update ci`
spack.schema.gitlab_ci.gitlab_ci_properties,
]
}
}
properties: Dict[str, Any] = {"ci": core_shared_properties}

#: Full schema with metadata
schema = {
@@ -183,21 +146,3 @@
"additionalProperties": False,
"properties": properties,
}


def update(data):
import llnl.util.tty as tty

import spack.ci
import spack.environment as ev

# Warn if deprecated section is still in the environment
ci_env = ev.active_environment()
if ci_env:
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
if "gitlab-ci" in env_config:
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")

# Detect if the ci section is using the new pipeline-gen
# If it is, assume it has already been converted
return spack.ci.translate_deprecated_config(data)

@@ -61,7 +61,10 @@
"target": {"type": "string"},
"alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"modules": {
"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
"anyOf": [
{"type": "null"},
{"type": "array", "items": {"type": "string"}},
]
},
"implicit_rpaths": implicit_rpaths,
"environment": spack.schema.environment.definition,

@@ -55,6 +55,26 @@
"unify": {
"oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
},
"splice": {
"type": "object",
"additionalProperties": False,
"properties": {
"explicit": {
"type": "array",
"default": [],
"items": {
"type": "object",
"required": ["target", "replacement"],
"additionalProperties": False,
"properties": {
"target": {"type": "string"},
"replacement": {"type": "string"},
"transitive": {"type": "boolean", "default": False},
},
},
}
},
},
"duplicates": {
"type": "object",
"properties": {
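In concretizer.yaml terms, the new splice section above admits configuration like the following (the spec names are illustrative); shown as the dictionary the YAML would load into:

    # Roughly equivalent YAML:
    #   concretizer:
    #     splice:
    #       explicit:
    #       - target: mpi
    #         replacement: mpich@4.1
    #         transitive: false
    splice_config = {
        "splice": {
            "explicit": [
                {"target": "mpi", "replacement": "mpich@4.1", "transitive": False}
            ]
        }
    }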
@@ -12,7 +12,6 @@

from llnl.util.lang import union_dicts

import spack.schema.gitlab_ci  # DEPRECATED
import spack.schema.merged

from .spec_list import spec_list_schema
@@ -26,8 +25,6 @@
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
spack.schema.gitlab_ci.properties,
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
@@ -58,15 +55,6 @@ def update(data):
Returns:
True if data was changed, False otherwise
"""

import spack.ci

if "gitlab-ci" in data:
data["ci"] = data.pop("gitlab-ci")

if "ci" in data:
return spack.ci.translate_deprecated_config(data["ci"])

# There are not currently any deprecated attributes in this section
# that have not been removed
return False

@@ -1,125 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for gitlab-ci.yaml configuration file.

.. literalinclude:: ../spack/schema/gitlab_ci.py
:lines: 15-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

image_schema = {
"oneOf": [
{"type": "string"},
{
"type": "object",
"properties": {
"name": {"type": "string"},
"entrypoint": {"type": "array", "items": {"type": "string"}},
},
},
]
}

runner_attributes_schema_items = {
"image": image_schema,
"tags": {"type": "array", "items": {"type": "string"}},
"variables": {"type": "object", "patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}}},
"before_script": {"type": "array", "items": {"type": "string"}},
"script": {"type": "array", "items": {"type": "string"}},
"after_script": {"type": "array", "items": {"type": "string"}},
}

runner_selector_schema = {
"type": "object",
"additionalProperties": True,
"required": ["tags"],
"properties": runner_attributes_schema_items,
}

remove_attributes_schema = {
"type": "object",
"additionalProperties": False,
"required": ["tags"],
"properties": {"tags": {"type": "array", "items": {"type": "string"}}},
}


core_shared_properties = union_dicts(
runner_attributes_schema_items,
{
"bootstrap": {
"type": "array",
"items": {
"anyOf": [
{"type": "string"},
{
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {
"name": {"type": "string"},
"compiler-agnostic": {"type": "boolean", "default": False},
},
},
]
},
},
"match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
"mappings": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"required": ["match"],
"properties": {
"match": {"type": "array", "items": {"type": "string"}},
"remove-attributes": remove_attributes_schema,
"runner-attributes": runner_selector_schema,
},
},
},
"service-job-attributes": runner_selector_schema,
"signing-job-attributes": runner_selector_schema,
"rebuild-index": {"type": "boolean"},
"broken-specs-url": {"type": "string"},
"broken-tests-packages": {"type": "array", "items": {"type": "string"}},
},
)

gitlab_ci_properties = {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
"required": ["mappings"],
"properties": union_dicts(
core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
),
},
{
"type": "object",
"additionalProperties": False,
"required": ["mappings"],
"properties": union_dicts(
core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
),
},
]
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"gitlab-ci": gitlab_ci_properties}

#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack gitlab-ci configuration file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
}