Compare commits: hs/windows...develop-20 (1,061 commits)
[Commit table omitted: the capture lists 637 of the 1,061 compared commits (55918c31d2 through c283fce487) as a SHA1-only table; the author, date, and message columns did not survive extraction.]
**(file name not captured)**

```diff
@@ -5,7 +5,7 @@ coverage:
   status:
     project:
       default:
-        threshold: 0.2%
+        threshold: 2.0%

 ignore:
 - lib/spack/spack/test/.*
```
**(file name not captured)**

```diff
@@ -1,4 +1,5 @@
 {
+  "name": "Ubuntu 20.04",
   "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
   "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
 }
```
**.devcontainer/ubuntu22.04/devcontainer.json** (new file, +5)

```diff
@@ -0,0 +1,5 @@
+{
+  "name": "Ubuntu 22.04",
+  "image": "ghcr.io/spack/ubuntu-22.04:v2024-05-07",
+  "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
+}
```
**.github/workflows/audit.yaml** (21 changes)

```diff
@@ -28,8 +28,8 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
@@ -40,6 +40,8 @@ jobs:
       run: |
         python -m pip install --upgrade pywin32
     - name: Package audits (with coverage)
+      env:
+        COVERAGE_FILE: coverage/.coverage-audits-${{ matrix.system.os }}
       if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
@@ -47,27 +49,26 @@ jobs:
         coverage run $(which spack) audit configs
         coverage run $(which spack) -d audit externals
         coverage combine
-        coverage xml
     - name: Package audits (without coverage)
       if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         spack -d audit packages
         spack -d audit configs
         spack -d audit externals
     - name: Package audits (without coverage)
       if: ${{ runner.os == 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         spack -d audit packages
         ./share/spack/qa/validate_last_exit.ps1
         spack -d audit configs
         ./share/spack/qa/validate_last_exit.ps1
         spack -d audit externals
         ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
-      if: ${{ inputs.with_coverage == 'true' }}
+    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
       with:
-        flags: unittests,audits
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
+        name: coverage-audits-${{ matrix.system.os }}
+        path: coverage
+        include-hidden-files: true
```
**.github/workflows/bin/bootstrap-test.sh** (2 changes)

```diff
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.4
+$PYTHON bin/spack bootstrap disable github-actions-v0.5
 $PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
```
**.github/workflows/bootstrap.yml** (102 changes)

```diff
@@ -37,14 +37,14 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
+        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
         spack external find cmake bison
         spack -d solve zlib
         tree ~/.spack/bootstrap/store/
@@ -53,33 +53,27 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
     - name: Setup macOS
-      if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
+      if: ${{ matrix.runner != 'ubuntu-latest' }}
       run: |
         brew install cmake bison tree
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
      with:
        python-version: "3.12"
     - name: Bootstrap clingo
-      env:
-        SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-        SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-        USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-        VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
       run: |
-        ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+        source share/spack/setup-env.sh
+        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
         spack external find --not-buildable cmake bison
         spack -d solve zlib
-        ${{ env.VALIDATE_LAST_EXIT }}
-        tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+        tree $HOME/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
@@ -89,22 +83,22 @@ jobs:
     steps:
     - name: Setup macOS
       if: ${{ matrix.runner != 'ubuntu-latest' }}
+      run: brew install tree gawk
+    - name: Remove system executables
       run: |
-        brew install tree gawk
-        sudo rm -rf $(command -v gpg gpg2)
-    - name: Setup Ubuntu
-      if: ${{ matrix.runner == 'ubuntu-latest' }}
-      run: sudo rm -rf $(command -v gpg gpg2 patchelf)
+        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+          sudo rm $(command -v gpg gpg2 patchelf)
+        done
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
         spack solve zlib
+        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
         spack -d gpg list
         tree ~/.spack/bootstrap/store/

@@ -116,19 +110,17 @@ jobs:
     steps:
     - name: Setup macOS
       if: ${{ matrix.runner != 'ubuntu-latest' }}
+      run: brew install tree
+    - name: Remove system executables
       run: |
-        brew install tree
-        # Remove GnuPG since we want to bootstrap it
-        sudo rm -rf /usr/local/bin/gpg
-    - name: Setup Ubuntu
-      if: ${{ matrix.runner == 'ubuntu-latest' }}
-      run: |
-        sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+          sudo rm $(command -v gpg gpg2 patchelf)
+        done
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: |
           3.8
@@ -136,15 +128,16 @@ jobs:
           3.10
           3.11
           3.12
+          3.13
     - name: Set bootstrap sources
       run: |
         source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.4
+        spack bootstrap disable github-actions-v0.5
         spack bootstrap disable spack-install
     - name: Bootstrap clingo
       run: |
         set -e
-        for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
+        for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
           not_found=1
           ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
           if [[ -d "$ver_dir" ]] ; then
@@ -167,4 +160,45 @@ jobs:
       run: |
         source share/spack/setup-env.sh
         spack -d gpg list
-        tree ~/.spack/bootstrap/store/
+        tree $HOME/.spack/bootstrap/store/
+    - name: Bootstrap File
+      run: |
+        source share/spack/setup-env.sh
+        spack -d python share/spack/qa/bootstrap-file.py
+        tree $HOME/.spack/bootstrap/store/
+
+  windows:
+    runs-on: "windows-latest"
+    steps:
+    - name: Checkout
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+      with:
+        python-version: "3.12"
+    - name: Setup Windows
+      run: |
+        Remove-Item -Path (Get-Command gpg).Path
+        Remove-Item -Path (Get-Command file).Path
+    - name: Bootstrap clingo
+      run: |
+        ./share/spack/setup-env.ps1
+        spack bootstrap disable github-actions-v0.6
+        spack bootstrap disable github-actions-v0.5
+        spack external find --not-buildable cmake bison
+        spack -d solve zlib
+        ./share/spack/qa/validate_last_exit.ps1
+        tree $env:userprofile/.spack/bootstrap/store/
+    - name: Bootstrap GnuPG
+      run: |
+        ./share/spack/setup-env.ps1
+        spack -d gpg list
+        ./share/spack/qa/validate_last_exit.ps1
+        tree $env:userprofile/.spack/bootstrap/store/
+    - name: Bootstrap File
+      run: |
+        ./share/spack/setup-env.ps1
+        spack -d python share/spack/qa/bootstrap-file.py
+        ./share/spack/qa/validate_last_exit.ps1
+        tree $env:userprofile/.spack/bootstrap/store/
```
**.github/workflows/build-containers.yml** (10 changes)

```diff
@@ -55,7 +55,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

     - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
       id: docker_meta
@@ -87,7 +87,7 @@ jobs:
         fi

     - name: Upload Dockerfile
-      uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
+      uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
      with:
        name: dockerfiles_${{ matrix.dockerfile[0] }}
        path: dockerfiles
@@ -96,7 +96,7 @@ jobs:
       uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf

     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
+      uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349

     - name: Log in to GitHub Container Registry
       uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
@@ -113,7 +113,7 @@ jobs:
         password: ${{ secrets.DOCKERHUB_TOKEN }}

     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
+      uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +126,7 @@ jobs:
     needs: deploy-images
     steps:
     - name: Merge Artifacts
-      uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
+      uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
         name: dockerfiles
         pattern: dockerfiles_*
```
**.github/workflows/ci.yaml** (39 changes)

```diff
@@ -15,18 +15,6 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  prechecks:
-    needs: [ changes ]
-    uses: ./.github/workflows/valid-style.yml
-    secrets: inherit
-    with:
-      with_coverage: ${{ needs.changes.outputs.core }}
-  all-prechecks:
-    needs: [ prechecks ]
-    runs-on: ubuntu-latest
-    steps:
-    - name: Success
-      run: "true"
   # Check which files have been updated by the PR
   changes:
     runs-on: ubuntu-latest
@@ -36,7 +24,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -79,13 +67,34 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/bootstrap.yml
     secrets: inherit

   unit-tests:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
-  all:
-    needs: [ unit-tests, bootstrap ]
+  prechecks:
+    needs: [ changes ]
+    uses: ./.github/workflows/valid-style.yml
+    secrets: inherit
+    with:
+      with_coverage: ${{ needs.changes.outputs.core }}
+
+  all-prechecks:
+    needs: [ prechecks ]
+    runs-on: ubuntu-latest
+    steps:
+    - name: Success
+      run: "true"
+
+  coverage:
+    needs: [ unit-tests, prechecks ]
+    uses: ./.github/workflows/coverage.yml
+    secrets: inherit
+
+  all:
+    needs: [ coverage, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
```
**.github/workflows/coverage.yml** (new file, +34)

```diff
@@ -0,0 +1,34 @@
+name: coverage
+
+on:
+  workflow_call:
+
+jobs:
+  # Upload coverage reports to codecov once as a single bundle
+  upload:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+      with:
+        python-version: '3.11'
+        cache: 'pip'
+
+    - name: Install python dependencies
+      run: pip install -r .github/workflows/requirements/coverage/requirements.txt
+
+    - name: Download coverage artifact files
+      uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
+      with:
+        pattern: coverage-*
+        path: coverage
+        merge-multiple: true
+
+    - run: ls -la coverage
+    - run: coverage combine -a coverage/.coverage*
+    - run: coverage xml
+
+    - name: "Upload coverage report to CodeCov"
+      uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
+      with:
+        verbose: true
```
**.github/workflows/nightly-win-builds.yml** (4 changes)

```diff
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: 3.9
     - name: Install Python packages
```
**.github/workflows/requirements/coverage/requirements.txt** (new file, +1)

```diff
@@ -0,0 +1 @@
+coverage==7.6.1
```
**(requirements file; name not captured)**

```diff
@@ -1,7 +1,7 @@
-black==24.8.0
+black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20240513
+types-six==1.16.21.20241009
 vermin==1.6.0
```
**.github/workflows/unit_tests.yaml** (90 changes)

```diff
@@ -40,10 +40,10 @@ jobs:
             on_develop: false

     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -76,22 +76,23 @@ jobs:
         SPACK_PYTHON: python
         SPACK_TEST_PARALLEL: 2
         COVERAGE: true
+        COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
         UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
-        flags: unittests,linux,${{ matrix.concretizer }}
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
+        name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
+        path: coverage
+        include-hidden-files: true
   # Test shell integration
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -112,11 +113,11 @@ jobs:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
-        flags: shelltests,linux
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
+        name: coverage-shell
+        path: coverage
+        include-hidden-files: true

   # Test RHEL8 UBI with platform Python. This job is run
   # only on PRs modifying core Spack
@@ -129,7 +130,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -148,35 +149,37 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
-        python-version: '3.11'
+        python-version: '3.13'
     - name: Install System packages
       run: |
         sudo apt-get -y update
-        sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
+        sudo apt-get -y install coreutils gfortran graphviz gnupg2
     - name: Install Python packages
       run: |
-        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
         pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
-    - name: Setup git configuration
-      run: |
-        # Need this for the git tests to succeed.
-        git --version
-        . .github/workflows/bin/setup_git.sh
     - name: Run unit tests (full suite with coverage)
       env:
         COVERAGE: true
+        COVERAGE_FILE: coverage/.coverage-clingo-cffi
       run: |
-        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+        . share/spack/setup-env.sh
+        spack bootstrap disable spack-install
+        spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.6
+        spack bootstrap status
+        spack solve zlib
+        spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
+    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
-        flags: unittests,linux,clingo
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
+        name: coverage-clingo-cffi
+        path: coverage
+        include-hidden-files: true
   # Run unit tests on MacOS
   macos:
     runs-on: ${{ matrix.os }}
@@ -185,10 +188,10 @@ jobs:
         os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
@@ -197,10 +200,11 @@ jobs:
         pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
     - name: Setup Homebrew packages
       run: |
-        brew install dash fish gcc gnupg2 kcov
+        brew install dash fish gcc gnupg kcov
     - name: Run unit tests
       env:
         SPACK_TEST_PARALLEL: 4
+        COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
       run: |
         git --version
         . .github/workflows/bin/setup_git.sh
@@ -209,11 +213,11 @@ jobs:
         $(which spack) solve zlib
         common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
-        flags: unittests,macos
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
+        name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
+        path: coverage
+        include-hidden-files: true
   # Run unit tests on Windows
   windows:
     defaults:
@@ -222,10 +226,10 @@ jobs:
         powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -235,13 +239,13 @@ jobs:
       run: |
         ./.github/workflows/bin/setup_git.ps1
     - name: Unit Test
+      env:
+        COVERAGE_FILE: coverage/.coverage-windows
       run: |
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml
         ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
-        flags: unittests,windows
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
+        name: coverage-windows
+        path: coverage
+        include-hidden-files: true
```
71
.github/workflows/valid-style.yml
vendored
71
.github/workflows/valid-style.yml
vendored
@@ -18,8 +18,8 @@ jobs:
|
|||||||
validate:
|
validate:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||||
with:
|
with:
|
||||||
python-version: '3.11'
|
python-version: '3.11'
|
||||||
cache: 'pip'
|
cache: 'pip'
|
||||||
@@ -35,10 +35,10 @@ jobs:
|
|||||||
style:
|
style:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||||
with:
|
with:
|
||||||
python-version: '3.11'
|
python-version: '3.11'
|
||||||
cache: 'pip'
|
cache: 'pip'
|
||||||
@@ -70,7 +70,7 @@ jobs:
|
|||||||
dnf install -y \
|
dnf install -y \
|
||||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||||
make patch tcl unzip which xz
|
make patch tcl unzip which xz
|
||||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
- name: Setup repo and non-root user
|
- name: Setup repo and non-root user
|
||||||
run: |
|
run: |
|
||||||
git --version
|
git --version
|
||||||
@@ -85,5 +85,64 @@ jobs:
        source share/spack/setup-env.sh
        spack debug report
        spack -d bootstrap now --dev
-       spack style -t black
+       spack -d style -t black
        spack unit-test -V
+  import-check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: julia-actions/setup-julia@v2
+        with:
+          version: '1.10'
+      - uses: julia-actions/cache@v2
+
+      # PR: use the base of the PR as the old commit
+      - name: Checkout PR base commit
+        if: github.event_name == 'pull_request'
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+        with:
+          ref: ${{ github.event.pull_request.base.sha }}
+          path: old
+      # not a PR: use the previous commit as the old commit
+      - name: Checkout previous commit
+        if: github.event_name != 'pull_request'
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+        with:
+          fetch-depth: 2
+          path: old
+      - name: Checkout previous commit
+        if: github.event_name != 'pull_request'
+        run: git -C old reset --hard HEAD^
+
+      - name: Checkout new commit
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+        with:
+          path: new
+      - name: Install circular import checker
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+        with:
+          repository: haampie/circular-import-fighter
+          ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
+          path: circular-import-fighter
+      - name: Install dependencies
+        working-directory: circular-import-fighter
+        run: make -j dependencies
+      - name: Import cycles before
+        working-directory: circular-import-fighter
+        run: make SPACK_ROOT=../old && cp solution solution.old
+      - name: Import cycles after
+        working-directory: circular-import-fighter
+        run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
+      - name: Compare import cycles
+        working-directory: circular-import-fighter
+        run: |
+          edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
+          edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
+          if [ "$edges_after" -gt "$edges_before" ]; then
+            printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
+            printf 'previously this was %s\033[0m\n' "$edges_before"
+            printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
+            printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
+            exit 1
+          else
+            printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
+          fi
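The import-check job above can also be reproduced locally before pushing. A minimal sketch, assuming a Spack checkout at ``~/spack``, a Julia install for the checker's solver, and that the Makefile targets behave exactly as the workflow invokes them:

.. code-block:: console

   $ git clone https://github.com/haampie/circular-import-fighter
   $ cd circular-import-fighter
   $ make -j dependencies                        # fetch the checker's dependencies
   $ make SPACK_ROOT=~/spack                     # compute the solution for your tree
   $ grep -oP 'edges to delete: \K\d+' solution  # the count CI compares across commits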
@@ -14,3 +14,26 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt
+
+search:
+  ranking:
+    spack.html: -10
+    spack.*.html: -10
+    llnl.html: -10
+    llnl.*.html: -10
+    _modules/*: -10
+    command_index.html: -9
+    basic_usage.html: 5
+    configuration.html: 5
+    config_yaml.html: 5
+    packages_yaml.html: 5
+    build_settings.html: 5
+    environments.html: 5
+    containers.html: 5
+    mirrors.html: 5
+    module_file_support.html: 5
+    repositories.html: 5
+    binary_caches.html: 5
+    chain.html: 5
+    pipelines.html: 5
+    packaging_guide.html: 5

CHANGELOG.md (71 changes)
@@ -1,3 +1,64 @@
+# v0.22.2 (2024-09-21)
+
+## Bugfixes
+- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
+- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
+- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
+- Support macOS Sequoia (#45018, #45127)
+- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
+- Ensure shell escaping of environment variable values in load and activate commands (#42780)
+- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
+- Do not halt concretization on unknown variants in externals (#45326)
+- Improve validation of `develop` config section (#46485)
+- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
+- Improve backwards compatibility in `include_concrete` (#45766)
+- Fix issue where package tags were sometimes repeated (#45160)
+- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
+- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
+- Remove debug statements in package hash computation (#45235)
+- Remove redundant clingo warnings (#45269)
+- Remove hard-coded layout version (#45645)
+- Do not initialize previous store state in `use_store` (#45268)
+- Docs improvements (#46475)
+
+## Package updates
+- `chapel` major update (#42197, #44931, #45304)
+
+# v0.22.1 (2024-07-04)
+
+## Bugfixes
+- Fix reuse of externals on Linux (#44316)
+- Ensure parent gcc-runtime version >= child (#44834, #44870)
+- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
+- Improve version detection of glibc (#44154)
+- Improve heuristics for solver (#44893, #44976, #45023)
+- Make strong preferences override reuse (#44373)
+- Reduce verbosity when C compiler is missing (#44182)
+- Make missing ccache executable an error when required (#44740)
+- Make every environment view containing `python` a `venv` (#44382)
+- Fix external detection for compilers with os but no target (#44156)
+- Fix version optimization for roots (#44272)
+- Handle common implementations of pagination of tags in OCI build caches (#43136)
+- Apply fetched patches to develop specs (#44950)
+- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
+- Fix issue with long filenames in build caches on Windows (#43851)
+- Fix formatting issue in `spack audit` (#45045)
+- CI fixes (#44582, #43965, #43967, #44279, #44213)
+
+## Package updates
+- protobuf: fix 3.4:3.21 patch checksum (#44443)
+- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
+- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
+- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
+- Remove mesa18 and libosmesa (#44264)
+- Enforce consistency of `gl` providers (#44307)
+- Require libiconv for iconv (#44335, #45026).
+  Notice that glibc/musl also provide iconv, but are not guaranteed to be
+  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
+- py-matplotlib: qualify when to do a post install (#44191)
+- rust: fix v1.78.0 instructions (#44127)
+- suite-sparse: improve setting of the `libs` property (#44214)
+- netlib-lapack: provide blas and lapack together (#44981)
+
 # v0.22.0 (2024-05-12)

@@ -319,6 +380,16 @@
 * 344 committers to packages
 * 45 committers to core
+
+# v0.21.3 (2024-10-02)
+
+## Bugfixes
+- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
+- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
+- Bump `archspec` to 0.2.5-dev for better aarch64 and Windows support (#42854, #44005,
+  #45721, #46445)
+- Support macOS Sequoia (#45018, #45127, #43862)
+- CI and test maintenance (#42909, #42728, #46711, #41943, #43363)
+
 # v0.21.2 (2024-03-01)

 ## Bugfixes
@@ -46,13 +46,18 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.

-To install spack and your first package, make sure you have Python.
+To install spack and your first package, make sure you have Python & Git.
 Then:

-    $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
+    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
     $ cd spack/bin
     $ ./spack install zlib

+> [!TIP]
+> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
+>
+> `--depth=2` prunes the git history to reduce the size of the Spack installation.
+
 Documentation
 ----------------
@@ -1,71 +1,11 @@
 @ECHO OFF
-setlocal EnableDelayedExpansion
 :: (c) 2021 Lawrence Livermore National Laboratory
 :: To use this file independently of Spack's installer, execute this script in its directory, or add the
 :: associated bin directory to your PATH. Invoke to launch Spack Shell.
 ::
 :: source_dir/spack/bin/spack_cmd.bat
 ::
-pushd %~dp0..
-set SPACK_ROOT=%CD%
-pushd %CD%\..
-set spackinstdir=%CD%
-popd
-
-:: Check if Python is on the PATH
-if not defined python_pf_ver (
-(for /f "delims=" %%F in ('where python.exe') do (
-set "python_pf_ver=%%F"
-goto :found_python
-) ) 2> NUL
-)
-:found_python
-if not defined python_pf_ver (
-:: If not, look for Python from the Spack installer
-:get_builtin
-(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
-set "python_ver=%%g")) 2> NUL
-
-if not defined python_ver (
-echo Python was not found on your system.
-echo Please install Python or add Python to your PATH.
-) else (
-set "py_path=!spackinstdir!\!python_ver!"
-set "py_exe=!py_path!\python.exe"
-)
-goto :exitpoint
-) else (
-:: Python is already on the path
-set "py_exe=!python_pf_ver!"
-(for /F "tokens=* USEBACKQ" %%F in (
-`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
-if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
-goto :exitpoint
-)
-:exitpoint
-
-set "PATH=%SPACK_ROOT%\bin\;%PATH%"
-if defined py_path (
-set "PATH=%py_path%;%PATH%"
-)
-
-if defined py_exe (
-"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
-)
-
-set "EDITOR=notepad"
-
-DOSKEY spacktivate=spack env activate $*
-
-@echo **********************************************************************
-@echo ** Spack Package Manager
-@echo **********************************************************************
-
-IF "%1"=="" GOTO CONTINUE
-set
-GOTO:EOF
-
-:continue
-set PROMPT=[spack] %PROMPT%
-%comspec% /k
+call "%~dp0..\share\spack\setup-env.bat"
+pushd %SPACK_ROOT%
+%comspec% /K
@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
-  - name: 'github-actions-v0.5'
+  - name: github-actions-v0.6
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.6
+  - name: github-actions-v0.5
     metadata: $spack/share/spack/bootstrap/github-actions-v0.5
-  - name: 'github-actions-v0.4'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
-  - name: 'spack-install'
+  - name: spack-install
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
+    github-actions-v0.6: true
    github-actions-v0.5: true
-    github-actions-v0.4: true
    spack-install: true
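With this change the ``github-actions-v0.6`` binaries are tried first, falling back to v0.5 and then to building from source. A small sketch of verifying the configured order from the command line (these subcommands appear elsewhere in this changeset; output omitted):

.. code-block:: console

   $ spack bootstrap list   # shows github-actions-v0.6, github-actions-v0.5, spack-install and trust status
   $ spack bootstrap now    # bootstrap immediately from the first trusted source that works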
@@ -42,8 +42,8 @@ concretizer:
   # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
   # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
   strategy: minimal
-  # Option to specify compatiblity between operating systems for reuse of compilers and packages
+  # Option to specify compatibility between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}
@@ -115,12 +115,6 @@ config:
   suppress_gpg_warnings: false

-
-  # If set to true, Spack will attempt to build any compiler on the spec
-  # that is not already available. If set to False, Spack will only use
-  # compilers already configured in compilers.yaml
-  install_missing_compilers: false
-
   # If set to true, Spack will always check checksums after downloading
   # archives. If false, Spack skips the checksum step.
   checksum: true
@@ -40,9 +40,9 @@ packages:
   jpeg: [libjpeg-turbo, libjpeg]
   lapack: [openblas, amdlibflame]
   libc: [glibc, musl]
-  libgfortran: [ gcc-runtime ]
+  libgfortran: [gcc-runtime]
   libglx: [mesa+glx]
-  libifcore: [ intel-oneapi-runtime ]
+  libifcore: [intel-oneapi-runtime]
   libllvm: [llvm]
   lua-lang: [lua, lua-luajit-openresty, lua-luajit]
   luajit: [lua-luajit-openresty, lua-luajit]
@@ -72,3 +72,13 @@ packages:
   permissions:
     read: world
     write: user
+  cray-mpich:
+    buildable: false
+  cray-mvapich2:
+    buildable: false
+  fujitsu-mpi:
+    buildable: false
+  hpcx-mpi:
+    buildable: false
+  spectrum-mpi:
+    buildable: false
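The five MPI implementations marked ``buildable: false`` above are vendor-provided stacks that Spack cannot build from source, so a site has to supply them as externals. A hedged sketch of how that might be checked and registered (the commands exist in Spack; whether ``external find`` detects a given vendor MPI depends on the site):

.. code-block:: console

   $ spack external find cray-mpich   # or add a packages:cray-mpich:externals entry by hand
   $ spack config get packages        # confirm the external entry and the buildable: false default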
@@ -1175,6 +1175,17 @@ unspecified version, but packages can depend on other packages with
 could depend on ``mpich@1.2:`` if it can only build with version
 ``1.2`` or higher of ``mpich``.

+.. note:: Windows Spec Syntax Caveats
+   Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
+   Spack's spec dependency syntax uses the carat (``^``) character; however, this is an escape character in CMD,
+   so it must be escaped with an additional carat (i.e. ``^^``).
+   CMD will also attempt to interpret strings with ``=`` characters in them. Any spec including this symbol
+   must double quote the string.
+
+   Note: All of these issues are unique to CMD; they can be avoided by using PowerShell.
+
+   For more context on these caveats see the related issues: `carat <https://github.com/spack/spack/issues/42833>`_ and `equals <https://github.com/spack/spack/issues/43348>`_
+
 Below are more details about the specifiers that you can add to specs.

 .. _version-specifier:
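To make the CMD caveats above concrete, here is a short sketch of the same install issued from both shells (the package names are illustrative only):

.. code-block:: console

   :: CMD: double the carat, and quote any spec containing "="
   C:\> spack install hdf5^^mpich
   C:\> spack install "zlib cflags=-O2"

   # PowerShell needs neither workaround
   PS C:\> spack install hdf5^mpich
   PS C:\> spack install zlib cflags=-O2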
@@ -166,3 +166,74 @@ while `py-numpy` still needs an older version:

 Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
 default behavior is ``duplicates:strategy:minimal``.
+
+--------
+Splicing
+--------
+
+The ``splice`` key covers config attributes for splicing specs in the solver.
+
+"Splicing" is a method for replacing a dependency with another spec
+that provides the same package or virtual. There are two types of
+splices, referring to different behaviors for shared dependencies
+between the root spec and the new spec replacing a dependency:
+"transitive" and "intransitive". A "transitive" splice is one that
+resolves all conflicts by taking the dependency from the new node. An
+"intransitive" splice is one that resolves all conflicts by taking the
+dependency from the original root. From a theory perspective, hybrid
+splices are possible but are not modeled by Spack.
+
+All spliced specs retain a ``build_spec`` attribute that points to the
+original Spec before any splice occurred. The ``build_spec`` for a
+non-spliced spec is itself.
+
+The figure below shows examples of transitive and intransitive splices:
+
+.. figure:: images/splices.png
+   :align: center
+
+The concretizer can be configured to explicitly splice particular
+replacements for a target spec. Splicing will allow the user to make
+use of generically built public binary caches, while swapping in
+highly optimized local builds for performance critical components
+and/or components that interact closely with the specific hardware
+details of the system. The most prominent candidate for splicing is
+MPI providers. MPI packages have relatively well-understood ABI
+characteristics, and most High Performance Computing facilities deploy
+highly optimized MPI packages tailored to their particular
+hardware. The following config block configures Spack to replace
+whatever MPI provider each spec was concretized to use with the
+particular package of ``mpich`` with the hash that begins ``abcdef``.
+
+.. code-block:: yaml
+
+   concretizer:
+     splice:
+       explicit:
+       - target: mpi
+         replacement: mpich/abcdef
+         transitive: false
+
+.. warning::
+
+   When configuring an explicit splice, you as the user take on the
+   responsibility for ensuring ABI compatibility between the specs
+   matched by the target and the replacement you provide. If they are
+   not compatible, Spack will not warn you and your application will
+   fail to run.
+
+The ``target`` field of an explicit splice can be any abstract
+spec. The ``replacement`` field must be a spec that includes the hash
+of a concrete spec, and the replacement must either be the same
+package as the target, provide the virtual that is the target, or
+provide a virtual that the target provides. The ``transitive`` field
+is optional -- by default, splices will be transitive.
+
+.. note::
+
+   With explicit splices configured, it is possible for Spack to
+   concretize to a spec that does not satisfy the input. For example,
+   with the config above ``hdf5 ^mvapich2`` will concretize to use
+   ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
+   will warn the user in this case, but will not fail the
+   concretization.
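As a usage sketch of the explicit splice configured above (``abcdef`` is the document's placeholder hash; the commands assume a concrete ``mpich`` with that hash actually exists in a store or cache):

.. code-block:: console

   $ spack config edit concretizer   # add the concretizer:splice:explicit block shown above
   $ spack spec hdf5 ^mvapich2       # per the note above: concretizes to mpich/abcdef, with a warning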
@@ -130,14 +130,19 @@ before or after a particular phase. For example, in ``perl``, we see:

    @run_after("install")
    def install_cpanm(self):
        spec = self.spec
-       if spec.satisfies("+cpanm"):
-           with working_dir(join_path("cpanm", "cpanm")):
-               perl = spec["perl"].command
-               perl("Makefile.PL")
-               make()
-               make("install")
+       maker = make
+       cpan_dir = join_path("cpanm", "cpanm")
+       if sys.platform == "win32":
+           maker = nmake
+           cpan_dir = join_path(self.stage.source_path, cpan_dir)
+           cpan_dir = windows_sfn(cpan_dir)
+       if "+cpanm" in spec:
+           with working_dir(cpan_dir):
+               perl = spec["perl"].command
+               perl("Makefile.PL")
+               maker()
+               maker("install")

 This extra step automatically installs ``cpanm`` in addition to the
 base Perl installation.
@@ -176,8 +181,14 @@ In the ``perl`` package, we can see:

    @run_after("build")
    @on_package_attributes(run_tests=True)
-   def test(self):
-       make("test")
+   def build_test(self):
+       if sys.platform == "win32":
+           win32_dir = os.path.join(self.stage.source_path, "win32")
+           win32_dir = windows_sfn(win32_dir)
+           with working_dir(win32_dir):
+               nmake("test", ignore_quotes=True)
+       else:
+           make("test")

 As you can guess, this runs ``make test`` *after* building the package,
 if and only if testing is requested. Again, this is not specific to
@@ -49,14 +49,14 @@ following phases:
 #. ``install`` - install the package

 Package developers often add unit tests that can be invoked with
-``scons test`` or ``scons check``. Spack provides a ``test`` method
+``scons test`` or ``scons check``. Spack provides a ``build_test`` method
 to handle this. Since we don't know which one the package developer
-chose, the ``test`` method does nothing by default, but can be easily
+chose, the ``build_test`` method does nothing by default, but can be easily
 overridden like so:

 .. code-block:: python

-   def test(self):
+   def build_test(self):
        scons("check")
@@ -5,9 +5,9 @@

 .. chain:

-============================
-Chaining Spack Installations
-============================
+=============================================
+Chaining Spack Installations (upstreams.yaml)
+=============================================

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
@@ -218,6 +218,10 @@ def setup(sphinx):
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
+    ("py:class", "spack.traverse.EdgeAndDepth"),
+    ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
+    # TypeVar that is not handled correctly
+    ("py:class", "llnl.util.lang.T"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
 higher-precedence scopes first. So, settings in a higher-precedence file
 can override those with the same key in a lower-precedence one. For
 list-valued settings, Spack *prepends* higher-precedence settings to
-lower-precedence settings. Completely ignoring higher-level configuration
+lower-precedence settings. Completely ignoring lower-precedence configuration
 options is supported with the ``::`` notation for keys (see
 :ref:`config-overrides` below).
@@ -511,6 +511,7 @@ Spack understands over a dozen special variables. These are:
 * ``$target_family``. The target family for the current host, as
   detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
 * ``$date``: the current date in the format YYYY-MM-DD
+* ``$spack_short_version``: the Spack version truncated to the first components.

 Note that, as with shell variables, you can write these as ``$varname``
@@ -184,7 +184,7 @@ Style Tests

 Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
 `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
-`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
+`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
 a series of style guides for Python that provide suggestions for everything
 from variable naming to indentation. In order to limit the number of PRs that
 were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
@@ -316,6 +316,215 @@ documentation tests to make sure there are no errors. Documentation changes can
 in some obfuscated warning messages. If you don't understand what they mean, feel free
 to ask when you submit your PR.

+.. _spack-builders-and-pipelines:
+
+^^^^^^^^^
+GitLab CI
+^^^^^^^^^
+
+""""""""""""""""""
+Build Cache Stacks
+""""""""""""""""""
+
+Spack welcomes the contribution of software stacks of interest to the community. These
+stacks are used to test package recipes and generate publicly available build caches.
+Spack uses GitLab CI for managing the orchestration of build jobs.
+
+GitLab Entry Point
+~~~~~~~~~~~~~~~~~~
+
+Add the stack entry point to ``share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml``. There
+are two stages required for each new stack: the generate stage and the build stage.
+
+The generate stage is defined using the job template ``.generate``, configured with
+environment variables defining the name of the stack in ``SPACK_CI_STACK_NAME``, the
+platform (``SPACK_TARGET_PLATFORM``) and architecture (``SPACK_TARGET_ARCH``) configuration,
+and the tags associated with the class of runners to build on.
+
+.. note::
+
+   The ``SPACK_CI_STACK_NAME`` must match the name of the directory containing the
+   stack's ``spack.yaml``.
+
+.. note::
+
+   The platform and architecture variables are specified in order to select the
+   correct configurations from the generic configurations used in Spack CI. The
+   configurations currently available are:
+
+   * ``.cray_rhel_zen4``
+   * ``.cray_sles_zen4``
+   * ``.darwin_aarch64``
+   * ``.darwin_x86_64``
+   * ``.linux_aarch64``
+   * ``.linux_icelake``
+   * ``.linux_neoverse_n1``
+   * ``.linux_neoverse_v1``
+   * ``.linux_neoverse_v2``
+   * ``.linux_power``
+   * ``.linux_skylake``
+   * ``.linux_x86_64``
+   * ``.linux_x86_64_v4``
+
+   New configurations can be added to accommodate new platforms and architectures.
+
+The build stage is defined as a trigger job that consumes the GitLab CI pipeline generated in
+the generate stage for this stack. Build stage jobs use the ``.build`` job template, which
+handles the basic configuration.
+
+An example entry point for a new stack called ``my-super-cool-stack``:
+
+.. code-block:: yaml
+
+   .my-super-cool-stack:
+     extends: [ ".linux_x86_64_v3" ]
+     variables:
+       SPACK_CI_STACK_NAME: my-super-cool-stack
+     tags: [ "all", "tags", "your", "job", "needs" ]
+
+   my-super-cool-stack-generate:
+     extends: [ ".generate", ".my-super-cool-stack" ]
+     image: my-super-cool-stack-image:0.0.1
+
+   my-super-cool-stack-build:
+     extends: [ ".build", ".my-super-cool-stack" ]
+     trigger:
+       include:
+         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
+           job: my-super-cool-stack-generate
+       strategy: depend
+     needs:
+       - artifacts: True
+         job: my-super-cool-stack-generate
+
+Stack Configuration
+~~~~~~~~~~~~~~~~~~~
+
+The stack configuration is a Spack environment file with two additional sections added.
+Stack configurations should be located in ``share/spack/gitlab/cloud_pipelines/stacks/<stack_name>/spack.yaml``.
+
+The ``ci`` section is generally used to define stack-specific mappings such as image or tags.
+For more information on what can go into the ``ci`` section, refer to the docs on pipelines.
+
+The ``cdash`` section is used for defining where to upload the results of builds. Spack configures
+most of the details for posting pipeline results to
+`cdash.spack.io <https://cdash.spack.io/index.php?project=Spack+Testing>`_. The only
+requirement in the stack configuration is to define a ``build-group`` that is unique;
+this is usually the long name of the stack.
+
+An example stack that builds ``zlib``:
+
+.. code-block:: yaml
+
+   spack:
+     view: false
+     packages:
+       all:
+         require: ["%gcc", "target=x86_64_v3"]
+     specs:
+     - zlib
+
+     ci:
+       pipeline-gen:
+       - build-job:
+           image: my-super-cool-stack-image:0.0.1
+
+     cdash:
+       build-group: My Super Cool Stack
+
+.. note::
+
+   The ``image`` used in the ``*-generate`` job must match exactly the ``image`` used in the ``build-job``.
+   When the images do not match, the build job may fail.
+
+"""""""""""""""""""
+Registering Runners
+"""""""""""""""""""
+
+Contributing computational resources to Spack's CI build farm is one way to help expand the
+capabilities and offerings of the public Spack build caches. Currently, Spack utilizes Linux runners
+from AWS, Google, and the University of Oregon (UO).
+
+Runners require a few key pieces:
+
+* Runner Registration Token
+* Accurate tags
+* OIDC Authentication script
+* GPG keys
+
+Minimum GitLab Runner Version: ``16.1.0``
+(`installation instructions <https://docs.gitlab.com/runner/install/>`_)
+
+Registration Token
+~~~~~~~~~~~~~~~~~~
+
+The first step to contribute new runners is to open an issue in the `spack infrastructure <https://github.com/spack/spack-infrastructure/issues/new?assignees=&labels=runner-registration&projects=&template=runner_registration.yml>`_
+project. This will be reported to the Spack infrastructure team, who will guide users through the process
+of registering new runners for Spack CI.
+
+The information needed to register a runner is the motivation for the new resources, a semi-detailed description of
+the runner, and finally the point of contact for maintaining the software on the runner.
+
+The point of contact will then work with the infrastructure team to obtain runner registration token(s) for interacting
+with Spack's GitLab instance. Once the runner is active, this point of contact will also be responsible for updating the
+GitLab runner software to keep pace with Spack's GitLab.
+
+Tagging
+~~~~~~~
+
+In the initial stages of runner registration it is important to **exclude** the special tag ``spack``. This will prevent
+the new runner(s) from being picked up for production CI jobs while it is configured and evaluated. Once it is determined
+that the runner is ready for production use, the ``spack`` tag will be added.
+
+Because GitLab has no concept of tag exclusion, runners that provide specialized resources also require specialized tags.
+For example, a basic CPU-only x86_64 runner may have a tag ``x86_64`` associated with it. However, a runner containing a
+CUDA-capable GPU may have the tag ``x86_64-cuda`` to denote that it should only be used for packages that will benefit from
+a CUDA-capable resource.
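A hedged sketch of a non-interactive runner registration consistent with the tagging advice above (the URL, token variable, and executor are illustrative; depending on the runner version and token type, tags may instead be configured server-side, so treat the linked GitLab Runner docs as authoritative):

.. code-block:: console

   $ gitlab-runner register \
       --non-interactive \
       --url https://gitlab.spack.io \
       --token "$RUNNER_TOKEN" \
       --executor docker \
       --docker-image ubuntu:22.04 \
       --tag-list "x86_64-cuda"   # specialized tag; the special "spack" tag is added only after evaluation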
+OIDC
+~~~~
+
+Spack runners use OIDC authentication for connecting to the appropriate AWS bucket
+which is used for coordinating the communication of binaries between build jobs. In
+order to configure OIDC authentication, Spack CI runners use a python script with minimal
+dependencies. This script can be configured for runners as seen here using the ``pre_build_script``.
+
+.. code-block:: toml
+
+   [[runners]]
+     pre_build_script = """
+     echo 'Executing Spack pre-build setup script'
+
+     for cmd in "${PY3:-}" python3 python; do
+       if command -v > /dev/null "$cmd"; then
+         export PY3="$(command -v "$cmd")"
+         break
+       fi
+     done
+
+     if [ -z "${PY3:-}" ]; then
+       echo "Unable to find python3 executable"
+       exit 1
+     fi
+
+     $PY3 -c "import urllib.request;urllib.request.urlretrieve('https://raw.githubusercontent.com/spack/spack-infrastructure/main/scripts/gitlab_runner_pre_build/pre_build.py', 'pre_build.py')"
+     $PY3 pre_build.py > envvars
+
+     . ./envvars
+     rm -f envvars
+     unset GITLAB_OIDC_TOKEN
+     """
+
+GPG Keys
+~~~~~~~~
+
+Runners that may be utilized for ``protected`` CI require the registration of an intermediate signing key that
+can be used to sign packages. For more information on package signing read :ref:`key_architecture`.
+
 --------
 Coverage
 --------
@@ -181,10 +181,6 @@ Spec-related modules
 :mod:`spack.parser`
   Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

-:mod:`spack.concretize`
-  Contains :class:`~spack.concretize.Concretizer` implementation,
-  which allows site administrators to change Spack's :ref:`concretization-policies`.
-
 :mod:`spack.version`
   Implements a simple :class:`~spack.version.Version` class with simple
   comparison semantics. Also implements :class:`~spack.version.VersionRange`
@@ -337,13 +333,9 @@ inserting them at different places in the spack code base. Whenever a hook
 type triggers by way of a function call, we find all the hooks of that type,
 and run them.

-Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
-types of hooks in the ``__init__.py``, and then python files in that folder
-can use hook functions. The files are automatically parsed, so if you write
-a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
-you can then write hook functions in that file that will be automatically detected,
-and run whenever your hook is called. This section will cover the basic kind
-of hooks, and how to write them.
+Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
+This module has to be registered in ``__init__.py`` so that Spack is aware of it.
+This section will cover the basic kind of hooks, and how to write them.

 ^^^^^^^^^^^^^^
 Types of Hooks
@@ -716,27 +708,27 @@ Release branches
 ^^^^^^^^^^^^^^^^

 There are currently two types of Spack releases: :ref:`major releases
-<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
-<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
+<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
+<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
 diagram of how Spack release branches work::

-    o    branch: develop  (latest version, v0.19.0.dev0)
+    o    branch: develop  (latest version, v0.23.0.dev0)
     |
     o
-    | o  branch: releases/v0.18, tag: v0.18.1
+    | o  branch: releases/v0.22, tag: v0.22.1
     o |
-    | o  tag: v0.18.0
+    | o  tag: v0.22.0
     o |
     | o
     |/
     o
     |
     o
-    | o  branch: releases/v0.17, tag: v0.17.2
+    | o  branch: releases/v0.21, tag: v0.21.2
     o |
-    | o  tag: v0.17.1
+    | o  tag: v0.21.1
     o |
-    | o  tag: v0.17.0
+    | o  tag: v0.21.0
     o |
     | o
     |/
@@ -747,8 +739,8 @@ requests target ``develop``. The ``develop`` branch will report that its
 version is that of the next **major** release with a ``.dev0`` suffix.

 Each Spack release series also has a corresponding branch, e.g.
-``releases/v0.18`` has ``0.18.x`` versions of Spack, and
-``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
+``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
+``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
 tagged version on a release branch. Minor releases are back-ported from
 develop onto release branches. This is typically done by cherry-picking
 bugfix commits off of ``develop``.
@@ -778,27 +770,40 @@ for more details.
 Scheduling work for releases
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-We schedule work for releases by creating `GitHub projects
-<https://github.com/spack/spack/projects>`_. At any time, there may be
-several open release projects. For example, below are two releases (from
-some past version of the page linked above):
-
-.. image:: images/projects.png
-
-This image shows one release in progress for ``0.15.1`` and another for
-``0.16.0``. Each of these releases has a project board containing issues
-and pull requests. GitHub shows a status bar with completed work in
-green, work in progress in purple, and work not started yet in gray, so
-it's fairly easy to see progress.
+We schedule work for **major releases** through `milestones
+<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
+<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
+<https://github.com/spack/spack/labels>`_.
+
+There is only one milestone open at a time. Its name corresponds to the next major version, for
+example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
+core developers, so that they are not forgotten at the time of release. The milestone is closed
+when the release is made, and a new milestone is created for the next major release.
+
+Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
+assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
+issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
+Important issues should be assigned to the next milestone as well, so they appear at the top of
+the project board.

-Spack's project boards are not firm commitments so we move work between
-releases frequently. If we need to make a release and some tasks are not
-yet done, we will simply move them to the next minor or major release, rather
-than delaying the release to complete them.
+Spack's milestones are not firm commitments so we move work between releases frequently. If we
+need to make a release and some tasks are not yet done, we will simply move them to the next major
+release milestone, rather than delaying the release to complete them.

-For more on using GitHub project boards, see `GitHub's documentation
-<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
+^^^^^^^^^^^^^^^^^^^^^
+Backporting bug fixes
+^^^^^^^^^^^^^^^^^^^^^
+
+When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
+(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
+backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
+fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
+that the bug fix should be backported to.
+
+Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
+This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
+branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
+Typically there are one or two backport pull requests open at any given time.
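A hedged sketch of the cherry-picking workflow just described, for a hypothetical patch release ``v0.22.3`` (branch names follow the conventions in this section; the commit hash is a placeholder):

.. code-block:: console

   $ git checkout backports/v0.22.3     # opened from releases/v0.22 by the release manager
   $ git log --oneline develop          # find the squash-merge commit of the labelled PR
   $ git cherry-pick -x <sha-of-squashed-commit>
   $ git push origin backports/v0.22.3  # never force-push this branch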

.. _major-releases:

@@ -806,25 +811,21 @@ For more on using GitHub project boards, see `GitHub's documentation
 ^^^^^^^^^^^^^^^^^^^^^
 Making major releases
 ^^^^^^^^^^^^^^^^^^^^^

-Assuming a project board has already been created and all required work
-completed, the steps to make the major release are:
+Assuming all required work from the milestone is completed, the steps to make the major release
+are:

-#. Create two new project boards:
-
-   * One for the next major release
-   * One for the next point release
-
-#. Move any optional tasks that are not done to one of the new project boards.
-
-   In general, small bugfixes should go to the next point release. Major
-   features, refactors, and changes that could affect concretization should
-   go in the next major release.
+#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
+   release.
+
+#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.
+
+#. Move any optional tasks that are not done to the next milestone.

 #. Create a branch for the release, based on ``develop``:

    .. code-block:: console

-      $ git checkout -b releases/v0.15 develop
+      $ git checkout -b releases/v0.23 develop

    For a version ``vX.Y.Z``, the branch's name should be
    ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -860,8 +861,8 @@ completed, the steps to make the major release are:

    Create a pull request targeting the ``develop`` branch, bumping the major
    version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
-   For instance when you have just released ``v0.15.0``, set the version
-   to ``(0, 16, 0, 'dev0')`` on ``develop``.
+   For instance when you have just released ``v0.23.0``, set the version
+   to ``(0, 24, 0, 'dev0')`` on ``develop`` (a minimal sketch of this bump appears after this list).

 #. Follow the steps in :ref:`publishing-releases`.
@@ -870,82 +871,52 @@ completed, the steps to make the major release are:
 #. Follow the steps in :ref:`announcing-releases`.

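As referenced in the version-bump step above, here is a minimal sketch of the ``develop`` bump (it assumes the version tuple lives in ``lib/spack/spack/__init__.py`` exactly as this section describes; the branch name is illustrative):

.. code-block:: console

   $ git checkout -b bump-develop-version develop
   $ $EDITOR lib/spack/spack/__init__.py   # change (0, 23, 0, 'dev0') to (0, 24, 0, 'dev0')
   $ git commit -am "Set version to v0.24.0.dev0"
   # then open a pull request targeting develop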
.. _point-releases:
|
.. _patch-releases:
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
Making point releases
|
Making patch releases
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Assuming a project board has already been created and all required work
|
To make the patch release process both efficient and transparent, we use a *backports pull request*
|
||||||
completed, the steps to make the point release are:
|
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
|
||||||
|
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
|
||||||
|
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
|
||||||
|
changes are fresh in the mind of the developer.
|
||||||
|
|
||||||
#. Create a new project board for the next point release.
|
The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
|
||||||
|
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
|
||||||
|
|
||||||
#. Move any optional tasks that are not done to the next project board.
|
Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
|
||||||
|
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
|
||||||
|
squashed), cherry-pick each associated commit individually. Never force push to the
|
||||||
|
``backports/vX.Y.Z`` branch.
|
||||||
|
|
||||||
#. Check out the release branch (it should already exist).
|
.. warning::
|
||||||
|
|
||||||
For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
|
Sometimes you may **still** get merge conflicts even if you have
|
||||||
For ``v0.15.1``, you would check out ``releases/v0.15``:
|
cherry-picked all the commits in order. This generally means there
|
||||||
|
is some other intervening pull request that the one you're trying
|
||||||
|
to pick depends on. In these cases, you'll need to make a judgment
|
||||||
|
call regarding those pull requests. Consider the number of affected
|
||||||
|
files and/or the resulting differences.
|
||||||
|
|
||||||
.. code-block:: console
|
1. If the changes are small, you might just cherry-pick it.
|
||||||
|
|
||||||
$ git checkout releases/v0.15
|
2. If the changes are large, then you may decide that this fix is not
|
||||||
|
worth including in a patch release, in which case you should remove
|
||||||
|
the label from the pull request. Remember that large, manual backports
|
||||||
|
are seldom the right choice for a patch release.
|
||||||
|
|
||||||
#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
|
When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
|
||||||
in the project, create it. This pull request ought to be created as early as
|
release as follows:
|
||||||
possible when working on a release project, so that we can build the release
|
|
||||||
commits incrementally, and identify potential conflicts at an early stage.
|
|
||||||
|
|
||||||
#. Cherry-pick each pull request in the ``Done`` column of the release
|
#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
|
||||||
project board onto the ``Backports vX.Y.Z`` pull request.
|
release.
|
||||||
|
|
||||||
This is **usually** fairly simple since we squash the commits from the
|
#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
|
||||||
vast majority of pull requests. That means there is only one commit
|
|
||||||
per pull request to cherry-pick. For example, `this pull request
|
|
||||||
<https://github.com/spack/spack/pull/15777>`_ has three commits, but
|
|
||||||
they were squashed into a single commit on merge. You can see the
|
|
||||||
commit that was created here:
|
|
||||||
|
|
||||||
.. image:: images/pr-commit.png
|
#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
|
||||||
|
``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:
|
||||||
You can easily cherry pick it like this (assuming you already have the
|
|
||||||
release branch checked out):
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ git cherry-pick 7e46da7
|
|
||||||
|
|
||||||
For pull requests that were rebased (or not squashed), you'll need to
|
|
||||||
cherry-pick each associated commit individually.
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
It is important to cherry-pick commits in the order they happened,
|
|
||||||
otherwise you can get conflicts while cherry-picking. When
|
|
||||||
cherry-picking look at the merge date,
|
|
||||||
**not** the number of the pull request or the date it was opened.
|
|
||||||
|
|
||||||
Sometimes you may **still** get merge conflicts even if you have
|
|
||||||
cherry-picked all the commits in order. This generally means there
|
|
||||||
is some other intervening pull request that the one you're trying
|
|
||||||
to pick depends on. In these cases, you'll need to make a judgment
|
|
||||||
call regarding those pull requests. Consider the number of affected
|
|
||||||
files and or the resulting differences.
|
|
||||||
|
|
||||||
1. If the dependency changes are small, you might just cherry-pick it,
|
|
||||||
too. If you do this, add the task to the release board.
|
|
||||||
|
|
||||||
2. If the changes are large, then you may decide that this fix is not
|
|
||||||
worth including in a point release, in which case you should remove
|
|
||||||
the task from the release project.
|
|
||||||
|
|
||||||
3. You can always decide to manually back-port the fix to the release
|
|
||||||
branch if neither of the above options makes sense, but this can
|
|
||||||
require a lot of work. It's seldom the right choice.
|
|
||||||
|
|
||||||
#. When all the commits from the project board are cherry-picked into
|
|
||||||
the ``Backports vX.Y.Z`` pull request, you can push a commit to:
|
|
||||||
|
|
||||||
1. Bump the version in ``lib/spack/spack/__init__.py``.
|
1. Bump the version in ``lib/spack/spack/__init__.py``.
|
||||||
2. Update ``CHANGELOG.md`` with a list of the changes.
|
2. Update ``CHANGELOG.md`` with a list of the changes.
|
||||||
@@ -954,20 +925,22 @@ completed, the steps to make the point release are:

release branch. See `the changelog from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Make sure CI passes on the **backports pull request**, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
   is needed to keep track in the release branch of all the commits that were
   cherry-picked.

#. Make sure CI passes on the last commit of the **release branch**.

#. In the rare case you need to include additional commits in the patch release after the backports
   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
   repeat the process. Avoid repeated force pushes to the release branch.

#. Follow the steps in :ref:`publishing-releases`.
@@ -1042,25 +1015,31 @@ Updating `releases/latest`

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.22.3``:

* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
  it with ``releases/latest``, as these are higher than ``0.22.3``.

* If you are making a new release of an **older** major version of
  Spack, e.g. ``0.21.4``, then you should not tag it as
  ``releases/latest`` (as there are newer major versions).

To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

.. code-block:: console

   $ git fetch --force git@github.com:spack/spack vX.Y.Z

Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub:

.. code-block:: console

   $ git tag --force releases/latest vX.Y.Z
   $ git push --force git@github.com:spack/spack releases/latest

The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
local tags.

.. _announcing-releases:
@@ -5,49 +5,56 @@

.. _environments:

=====================================
Environments (spack.yaml, spack.lock)
=====================================

An environment is used to group a set of specs intended for some purpose
to be built, rebuilt, and deployed in a coherent fashion. Environments
define aspects of the installation of the software, such as:

#. *which* specs to install;
#. *how* those specs are configured; and
#. *where* the concretized software will be installed.

Aggregating this information into an environment for processing has advantages
over the *à la carte* approach of building and loading individual Spack modules.

With environments, you concretize, install, or load (activate) all of the
specs with a single command. Concretization fully configures the specs
and dependencies of the environment in preparation for installing the
software. This is a more robust solution than ad-hoc installation scripts.
And you can share an environment or even re-use it on a different computer.

Environment definitions, especially *how* specs are configured, allow the
software to remain stable and repeatable even when Spack packages are
upgraded. Changes are only picked up when the environment is explicitly
re-concretized.

Defining *where* specs are installed supports a filesystem view of the
environment. Yet Spack maintains a single installation of the software that
can be re-used across multiple environments.

Activating an environment determines *when* all of the associated (and
installed) specs are loaded, which limits the software loaded to those specs
actually needed by the environment. Spack can even generate a script to
load all modules related to an environment.

Other packaging systems also provide environments that are similar in
some ways to Spack environments; for example, `Conda environments
<https://conda.io/docs/user-guide/tasks/manage-environments.html>`_ or
`Python Virtual Environments
<https://docs.python.org/3/tutorial/venv.html>`_. Spack environments
provide some distinctive features though:

#. A spec installed "in" an environment is no different from the same
   spec installed anywhere else in Spack.
#. Spack environments may contain more than one spec of the same
   package.

Spack uses a "manifest and lock" model similar to `Bundler gemfiles
<https://bundler.io/man/gemfile.5.html>`_ and other package managers.
The environment's user input file (or manifest) is named ``spack.yaml``.
The lock file, which contains the fully configured and concretized specs,
is named ``spack.lock``.
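As a minimal sketch (the specs listed are illustrative), a manifest might
contain just a couple of abstract root specs; the corresponding ``spack.lock``
is generated by Spack at concretization time:

.. code-block:: yaml

   # spack.yaml (the manifest): abstract root specs, written by the user
   spack:
     specs:
     - zlib
     - bzip2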
.. _environments-using:
@@ -68,55 +75,60 @@ An environment is created by:

   $ spack env create myenv

The directory ``$SPACK_ROOT/var/spack/environments/myenv`` is created
to manage the environment.

.. note::

   All managed environments by default are stored in the
   ``$SPACK_ROOT/var/spack/environments`` folder. This location can be changed
   by setting the ``environments_root`` variable in ``config.yaml``.

Spack creates the file ``spack.yaml``, the hidden directory ``.spack-env``, and
the ``spack.lock`` file under ``$SPACK_ROOT/var/spack/environments/myenv``. User
interaction occurs through the ``spack.yaml`` file and the Spack commands
that affect it. Metadata and, by default, the view are stored in the
``.spack-env`` directory. When the environment is concretized, Spack creates
the ``spack.lock`` file with the fully configured specs and dependencies for
the environment.

The ``.spack-env`` subdirectory also contains:

* ``repo/``: A subdirectory acting as the repo consisting of the Spack
  packages used in the environment. It allows the environment to build
  the same, in theory, even on different versions of Spack with different
  packages!
* ``logs/``: A subdirectory containing the build logs for the packages
  in this environment.

Spack environments can also be created from either the user input (manifest)
file or the lockfile. Create an environment from a manifest using:

.. code-block:: console

   $ spack env create myenv spack.yaml

The resulting environment is guaranteed to have the same root specs as
the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).

Create an environment from a ``spack.lock`` file using:

.. code-block:: console

   $ spack env create myenv spack.lock

The resulting environment, when on the same or a compatible machine, is
guaranteed to initially have the same concrete specs as the original.

.. note::

   Environment creation also accepts a full path to the file, as in the
   example below.

   If the path is not under the ``$SPACK_ROOT/var/spack/environments``
   directory then the source is referred to as an
   :ref:`independent environment <independent_environments>`.
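For instance (a hypothetical path, for illustration only):

.. code-block:: console

   $ spack env create myenv ~/projects/myapp/spack.yaml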
^^^^^^^^^^^^^^^^^^^^^^^^^
Activating an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -129,7 +141,7 @@ To activate an environment, use the following command:

   $ spack env activate myenv

By default, ``spack env activate`` will load the view associated
with the environment into the user environment. The ``-v,
--with-view`` argument ensures this behavior, and the ``-V,
--without-view`` argument activates the environment without changing
the user environment variables.
@@ -142,8 +154,11 @@ user's prompt to begin with the environment name in brackets.

   $ spack env activate -p myenv
   [myenv] $ ...

The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and independent
environments can both be created using the same flags that ``spack env create``
accepts. If an environment already exists then spack will simply activate it
and ignore the create-specific flags.

.. code-block:: console
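
   # this block's content is elided in the diff; a sketch of create-on-activate
   # (the environment name is illustrative):
   $ spack env activate --create myenv2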
@@ -168,49 +183,50 @@ or the shortcut alias

If the environment was activated with its view, deactivating the
environment will remove the view from the user environment.

.. _independent_environments:

^^^^^^^^^^^^^^^^^^^^^^^^
Independent Environments
^^^^^^^^^^^^^^^^^^^^^^^^

Independent environments can be located in any directory outside of Spack.

.. note::

   When uninstalling packages, Spack asks the user to confirm the removal of packages
   that are still used in a managed environment. This is not the case for independent
   environments.

To create an independent environment, use one of the following commands:

.. code-block:: console

   $ spack env create --dir my_env
   $ spack env create ./my_env

As a shorthand, you can also create an independent environment upon activation if it does not
already exist:

.. code-block:: console

   $ spack env activate --create ./my_env

For convenience, Spack can also place an independent environment in a temporary directory for you:

.. code-block:: console

   $ spack env activate --temp
^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment-Aware Commands
^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack commands are environment-aware. For example, the ``find``
command shows only the specs in the active environment if an
environment has been activated. Otherwise it shows all specs in
the Spack instance. The same rule applies to the ``install`` and
``uninstall`` commands.

.. code-block:: console
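
   # this block's content is elided in the diff; a sketch comparing results
   # inside and outside an environment:
   [myenv] $ spack find
   $ spack env deactivate
   $ spack find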
@@ -255,32 +271,33 @@ Environment has been activated. Similarly, the ``install`` and

Note that when we installed the abstract spec ``zlib@1.2.8``, it was
presented as a root of the environment. All explicitly installed
packages will be listed as roots of the environment.

All of the Spack commands that act on the list of installed specs are
environment-aware in this way, including ``install``,
``uninstall``, ``find``, ``extensions``, etcetera. In the
:ref:`environment-configuration` section we will discuss
environment-aware commands further.
^^^^^^^^^^^^^^^^^^^^^
Adding Abstract Specs
^^^^^^^^^^^^^^^^^^^^^

An abstract spec is the user-specified spec before Spack applies
defaults or dependency information.

Users can add abstract specs to an environment using the ``spack add``
command. The most important component of an environment is a list of
abstract specs.

Adding a spec adds it as a root spec of the environment in the user
input file (``spack.yaml``). It does not affect the concrete specs
in the lock file (``spack.lock``) and it does not install the spec.

The ``spack add`` command is environment-aware. It adds the spec to the
currently active environment. An error is generated if there isn't an
active environment. All environment-aware commands can also
be called using the ``spack -e`` flag to specify the environment.

.. code-block:: console
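
   # this block's content is elided in the diff; a sketch of both forms
   # (mpileaks is an illustrative package):
   [myenv] $ spack add mpileaks
   $ spack -e myenv add mpileaks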
@@ -300,11 +317,11 @@ or

Concretizing
^^^^^^^^^^^^

Once user specs have been added to an environment, they can be concretized.
There are three different modes of operation to concretize an environment,
explained in detail in :ref:`environments_concretization_config`.
Regardless of which mode of operation is chosen, the following
command will ensure all of the root specs are concretized according to the
constraints that are prescribed in the configuration:

.. code-block:: console
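
   # this block's content is elided in the diff; concretize the active environment:
   [myenv]$ spack concretize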
@@ -313,16 +330,15 @@ constraints that are prescribed in the configuration:

In the case of specs that are not concretized together, the command
above will concretize only the specs that were added and not yet
concretized. Forcing a re-concretization of all of the specs can be done
by adding the ``-f`` option:

.. code-block:: console

   [myenv]$ spack concretize -f

Without the option, Spack guarantees that already concretized specs are
unchanged in the environment.

The ``concretize`` command does not install any packages. For packages
that have already been installed outside of the environment, the
@@ -355,16 +371,16 @@ installed specs using the ``-c`` (``--concretized``) flag.

Installing an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding individual specs to an environment, one
can install the entire environment at once using the command

.. code-block:: console

   [myenv]$ spack install

If the environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will concretize
the environment before installing the concretized specs.

.. note::
@@ -385,17 +401,17 @@ the Environment and then install the concretized specs.

As it installs, ``spack install`` creates symbolic links in the
``logs/`` directory in the environment, allowing for easy inspection
of build logs related to that environment. The ``spack install``
command also stores a Spack repo containing the ``package.py`` file
used at install time for each package in the ``repos/`` directory in
the environment.

The ``--no-add`` option can be used in a concrete environment to tell
spack to install specs already present in the environment but not to
add any new root specs to the environment. For root specs provided
to ``spack install`` on the command line, ``--no-add`` is the default,
while for dependency specs, it is optional. In other
words, if there is an unambiguous match in the active concrete environment
for a root spec provided to ``spack install`` on the command line, spack
does not require you to specify the ``--no-add`` option to prevent the spec
@@ -409,12 +425,22 @@ Developing Packages in a Spack Environment

The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided on the command line, then spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.

By default, ``spack develop`` will also clone the package to a subdirectory in the
environment for the local source. This package will have a special variant ``dev_path``
set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native check for modifications
is whether the source's ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects that use custom
Spack repos to drive development and want to optimize performance.

Spack ensures that all instances of a
developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.
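For example (a sketch; ``foo`` and its version are stand-ins for a real
package), a typical develop cycle looks like:

.. code-block:: console

   [myenv]$ spack develop foo@1.2.3
   [myenv]$ spack install
   # ...edit the cloned source under the environment directory...
   [myenv]$ spack install   # rebuilds foo and its dependents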
@@ -424,7 +450,7 @@ also be used as valid concrete versions (see :ref:`version-specifier`).

This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.

If the package being developed supports out-of-source builds then users can use the
@@ -609,7 +635,7 @@ manipulate configuration inline in the ``spack.yaml`` file.

Inline configurations
^^^^^^^^^^^^^^^^^^^^^

Inline environment-scope configuration is done using the same yaml
format as standard Spack configuration scopes, covered in the
:ref:`configuration` section. Each section is contained under a
top-level yaml object with its name. For example, a ``spack.yaml``
@@ -634,7 +660,7 @@ Included configurations

Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
them to the environment.

.. code-block:: yaml
@@ -647,6 +673,9 @@ them to the Environment.

Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.
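For instance (a sketch; the paths and URL are illustrative):

.. code-block:: yaml

   spack:
     include:
     - relative/path/to/config.yaml
     - /absolute/path/to/packages.yaml
     - https://example.com/spack/configs/compilers.yaml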
^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -661,7 +690,7 @@ have higher precedence, as the included configs are applied in reverse order.

Manually Editing the Specs List
-------------------------------

The list of abstract/root specs in the environment is maintained in
the ``spack.yaml`` manifest under the heading ``specs``.

.. code-block:: yaml
@@ -769,7 +798,7 @@ evaluates to the cross-product of those specs. Spec matrices also

contain an ``excludes`` directive, which eliminates certain
combinations from the evaluated result.

The following two environment manifests are identical:

.. code-block:: yaml
@@ -844,7 +873,7 @@ files are identical.

In short files like the example, it may be easier to simply list the
included specs. However for more complicated examples involving many
packages across many toolchains, separately factored lists make
environments substantially more manageable.

Additionally, the ``-l`` option to the ``spack add`` command allows
one to add to named lists in the definitions section of the manifest
@@ -863,7 +892,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on

   spack:
     definitions:
     - compilers: ['%gcc', '%clang']
     - when: arch.satisfies('target=x86_64:')
       compilers: ['%intel']

.. note::
@@ -893,9 +922,8 @@ The valid variables for a ``when`` clause are:

#. ``env``. The user environment (usually ``os.environ`` in Python).

#. ``hostname``. The hostname of the system (if ``hostname`` is an
   executable in the user's PATH).
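A short sketch combining these variables in ``when`` clauses (the hostname,
variable, and package names are illustrative):

.. code-block:: yaml

   spack:
     definitions:
     - mpis: [openmpi]
     - when: hostname == 'login1'
       mpis: [mvapich2]
     - when: env.get('FULL_STACK', 'no') == 'yes'
       mpis: [mpich]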
^^^^^^^^^^^^^^^^^^^^^^^^
SpecLists as Constraints
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1061,7 +1089,7 @@ true``). The argument ``--without-view`` can be used to create an

environment without any view configured.

The ``spack env view`` command can be used to manage the views
of an environment. The subcommand ``spack env view enable`` will add a
view named ``default`` to an environment. It takes an optional
argument to specify the path for the new default view. The subcommand
``spack env view disable`` will remove the view named ``default`` from
@@ -1229,7 +1257,7 @@ gets installed and is available for use in the ``env`` target.

   	$(SPACK) -e . env depfile -o $@ --make-prefix spack

   env: spack/env
   	$(info environment installed!)

   clean:
   	rm -rf spack.lock env.mk spack/
@@ -61,10 +61,15 @@ Getting Spack is easy. You can clone it from the `github repository

.. code-block:: console

   $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git

This will create a directory called ``spack``.

.. note::

   ``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.

   ``--depth=2`` prunes the git history to reduce the size of the Spack installation.

.. _shell-support:

^^^^^^^^^^^^^
@@ -1475,16 +1480,14 @@ in a Windows CMD prompt.

Step 3: Run and configure Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

On Windows, Spack supports both primary native shells, Powershell and the traditional command prompt.
To use Spack, pick your favorite shell, and run ``bin\spack_cmd.bat`` or ``share/spack/setup-env.ps1``
(you may need to Run as Administrator) from the top-level spack directory.
This will provide a Spack-enabled shell. If you receive a warning message that Python is not in your ``PATH``
(which may happen if you installed Python from the website and not the Windows Store) add the location
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
by using the ``Edit the system environment variables`` utility in Windows Control Panel.

To configure Spack, first run the following command inside the Spack console:

.. code-block:: console
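
   # this block's content is elided in the diff; the usual first step is
   # compiler detection (an assumption in this sketch):
   spack compiler find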
@@ -1549,7 +1552,7 @@ and not tabs, so ensure that this is the case when editing one directly.

.. note:: Cygwin
   The use of Cygwin is not officially supported by Spack and is not tested.
   However, Spack will not prevent this, so if you choose to use Spack
   with Cygwin, know that no functionality is guaranteed.

^^^^^^^^^^^^^^^^^
@@ -1563,21 +1566,12 @@ Spack console via:

   spack install cpuinfo

If, in the previous step, you did not have CMake or Ninja installed, running
the command above should install both packages.

.. note:: Spec Syntax Caveats
   Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
   See the Spack spec syntax doc for more information.

^^^^^^^^^^^^^^
For developers
^^^^^^^^^^^^^^
@@ -1587,6 +1581,3 @@ The intent is to provide a Windows installer that will automatically set up

Python, Git, and Spack, instead of requiring the user to do so manually.
Instructions for creating the installer are at
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
(Image changes: two images removed; ``lib/spack/docs/images/splices.png`` added.)
@@ -12,10 +12,6 @@

Spack
===================

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,
@@ -39,10 +35,15 @@ package:

.. code-block:: console

   $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
   $ cd spack/bin
   $ ./spack install libelf

.. note::

   ``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.

   ``--depth=2`` prunes the git history to reduce the size of the Spack installation.

If you're new to spack and want to start using it, see :doc:`getting_started`,
or refer to the full manual below.
@@ -457,11 +457,11 @@ For instance, the following config options,

   tcl:
     all:
       suffixes:
         ^python@3: 'python{^python.version}'
         ^openblas: 'openblas'

will add a ``python-3.12.1`` version string to any packages compiled with
Python matching the spec, ``python@3``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.
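The suffix then appears in generated module names; for instance (an
illustrative name only; the exact layout depends on your module configuration):

.. code-block:: console

   $ module avail
   ...
   py-numpy-1.26.4-python-3.12.1-openblas
   ...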
@@ -59,7 +59,7 @@ Functional Example
------------------

The simplest fully functional standalone example of a working pipeline can be
examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
on gitlab.com.

Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the
@@ -67,39 +67,46 @@ pipeline:

.. code-block:: yaml

   stages: [ "generate", "build" ]

   variables:
     SPACK_REPOSITORY: "https://github.com/spack/spack.git"
     SPACK_REF: "develop-2024-10-06"
     SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
     SPACK_BACKTRACE: 1

   generate-pipeline:
     tags:
     - saas-linux-small-amd64
     stage: generate
     image:
       name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
     script:
       - git clone ${SPACK_REPOSITORY}
       - cd spack && git checkout ${SPACK_REF} && cd ../
       - . "./spack/share/spack/setup-env.sh"
       - spack --version
       - spack env activate --without-view .
       - spack -d -v --color=always
         ci generate
         --check-index-only
         --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
     artifacts:
       paths:
       - "${CI_PROJECT_DIR}/jobs_scratch_dir"

   build-pipeline:
     stage: build
     trigger:
       include:
         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
           job: generate-pipeline
       strategy: depend
     needs:
     - artifacts: True
       job: generate-pipeline


The key thing to note above is that there are two jobs: The first job to run,
``generate-pipeline``, runs the ``spack ci generate`` command to generate a
@@ -114,82 +121,93 @@ And here's the spack environment built by the pipeline represented as a

   spack:
     view: false
     concretizer:
       unify: true
       reuse: false

     definitions:
     - pkgs:
       - zlib
       - bzip2 ~debug
     - compiler:
       - '%gcc'

     specs:
     - matrix:
       - - $pkgs
       - - $compiler

     ci:
       target: gitlab

       pipeline-gen:
       - any-job:
           tags:
           - saas-linux-small-amd64
           image:
             name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
           before_script:
           - git clone ${SPACK_REPOSITORY}
           - cd spack && git checkout ${SPACK_REF} && cd ../
           - . "./spack/share/spack/setup-env.sh"
           - spack --version
           - export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
           - spack config blame mirrors


.. note::

   The use of ``reuse: false`` in spack environments used for pipelines is
   almost always what you want, as without it your pipelines will not rebuild
   packages even if package hashes have changed. This is due to the concretizer
   strongly preferring known hashes when ``reuse: true``.

The ``ci`` section in the above environment file contains the bare minimum
configuration required for ``spack ci generate`` to create a working pipeline.
The ``target: gitlab`` tells spack that the desired pipeline output is for
gitlab. However, this isn't strictly required, as currently gitlab is the
only possible output format for pipelines. The ``pipeline-gen`` section
contains the key information needed to specify attributes for the generated
jobs. Notice that it contains a list which has only a single element in
this case. In real pipelines it will almost certainly have more elements,
and in those cases, order is important: spack starts at the bottom of the
list and works upwards when applying attributes.

But in this simple case, we use only the special key ``any-job`` to
indicate that spack should apply the specified attributes (``tags``, ``image``,
and ``before_script``) to any job it generates. This includes jobs for
building/pushing all packages, a ``rebuild-index`` job at the end of the
pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
no rebuilds are required.

Something to note is that in this simple case, we rely on spack to
generate a reasonable script for the package build jobs (it just creates
a script that invokes ``spack ci rebuild``).

Another thing to note is the use of the ``SPACK_USER_CONFIG_PATH`` environment
variable in any generated jobs. The purpose of this is to make spack
aware of one final file in the example, the one that contains the mirror
configuration. This file, ``mirrors.yaml``, looks like this:

.. code-block:: yaml

   mirrors:
     buildcache-destination:
       url: oci://registry.gitlab.com/spack/pipeline-quickstart
       binary: true
       access_pair:
         id_variable: CI_REGISTRY_USER
         secret_variable: CI_REGISTRY_PASSWORD


Note the name of the mirror is ``buildcache-destination``, which is required
as of Spack 0.23 (see below for more information). The mirror url simply
points to the container registry associated with the project, while
``id_variable`` and ``secret_variable`` refer to environment variables
containing the access credentials for the mirror.

When spack builds packages for this example project, they will be pushed to
the project container registry, where they will be available for subsequent
jobs to install as dependencies, or for other pipelines to use to build runnable
container images.
-----------------------------------
|
-----------------------------------
|
||||||
Spack commands supporting pipelines
|
Spack commands supporting pipelines
|
||||||
@@ -417,15 +435,6 @@ configuration with a ``script`` attribute. Specifying a signing job without a sc
 does not create a signing job and the job configuration attributes will be ignored.
 Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.
 
-^^^^^^^^^^^^^^^^^
-Cleanup (cleanup)
-^^^^^^^^^^^^^^^^^
-
-When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
-created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
-script, but do expect that the spack command is in the path and require a
-``before_script`` to be specified that sources the ``setup-env.sh`` script.
-
 .. _noop_jobs:
 
 ^^^^^^^^^^^^
@@ -592,6 +601,77 @@ the attributes will be merged starting from the bottom match going up to the top
 In the case that no match is found in a submapping section, no additional attributes will be applied.
 
+^^^^^^^^^^^^^^^^^^^^^^^^
+Dynamic Mapping Sections
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+For large scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
+mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
+behavior, but it does follow the rest of the merge rules for configurations.
+
+The dynamic mapping service needs to implement a single REST API interface for getting
+requests ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.
+
+An example request:
+
+.. code-block::
+
+   https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0
+
+Here is an example response that updates the kubernetes request variables, overrides the max retries for gitlab,
+and prepends a note about the modifications made by the my-dyn-mapping.spack.io service.
+
+.. code-block::
+
+   200 OK
+
+   {
+      "variables":
+      {
+         "KUBERNETES_CPU_REQUEST": "500m",
+         "KUBERNETES_MEMORY_REQUEST": "2G"
+      },
+      "retry": { "max": "1" },
+      "script+:":
+      [
+         "echo \"Job modified by my-dyn-mapping.spack.io\""
+      ]
+   }
+
+The ci.yaml configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.
+
+It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``
+respectively. It is also possible to configure required attributes under the ``require`` section.
+
+The client timeout and SSL verification can be configured through the ``timeout`` and ``verify_ssl`` options.
+By default, ``timeout`` is set to the option in ``config:timeout`` and ``verify_ssl`` is set to the option in ``config:verify_ssl``.
+
+Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
+header may be environment variables that are expanded at runtime, such as a private token configured on the runner.
+
+Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``.
+
+.. code-block:: yaml
+
+   ci:
+   - dynamic-mapping:
+       endpoint: my-dyn-mapping.spack.io/allocation
+       timeout: 10
+       verify_ssl: True
+       header:
+         PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
+         MY_CONFIG: "fuzz_allocation:false"
+       allow:
+       - variables
+       ignore:
+       - script
+       require: []
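Concretely, the service only has to answer that single ``GET`` with a JSON object of job attributes. Below is a minimal sketch of such a service using only the Python standard library; the port, the handler name, and the memory-sizing policy are illustrative assumptions, not part of Spack's contract.

.. code-block:: python

   import json
   from http.server import BaseHTTPRequestHandler, HTTPServer
   from urllib.parse import parse_qs, urlparse

   class AllocationHandler(BaseHTTPRequestHandler):
       def do_GET(self):
           # Extract the spec from "?spec=...". The mapping policy below is a toy.
           spec = parse_qs(urlparse(self.path).query).get("spec", [""])[0]
           memory = "8G" if "llvm" in spec else "2G"
           body = json.dumps({"variables": {"KUBERNETES_MEMORY_REQUEST": memory}})
           self.send_response(200)
           self.send_header("Content-Type", "application/json")
           self.end_headers()
           self.wfile.write(body.encode("utf-8"))

   if __name__ == "__main__":
       HTTPServer(("localhost", 8080), AllocationHandler).serve_forever()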
 ^^^^^^^^^^^^^
 Bootstrapping
 ^^^^^^^^^^^^^
@@ -663,26 +743,13 @@ build the package.
 When including a bootstrapping phase as in the example above, the result is that
 the bootstrapped compiler packages will be pushed to the binary mirror (and the
-local artifacts mirror) before the actual release specs are built. In this case,
-the jobs corresponding to subsequent release specs are configured to
-``install_missing_compilers``, so that if spack is asked to install a package
-with a compiler it doesn't know about, it can be quickly installed from the
-binary mirror first.
+local artifacts mirror) before the actual release specs are built.
 
 Since bootstrapping compilers is optional, those items can be left out of the
 environment/stack file, and in that case no bootstrapping will be done (only the
 specs will be staged for building) and the runners will be expected to already
 have all needed compilers installed and configured for spack to use.
 
-^^^^^^^^^^^^^^^^^^^
-Pipeline Buildcache
-^^^^^^^^^^^^^^^^^^^
-
-The ``enable-artifacts-buildcache`` key
-takes a boolean and determines whether the pipeline uses artifacts to store and
-pass along the buildcaches from one stage to the next (the default if you don't
-provide this option is ``False``).
-
 ^^^^^^^^^^^^^^^^
 Broken Specs URL
 ^^^^^^^^^^^^^^^^
@@ -1,13 +1,13 @@
-sphinx==7.4.7
+sphinx==8.1.3
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.6.1
-sphinx-rtd-theme==2.0.0
+sphinx-rtd-theme==3.0.1
-python-levenshtein==0.25.1
+python-levenshtein==0.26.1
-docutils==0.20.1
+docutils==0.21.2
 pygments==2.18.0
-urllib3==2.2.2
+urllib3==2.2.3
-pytest==8.3.2
+pytest==8.3.3
 isort==5.13.2
-black==24.8.0
+black==24.10.0
 flake8==7.1.1
 mypy==1.11.1
lib/spack/external/__init__.py (vendored)
@@ -18,7 +18,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
+* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
 
 astunparse
 ----------------
@@ -81,8 +81,13 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
         self.generation = generation
         # Only relevant for AArch64
         self.cpu_part = cpu_part
-        # Cache the ancestor computation
+        # Cache the "ancestor" computation
         self._ancestors = None
+        # Cache the "generic" computation
+        self._generic = None
+        # Cache the "family" computation
+        self._family = None
 
     @property
     def ancestors(self):
@@ -115,6 +120,9 @@ def __eq__(self, other):
             and self.cpu_part == other.cpu_part
         )
 
+    def __hash__(self):
+        return hash(self.name)
+
     @coerce_target_names
     def __ne__(self, other):
         return not self == other
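The added ``__hash__`` matters because defining ``__eq__`` makes Python set ``__hash__`` to ``None``, so instances would stop working in sets and as dict keys. A standalone sketch of that rule (illustrative class, not the vendored one):

.. code-block:: python

   class Target:
       def __init__(self, name):
           self.name = name

       def __eq__(self, other):
           return isinstance(other, Target) and self.name == other.name

       # Without this, Python sets __hash__ = None because __eq__ is defined,
       # and Target instances become unusable in sets and as dict keys.
       def __hash__(self):
           return hash(self.name)

   assert len({Target("zen4"), Target("zen4")}) == 1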
@@ -171,18 +179,22 @@ def __contains__(self, feature):
     @property
     def family(self):
         """Returns the architecture family a given target belongs to"""
-        roots = [x for x in [self] + self.ancestors if not x.ancestors]
-        msg = "a target is expected to belong to just one architecture family"
-        msg += f"[found {', '.join(str(x) for x in roots)}]"
-        assert len(roots) == 1, msg
-
-        return roots.pop()
+        if self._family is None:
+            roots = [x for x in [self] + self.ancestors if not x.ancestors]
+            msg = "a target is expected to belong to just one architecture family"
+            msg += f"[found {', '.join(str(x) for x in roots)}]"
+            assert len(roots) == 1, msg
+            self._family = roots.pop()
+
+        return self._family
 
     @property
     def generic(self):
         """Returns the best generic architecture that is compatible with self"""
-        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
-        return max(generics, key=lambda x: len(x.ancestors))
+        if self._generic is None:
+            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
+            self._generic = max(generics, key=lambda x: len(x.ancestors))
+        return self._generic
 
     def to_dict(self):
         """Returns a dictionary representation of this object."""
@@ -1482,7 +1482,6 @@
       "cldemote",
       "movdir64b",
       "movdiri",
-      "pdcm",
       "serialize",
       "waitpkg"
     ],
@@ -2237,6 +2236,84 @@
       ]
     }
   },
+  "zen5": {
+    "from": ["zen4"],
+    "vendor": "AuthenticAMD",
+    "features": [
+      "abm",
+      "aes",
+      "avx",
+      "avx2",
+      "avx512_bf16",
+      "avx512_bitalg",
+      "avx512bw",
+      "avx512cd",
+      "avx512dq",
+      "avx512f",
+      "avx512ifma",
+      "avx512vbmi",
+      "avx512_vbmi2",
+      "avx512vl",
+      "avx512_vnni",
+      "avx512_vp2intersect",
+      "avx512_vpopcntdq",
+      "avx_vnni",
+      "bmi1",
+      "bmi2",
+      "clflushopt",
+      "clwb",
+      "clzero",
+      "cppc",
+      "cx16",
+      "f16c",
+      "flush_l1d",
+      "fma",
+      "fsgsbase",
+      "gfni",
+      "ibrs_enhanced",
+      "mmx",
+      "movbe",
+      "movdir64b",
+      "movdiri",
+      "pclmulqdq",
+      "popcnt",
+      "rdseed",
+      "sse",
+      "sse2",
+      "sse4_1",
+      "sse4_2",
+      "sse4a",
+      "ssse3",
+      "tsc_adjust",
+      "vaes",
+      "vpclmulqdq",
+      "xsavec",
+      "xsaveopt"
+    ],
+    "compilers": {
+      "gcc": [
+        {
+          "versions": "14.1:",
+          "name": "znver5",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ],
+      "aocc": [
+        {
+          "versions": "5.0:",
+          "name": "znver5",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ],
+      "clang": [
+        {
+          "versions": "19.1:",
+          "name": "znver5",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ]
+    }
+  },
   "ppc64": {
     "from": [],
     "vendor": "generic",
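Once the JSON entry is in place, the new target is reachable through archspec's regular query API. A short sketch, assuming the vendored archspec version that ships this file (outputs reflect the entry above):

.. code-block:: python

   import archspec.cpu

   zen5 = archspec.cpu.TARGETS["zen5"]
   print(zen5.family)                             # x86_64
   print("avx512_bf16" in zen5)                   # True: feature queries use `in`
   print(zen5.optimization_flags("gcc", "14.1"))  # -march=znver5 -mtune=znver5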
@@ -2844,8 +2921,7 @@
       "asimdrdm",
       "lrcpc",
       "dcpop",
-      "asimddp",
-      "ssbs"
+      "asimddp"
     ],
     "compilers" : {
       "gcc": [
@@ -2942,7 +3018,6 @@
       "uscat",
       "ilrcpc",
       "flagm",
-      "ssbs",
       "dcpodp",
       "svei8mm",
       "svebf16",
@@ -3010,7 +3085,7 @@
     },
     {
       "versions": "11:",
-      "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
+      "flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
     },
     {
       "versions": "12:",
@@ -3066,7 +3141,6 @@
       "uscat",
       "ilrcpc",
       "flagm",
-      "ssbs",
       "sb",
       "dcpodp",
       "sve2",
@@ -3179,7 +3253,6 @@
       "uscat",
       "ilrcpc",
       "flagm",
-      "ssbs",
       "sb",
       "dcpodp",
       "sve2",
@@ -41,6 +41,20 @@ def comma_and(sequence: List[str]) -> str:
     return comma_list(sequence, "and")
 
 
+def ordinal(number: int) -> str:
+    """Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.
+
+    Args:
+        number: int to convert to ordinal number
+
+    Returns: number's corresponding ordinal
+    """
+    idx = (number % 10) << 1
+    tens = number % 100 // 10
+    suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
+    return f"{number}{suffix}"
+
+
 def quote(sequence: List[str], q: str = "'") -> List[str]:
     """Quotes each item in the input list with the quote character passed as second argument."""
     return [f"{q}{e}{q}" for e in sequence]
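The suffix trick packs ``th``/``st``/``nd``/``rd`` into the string ``"thstndrd"`` and indexes it with twice the last digit, falling back to ``th`` for the teens and for digits above 3. A quick illustration of what it returns:

.. code-block:: python

   from llnl.string import ordinal

   for n in (1, 2, 3, 4, 11, 12, 13, 21, 102):
       print(ordinal(n))
   # 1st, 2nd, 3rd, 4th, 11th, 12th, 13th, 21st, 102nd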
@@ -27,8 +27,6 @@
 from llnl.util.lang import dedupe, memoized
 from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
 
-from spack.util.executable import Executable, which
-
 from ..path import path_to_os_path, system_path_filter
 
 if sys.platform != "win32":
@@ -49,11 +47,11 @@
     "copy_mode",
     "filter_file",
     "find",
+    "find_first",
     "find_headers",
     "find_all_headers",
     "find_libraries",
     "find_system_libraries",
-    "fix_darwin_install_name",
     "force_remove",
     "force_symlink",
     "getuid",
@@ -248,42 +246,6 @@ def path_contains_subdirectory(path, root):
     return norm_path.startswith(norm_root)
 
 
-@memoized
-def file_command(*args):
-    """Creates entry point to `file` system command with provided arguments"""
-    file_cmd = which("file", required=True)
-    for arg in args:
-        file_cmd.add_default_arg(arg)
-    return file_cmd
-
-
-@memoized
-def _get_mime_type():
-    """Generate method to call `file` system command to aquire mime type
-    for a specified path
-    """
-    if sys.platform == "win32":
-        # -h option (no-dereference) does not exist in Windows
-        return file_command("-b", "--mime-type")
-    else:
-        return file_command("-b", "-h", "--mime-type")
-
-
-def mime_type(filename):
-    """Returns the mime type and subtype of a file.
-
-    Args:
-        filename: file to be analyzed
-
-    Returns:
-        Tuple containing the MIME type and subtype
-    """
-    output = _get_mime_type()(filename, output=str, error=str).strip()
-    tty.debug("==> " + output)
-    type, _, subtype = output.partition("/")
-    return type, subtype
-
-
 #: This generates the library filenames that may appear on any OS.
 library_extensions = ["a", "la", "so", "tbd", "dylib"]
@@ -1679,41 +1641,6 @@ def safe_remove(*files_or_dirs):
         raise
 
 
-@system_path_filter
-def fix_darwin_install_name(path):
-    """Fix install name of dynamic libraries on Darwin to have full path.
-
-    There are two parts of this task:
-
-    1. Use ``install_name('-id', ...)`` to change install name of a single lib
-    2. Use ``install_name('-change', ...)`` to change the cross linking between
-       libs. The function assumes that all libraries are in one folder and
-       currently won't follow subfolders.
-
-    Parameters:
-        path (str): directory in which .dylib files are located
-    """
-    libs = glob.glob(join_path(path, "*.dylib"))
-    for lib in libs:
-        # fix install name first:
-        install_name_tool = Executable("install_name_tool")
-        install_name_tool("-id", lib, lib)
-        otool = Executable("otool")
-        long_deps = otool("-L", lib, output=str).split("\n")
-        deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
-        # fix all dependencies:
-        for dep in deps:
-            for loc in libs:
-                # We really want to check for either
-                #     dep == os.path.basename(loc)   or
-                #     dep == join_path(builddir, os.path.basename(loc)),
-                # but we don't know builddir (nor how symbolic links look
-                # in builddir). We thus only compare the basenames.
-                if os.path.basename(dep) == os.path.basename(loc):
-                    install_name_tool("-change", dep, loc, lib)
-                    break
-
-
 def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
     """Find the first file matching a pattern.
@@ -11,8 +11,9 @@
 import re
 import sys
 import traceback
+import warnings
 from datetime import datetime, timedelta
-from typing import Any, Callable, Iterable, List, Tuple
+from typing import Callable, Iterable, List, Tuple, TypeVar
 
 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -870,18 +871,12 @@ class UnhashableArguments(TypeError):
     """Raise when an @memoized function receives unhashable arg or kwarg values."""
 
 
-def enum(**kwargs):
-    """Return an enum-like class.
-
-    Args:
-        **kwargs: explicit dictionary of enums
-    """
-    return type("Enum", (object,), kwargs)
+T = TypeVar("T")
 
 
 def stable_partition(
-    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
-) -> Tuple[List[Any], List[Any]]:
+    input_iterable: Iterable[T], predicate_fn: Callable[[T], bool]
+) -> Tuple[List[T], List[T]]:
     """Partition the input iterable according to a custom predicate.
 
     Args:
@@ -893,12 +888,13 @@ def stable_partition(
         Tuple of the list of elements evaluating to True, and
         list of elements evaluating to False.
     """
-    true_items, false_items = [], []
+    true_items: List[T] = []
+    false_items: List[T] = []
     for item in input_iterable:
         if predicate_fn(item):
             true_items.append(item)
-            continue
-        false_items.append(item)
+        else:
+            false_items.append(item)
     return true_items, false_items
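With the ``TypeVar``, callers now get properly typed lists back. A small usage sketch:

.. code-block:: python

   from llnl.util.lang import stable_partition

   evens, odds = stable_partition([3, 1, 4, 1, 5, 9, 2, 6], lambda n: n % 2 == 0)
   # evens == [4, 2, 6], odds == [3, 1, 1, 5, 9] -- relative order is preserved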
@@ -910,6 +906,21 @@ def ensure_last(lst, *elements):
     lst.append(lst.pop(lst.index(elt)))
 
 
+class Const:
+    """Class level constant, raises when trying to set the attribute"""
+
+    __slots__ = ["value"]
+
+    def __init__(self, value):
+        self.value = value
+
+    def __get__(self, instance, owner):
+        return self.value
+
+    def __set__(self, instance, value):
+        raise TypeError(f"Const value does not support assignment [value={self.value}]")
+
+
 class TypedMutableSequence(collections.abc.MutableSequence):
     """Base class that behaves like a list, just with a different type.
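``Const`` is a data descriptor, so it only takes effect as a class attribute: reads return the wrapped value and instance assignment raises. A hypothetical use:

.. code-block:: python

   class Settings:
       max_retries = Const(2)  # class-level constant via the descriptor above

   s = Settings()
   print(s.max_retries)  # 2
   s.max_retries = 5     # raises TypeError: Const value does not support assignment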
@@ -1014,3 +1025,42 @@ def __init__(self, callback):
 
     def __get__(self, instance, owner):
         return self.callback(owner)
+
+
+class DeprecatedProperty:
+    """Data descriptor to error or warn when a deprecated property is accessed.
+
+    Derived classes must define a factory method to return an adaptor for the deprecated
+    property, if the descriptor is not set to error.
+    """
+
+    __slots__ = ["name"]
+
+    #: 0 - Nothing
+    #: 1 - Warning
+    #: 2 - Error
+    error_lvl = 0
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+
+        if self.error_lvl == 1:
+            warnings.warn(
+                f"accessing the '{self.name}' property of '{instance}', which is deprecated"
+            )
+        elif self.error_lvl == 2:
+            raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")
+
+        return self.factory(instance, owner)
+
+    def __set__(self, instance, value):
+        raise TypeError(
+            f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
+        )
+
+    def factory(self, instance, owner):
+        raise NotImplementedError("must be implemented by derived classes")
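A sketch of how a derived descriptor could be wired up, assuming the ``DeprecatedProperty`` class above; the subclass and attribute names are hypothetical:

.. code-block:: python

   class DeprecatedPlatform(DeprecatedProperty):
       # Adaptor used while access is still permitted (error_lvl < 2)
       def factory(self, instance, owner):
           return instance.current_platform

   class Spec:
       platform = DeprecatedPlatform("platform")

       def __init__(self, current_platform):
           self.current_platform = current_platform

   DeprecatedPlatform.error_lvl = 1  # 1 = warn on access
   print(Spec("linux").platform)     # warns, then returns "linux"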
@@ -263,7 +263,9 @@ def match_to_ansi(match):
                 f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
             )
 
-        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
+        color_number = colors.get(color_code, "")
+        semi = ";" if color_number else ""
+        ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
         if text:
             return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
         else:
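The point of the change: when a style has no color component, the old code produced SGR sequences with a dangling separator, e.g. ``ESC[1;m``. A standalone illustration of the corrected formatting logic:

.. code-block:: python

   # Emit the separator only when a color number is actually present.
   for style, color_number in (("1", "31"), ("1", "")):
       semi = ";" if color_number else ""
       print(f"\033[{style}{semi}{color_number}m bold demo \033[0m")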
@@ -10,7 +10,6 @@
 import errno
 import io
 import multiprocessing
-import multiprocessing.connection
 import os
 import re
 import select
@@ -19,9 +18,10 @@
 import threading
 import traceback
 from contextlib import contextmanager
+from multiprocessing.connection import Connection
 from threading import Thread
 from types import ModuleType
-from typing import Optional
+from typing import Callable, Optional
 
 import llnl.util.tty as tty
 
@@ -345,49 +345,6 @@ def close(self):
         self.file.close()
 
 
-class MultiProcessFd:
-    """Return an object which stores a file descriptor and can be passed as an
-    argument to a function run with ``multiprocessing.Process``, such that
-    the file descriptor is available in the subprocess."""
-
-    def __init__(self, fd):
-        self._connection = None
-        self._fd = None
-        if sys.version_info >= (3, 8):
-            self._connection = multiprocessing.connection.Connection(fd)
-        else:
-            self._fd = fd
-
-    @property
-    def fd(self):
-        if self._connection:
-            return self._connection._handle
-        else:
-            return self._fd
-
-    def close(self):
-        if self._connection:
-            self._connection.close()
-        else:
-            os.close(self._fd)
-
-
-def close_connection_and_file(multiprocess_fd, file):
-    # MultiprocessFd is intended to transmit a FD
-    # to a child process, this FD is then opened to a Python File object
-    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
-    # multiprocessing.connection.Connection; Connection closes the FD
-    # when it is deleted, and prints a warning about duplicate closure if
-    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
-    # simple FD; closing the FD here appears to conflict with
-    # closure of the File object (in < 3.8 that is). Therefore this needs
-    # to choose whether to close the File or the Connection.
-    if sys.version_info >= (3, 8):
-        multiprocess_fd.close()
-    else:
-        file.close()
-
-
 @contextmanager
 def replace_environment(env):
     """Replace the current environment (`os.environ`) with `env`.
@@ -545,22 +502,20 @@ def __enter__(self):
         # forcing debug output.
         self._saved_debug = tty._debug
 
-        # OS-level pipe for redirecting output to logger
-        read_fd, write_fd = os.pipe()
-
-        read_multiprocess_fd = MultiProcessFd(read_fd)
-
-        # Multiprocessing pipe for communication back from the daemon
+        # Pipe for redirecting output to logger
+        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)
+
+        # Pipe for communication back from the daemon
         # Currently only used to save echo value between uses
-        self.parent_pipe, child_pipe = multiprocessing.Pipe()
+        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)
 
         # Sets a daemon that writes to file what it reads from a pipe
         try:
             # need to pass this b/c multiprocessing closes stdin in child.
-            input_multiprocess_fd = None
+            input_fd = None
             try:
                 if sys.stdin.isatty():
-                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
+                    input_fd = Connection(os.dup(sys.stdin.fileno()))
             except BaseException:
                 # just don't forward input if this fails
                 pass
@@ -569,9 +524,9 @@ def __enter__(self):
             self.process = multiprocessing.Process(
                 target=_writer_daemon,
                 args=(
-                    input_multiprocess_fd,
-                    read_multiprocess_fd,
-                    write_fd,
+                    input_fd,
+                    read_fd,
+                    self.write_fd,
                     self.echo,
                     self.log_file,
                     child_pipe,
@@ -582,9 +537,9 @@ def __enter__(self):
             self.process.start()
 
         finally:
-            if input_multiprocess_fd:
-                input_multiprocess_fd.close()
-            read_multiprocess_fd.close()
+            if input_fd:
+                input_fd.close()
+            read_fd.close()
 
         # Flush immediately before redirecting so that anything buffered
         # goes to the original stream
@@ -602,9 +557,9 @@ def __enter__(self):
             self._saved_stderr = os.dup(sys.stderr.fileno())
 
             # redirect to the pipe we created above
-            os.dup2(write_fd, sys.stdout.fileno())
-            os.dup2(write_fd, sys.stderr.fileno())
-            os.close(write_fd)
+            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
+            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
+            self.write_fd.close()
 
         else:
             # Handle I/O the Python way. This won't redirect lower-level
@@ -617,7 +572,7 @@ def __enter__(self):
             self._saved_stderr = sys.stderr
 
             # create a file object for the pipe; redirect to it.
-            pipe_fd_out = os.fdopen(write_fd, "w")
+            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
             sys.stdout = pipe_fd_out
             sys.stderr = pipe_fd_out
@@ -653,6 +608,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         else:
             sys.stdout = self._saved_stdout
             sys.stderr = self._saved_stderr
+            self.write_fd.close()
 
         # print log contents in parent if needed.
         if self.log_file.write_in_parent:
@@ -866,14 +822,14 @@ def force_echo(self):
 
 
 def _writer_daemon(
-    stdin_multiprocess_fd,
-    read_multiprocess_fd,
-    write_fd,
-    echo,
-    log_file_wrapper,
-    control_pipe,
-    filter_fn,
-):
+    stdin_fd: Optional[Connection],
+    read_fd: Connection,
+    write_fd: Connection,
+    echo: bool,
+    log_file_wrapper: FileWrapper,
+    control_fd: Connection,
+    filter_fn: Optional[Callable[[str], str]],
+) -> None:
     """Daemon used by ``log_output`` to write to a log file and to ``stdout``.
 
     The daemon receives output from the parent process and writes it both
@@ -910,43 +866,37 @@ def _writer_daemon(
         ``StringIO`` in the parent. This is mainly for testing.
 
     Arguments:
-        stdin_multiprocess_fd (int): input from the terminal
-        read_multiprocess_fd (int): pipe for reading from parent's redirected
-            stdout
-        echo (bool): initial echo setting -- controlled by user and
-            preserved across multiple writer daemons
-        log_file_wrapper (FileWrapper): file to log all output
-        control_pipe (Pipe): multiprocessing pipe on which to send control
-            information to the parent
-        filter_fn (callable, optional): function to filter each line of output
+        stdin_fd: optional input from the terminal
+        read_fd: pipe for reading from parent's redirected stdout
+        echo: initial echo setting -- controlled by user and preserved across multiple writer
+            daemons
+        log_file_wrapper: file to log all output
+        control_fd: multiprocessing pipe on which to send control information to the parent
+        filter_fn: optional function to filter each line of output
 
     """
-    # If this process was forked, then it will inherit file descriptors from
-    # the parent process. This process depends on closing all instances of
-    # write_fd to terminate the reading loop, so we close the file descriptor
-    # here. Forking is the process spawning method everywhere except Mac OS
-    # for Python >= 3.8 and on Windows
-    if sys.version_info < (3, 8) or sys.platform != "darwin":
-        os.close(write_fd)
+    # This process depends on closing all instances of write_pipe to terminate the reading loop
+    write_fd.close()
 
     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
     #    that prevents unbuffered text I/O.
     # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
-    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
+    # 3. closefd=False because Connection has "ownership"
+    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
 
-    if stdin_multiprocess_fd:
-        stdin = os.fdopen(stdin_multiprocess_fd.fd)
+    if stdin_fd:
+        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
     else:
-        stdin = None
+        stdin_file = None
 
     # list of streams to select from
-    istreams = [in_pipe, stdin] if stdin else [in_pipe]
+    istreams = [read_file, stdin_file] if stdin_file else [read_file]
     force_echo = False  # parent can force echo for certain output
 
     log_file = log_file_wrapper.unwrap()
 
     try:
-        with keyboard_input(stdin) as kb:
+        with keyboard_input(stdin_file) as kb:
             while True:
                 # fix the terminal settings if we recently came to
                 # the foreground
@@ -959,12 +909,12 @@ def _writer_daemon(
                 # Allow user to toggle echo with 'v' key.
                 # Currently ignores other chars.
                 # only read stdin if we're in the foreground
-                if stdin in rlist and not _is_background_tty(stdin):
+                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                     # it's possible to be backgrounded between the above
                     # check and the read, so we ignore SIGTTIN here.
                     with ignore_signal(signal.SIGTTIN):
                         try:
-                            if stdin.read(1) == "v":
+                            if stdin_file.read(1) == "v":
                                 echo = not echo
                         except IOError as e:
                             # If SIGTTIN is ignored, the system gives EIO
@@ -973,13 +923,13 @@ def _writer_daemon(
                 if e.errno != errno.EIO:
                     raise
 
-                if in_pipe in rlist:
+                if read_file in rlist:
                     line_count = 0
                     try:
                         while line_count < 100:
                             # Handle output from the calling process.
                             try:
-                                line = _retry(in_pipe.readline)()
+                                line = _retry(read_file.readline)()
                             except UnicodeDecodeError:
                                 # installs like --test=root gpgme produce non-UTF8 logs
                                 line = "<line lost: output was not encoded as UTF-8>\n"
@@ -1008,7 +958,7 @@ def _writer_daemon(
                             if xoff in controls:
                                 force_echo = False
 
-                            if not _input_available(in_pipe):
+                            if not _input_available(read_file):
                                 break
                     finally:
                         if line_count > 0:
@@ -1023,14 +973,14 @@ def _writer_daemon(
     finally:
         # send written data back to parent if we used a StringIO
         if isinstance(log_file, io.StringIO):
-            control_pipe.send(log_file.getvalue())
+            control_fd.send(log_file.getvalue())
         log_file_wrapper.close()
-        close_connection_and_file(read_multiprocess_fd, in_pipe)
-        if stdin_multiprocess_fd:
-            close_connection_and_file(stdin_multiprocess_fd, stdin)
+        read_fd.close()
+        if stdin_fd:
+            stdin_fd.close()
 
         # send echo value back to the parent so it can be preserved.
-        control_pipe.send(echo)
+        control_fd.send(echo)
 
 
 def _retry(function):
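The rewrite leans on the fact that ``multiprocessing.connection.Connection`` objects can be shipped to a child process and expose the raw descriptor via ``fileno()``, which lets both sides treat the pipe as a plain file. A compact standalone sketch of the same pattern (a simplified analogue, not the daemon itself):

.. code-block:: python

   import multiprocessing
   import os

   def child(read_conn, write_conn):
       # Close the child's copy of the write end so EOF arrives once the
       # parent closes its copy.
       write_conn.close()
       # closefd=False: the Connection retains ownership of the descriptor.
       with os.fdopen(read_conn.fileno(), "r", closefd=False) as f:
           for line in f:
               print("child saw:", line.rstrip())

   if __name__ == "__main__":
       read_conn, write_conn = multiprocessing.Pipe(duplex=False)
       p = multiprocessing.Process(target=child, args=(read_conn, write_conn))
       p.start()
       read_conn.close()  # parent keeps only the write end
       with os.fdopen(write_conn.fileno(), "w", closefd=False) as f:
           f.write("hello\nworld\n")
       write_conn.close()  # EOF for the child
       p.join()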
@@ -3,6 +3,13 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+import os
+import re
+from typing import Optional
+
+import spack.paths
+import spack.util.git
+
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
 __version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -19,4 +26,58 @@ def __try_int(v):
 spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])
 
 
-__all__ = ["spack_version_info", "spack_version"]
+def get_spack_commit() -> Optional[str]:
+    """Get the Spack git commit sha.
+
+    Returns:
+        (str or None) the commit sha if available, otherwise None
+    """
+    git_path = os.path.join(spack.paths.prefix, ".git")
+    if not os.path.exists(git_path):
+        return None
+
+    git = spack.util.git.git()
+    if not git:
+        return None
+
+    rev = git(
+        "-C",
+        spack.paths.prefix,
+        "rev-parse",
+        "HEAD",
+        output=str,
+        error=os.devnull,
+        fail_on_error=False,
+    )
+    if git.returncode != 0:
+        return None
+
+    match = re.match(r"[a-f\d]{7,}$", rev)
+    return match.group(0) if match else None
+
+
+def get_version() -> str:
+    """Get a descriptive version of this instance of Spack.
+
+    Outputs '<PEP440 version> (<git commit sha>)'.
+
+    The commit sha is only added when available.
+    """
+    commit = get_spack_commit()
+    if commit:
+        return f"{spack_version} ({commit})"
+    return spack_version
+
+
+def get_short_version() -> str:
+    """Short Spack version."""
+    return f"{spack_version_info[0]}.{spack_version_info[1]}"
+
+
+__all__ = [
+    "spack_version_info",
+    "spack_version",
+    "get_version",
+    "get_spack_commit",
+    "get_short_version",
+]
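Callers can now ask the package itself for a descriptive version string; the printed values below are illustrative, since they depend on the checkout:

.. code-block:: python

   import spack

   print(spack.get_version())        # e.g. "0.23.0.dev0 (<commit sha>)" in a git checkout
   print(spack.get_short_version())  # "0.23"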
@@ -39,6 +39,7 @@ def _search_duplicate_compilers(error_cls):
 import collections
 import collections.abc
 import glob
+import inspect
 import io
 import itertools
 import os
@@ -46,11 +47,15 @@ def _search_duplicate_compilers(error_cls):
 import pickle
 import re
 import warnings
+from typing import Iterable, List, Set, Tuple
 from urllib.request import urlopen
 
 import llnl.util.lang
+from llnl.string import plural
 
+import spack.builder
 import spack.config
+import spack.fetch_strategy
 import spack.patch
 import spack.repo
 import spack.spec
@@ -73,7 +78,9 @@ def __init__(self, summary, details):
         self.details = tuple(details)
 
     def __str__(self):
-        return self.summary + "\n" + "\n".join(["    " + detail for detail in self.details])
+        if self.details:
+            return f"{self.summary}\n" + "\n".join(f"    {detail}" for detail in self.details)
+        return self.summary
 
     def __eq__(self, other):
         if self.summary != other.summary or self.details != other.details:
@@ -257,40 +264,6 @@ def _search_duplicate_specs_in_externals(error_cls):
     return errors
 
 
-@config_packages
-def _deprecated_preferences(error_cls):
-    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
-    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
-    errors = []
-    packages_yaml = spack.config.CONFIG.get_config("packages")
-
-    def make_error(attribute_name, config_data, summary):
-        s = io.StringIO()
-        s.write("Occurring in the following file:\n")
-        dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
-        syaml.dump_config(dict_view, stream=s, blame=True)
-        return error_cls(summary=summary, details=[s.getvalue()])
-
-    if "all" in packages_yaml and "version" in packages_yaml["all"]:
-        summary = "Using the deprecated 'version' attribute under 'packages:all'"
-        errors.append(make_error("version", packages_yaml["all"], summary))
-
-    for package_name in packages_yaml:
-        if package_name == "all":
-            continue
-
-        package_conf = packages_yaml[package_name]
-        for attribute in ("compiler", "providers", "target"):
-            if attribute not in package_conf:
-                continue
-            summary = (
-                f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
-            )
-            errors.append(make_error(attribute, package_conf, summary))
-
-    return errors
-
-
 @config_packages
 def _avoid_mismatched_variants(error_cls):
     """Warns if variant preferences have mismatched types or names."""
@@ -311,7 +284,7 @@ def _avoid_mismatched_variants(error_cls):
             pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
             for variant in current_spec.variants.values():
                 # Variant does not exist at all
-                if variant.name not in pkg_cls.variants:
+                if variant.name not in pkg_cls.variant_names():
                     summary = (
                         f"Setting a preference for the '{pkg_name}' package to the "
                         f"non-existing variant '{variant.name}'"
@@ -320,9 +293,8 @@ def _avoid_mismatched_variants(error_cls):
                     continue
 
                 # Variant cannot accept this value
-                s = spack.spec.Spec(pkg_name)
                 try:
-                    s.update_variant_validate(variant.name, variant.value)
+                    spack.variant.prevalidate_variant_value(pkg_cls, variant, strict=True)
                 except Exception:
                     summary = (
                         f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
@@ -416,6 +388,14 @@ def _make_config_error(config_data, summary, error_cls):
 )
 
 
+package_deprecated_attributes = AuditClass(
+    group="packages",
+    tag="PKG-DEPRECATED-ATTRIBUTES",
+    description="Sanity checks to preclude use of deprecated package attributes",
+    kwargs=("pkgs",),
+)
+
+
 package_properties = AuditClass(
     group="packages",
     tag="PKG-PROPERTIES",
@@ -434,22 +414,23 @@ def _make_config_error(config_data, summary, error_cls):
 )
 
 
-@package_directives
+@package_properties
 def _check_build_test_callbacks(pkgs, error_cls):
-    """Ensure stand-alone test method is not included in build-time callbacks"""
+    """Ensure stand-alone test methods are not included in build-time callbacks.
+
+    Test methods are for checking the installed software as stand-alone tests.
+    They could also be called during the post-install phase of a build.
+    """
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
 
-        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
-        has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
+        # TODO (post-34236): "test"->"test_" once remove deprecated methods
+        has_test_method = test_callbacks and any([m.startswith("test_") for m in test_callbacks])
         if has_test_method:
-            msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
-            instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
-                ", ".join(test_callbacks)
-            )
+            msg = f"Package {pkg_name} includes stand-alone test methods in build-time checks."
+            callbacks = ", ".join(test_callbacks)
+            instr = f"Remove the following from 'build_time_test_callbacks': {callbacks}"
             errors.append(error_cls(msg.format(pkg_name), [instr]))
 
     return errors
@@ -547,6 +528,46 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
     return errors
 
 
+@package_deprecated_attributes
+def _search_for_deprecated_package_methods(pkgs, error_cls):
+    """Ensure the package doesn't define or use deprecated methods"""
+    DEPRECATED_METHOD = (("test", "a name starting with 'test_'"),)
+    DEPRECATED_USE = (
+        ("self.cache_extra_test_sources(", "cache_extra_test_sources(self, ..)"),
+        ("self.install_test_root(", "install_test_root(self, ..)"),
+        ("self.run_test(", "test_part(self, ..)"),
+    )
+    errors = []
+    for pkg_name in pkgs:
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
+        method_errors = collections.defaultdict(list)
+        for name, function in methods:
+            for deprecated_name, alternate in DEPRECATED_METHOD:
+                if name == deprecated_name:
+                    msg = f"Rename '{deprecated_name}' method to {alternate} instead."
+                    method_errors[name].append(msg)
+
+            source = inspect.getsource(function)
+            for deprecated_name, alternate in DEPRECATED_USE:
+                if deprecated_name in source:
+                    msg = f"Change '{deprecated_name}' to '{alternate}' in '{name}' method."
+                    method_errors[name].append(msg)
+
+        num_methods = len(method_errors)
+        if num_methods > 0:
+            methods = plural(num_methods, "method", show_n=False)
+            error_msg = (
+                f"Package '{pkg_name}' implements or uses unsupported deprecated {methods}."
+            )
+            instr = [f"Make changes to '{pkg_cls.__module__}':"]
+            for name in sorted(method_errors):
+                instr.extend([f"    {msg}" for msg in method_errors[name]])
+            errors.append(error_cls(error_msg, instr))
+
+    return errors
+
+
 @package_properties
 def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
     """Ensure package names are lowercase and consistent"""
@@ -692,12 +713,17 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        buildsystem_variant, _ = pkg_cls.variants["build_system"]
-        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
-        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
-        module = pkg_cls.module
+        # values are either Value objects (for conditional values) or the values themselves
+        build_system_names = set(
+            v.value if isinstance(v, spack.variant.Value) else v
+            for _, variant in pkg_cls.variant_definitions("build_system")
+            for v in variant.values
+        )
+        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]
+
         has_builders_in_package_py = any(
-            getattr(module, name, False) for name in builder_cls_names
+            spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
         )
         if not has_builders_in_package_py:
             continue
@@ -713,6 +739,171 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     return errors
 
 
+class DeprecatedMagicGlobals(ast.NodeVisitor):
+    def __init__(self, magic_globals: Iterable[str]):
+        super().__init__()
+
+        self.magic_globals: Set[str] = set(magic_globals)
+
+        # State to track whether we're in a class function
+        self.depth: int = 0
+        self.in_function: bool = False
+        self.path = (ast.Module, ast.ClassDef, ast.FunctionDef)
+
+        # Defined locals in the current function (heuristically at least)
+        self.locals: Set[str] = set()
+
+        # List of (name, lineno) tuples for references to magic globals
+        self.references_to_globals: List[Tuple[str, int]] = []
+
+    def descend_in_function_def(self, node: ast.AST) -> None:
+        if not isinstance(node, self.path[self.depth]):
+            return
+        self.depth += 1
+        if self.depth == len(self.path):
+            self.in_function = True
+        super().generic_visit(node)
+        if self.depth == len(self.path):
+            self.in_function = False
+            self.locals.clear()
+        self.depth -= 1
+
+    def generic_visit(self, node: ast.AST) -> None:
+        # Recurse into function definitions
+        if self.depth < len(self.path):
+            return self.descend_in_function_def(node)
+        elif not self.in_function:
+            return
+        elif isinstance(node, ast.Global):
+            for name in node.names:
+                if name in self.magic_globals:
+                    self.references_to_globals.append((name, node.lineno))
+        elif isinstance(node, ast.Assign):
+            # visit the rhs before lhs
+            super().visit(node.value)
+            for target in node.targets:
+                super().visit(target)
+        elif isinstance(node, ast.Name) and node.id in self.magic_globals:
+            if isinstance(node.ctx, ast.Load) and node.id not in self.locals:
+                self.references_to_globals.append((node.id, node.lineno))
+            elif isinstance(node.ctx, ast.Store):
+                self.locals.add(node.id)
+        else:
+            super().generic_visit(node)
+
+
+@package_properties
+def _uses_deprecated_globals(pkgs, error_cls):
+    """Ensure that packages do not use deprecated globals"""
+    errors = []
+
+    for pkg_name in pkgs:
+        # some packages scheduled to be removed in v0.23 are not worth fixing.
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        if all(v.get("deprecated", False) for v in pkg_cls.versions.values()):
+            continue
+
+        file = spack.repo.PATH.filename_for_package_name(pkg_name)
+        tree = ast.parse(open(file).read())
+        visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
+        visitor.visit(tree)
+        if visitor.references_to_globals:
+            errors.append(
+                error_cls(
+                    f"Package '{pkg_name}' uses deprecated globals",
+                    [
+                        f"{file}:{line} references '{name}'"
+                        for name, line in visitor.references_to_globals
+                    ],
+                )
+            )
+
+    return errors
|
@package_properties
|
||||||
|
def _ensure_test_docstring(pkgs, error_cls):
|
||||||
|
"""Ensure stand-alone test methods have a docstring.
|
||||||
|
|
||||||
|
The docstring of a test method is implicitly used as the description of
|
||||||
|
the corresponding test part during test results reporting.
|
||||||
|
"""
|
||||||
|
doc_regex = r'\s+("""[^"]+""")'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for pkg_name in pkgs:
|
||||||
|
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||||
|
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||||
|
method_names = []
|
||||||
|
for name, test_fn in methods:
|
||||||
|
if not name.startswith("test_"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Ensure the test method has a docstring
|
||||||
|
source = inspect.getsource(test_fn)
|
||||||
|
match = re.search(doc_regex, source)
|
||||||
|
if match is None or len(match.group(0).replace('"', "").strip()) == 0:
|
||||||
|
method_names.append(name)
|
||||||
|
|
||||||
|
num_methods = len(method_names)
|
||||||
|
if num_methods > 0:
|
||||||
|
methods = plural(num_methods, "method", show_n=False)
|
||||||
|
docstrings = plural(num_methods, "docstring", show_n=False)
|
||||||
|
msg = f"Package {pkg_name} has test {methods} with empty or missing {docstrings}."
|
||||||
|
names = ", ".join(method_names)
|
||||||
|
instr = [
|
||||||
|
"Docstrings are used as descriptions in test outputs.",
|
||||||
|
f"Add a concise summary to the following {methods} in '{pkg_cls.__module__}':",
|
||||||
|
f"{names}",
|
||||||
|
]
|
||||||
|
errors.append(error_cls(msg, instr))
|
||||||
|
|
||||||
|
return errors
|
||||||
|
|
||||||
|
|
||||||
|
@package_properties
|
||||||
|
def _ensure_test_implemented(pkgs, error_cls):
|
||||||
|
"""Ensure stand-alone test methods are implemented.
|
||||||
|
|
||||||
|
The test method is also required to be non-empty.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def skip(line):
|
||||||
|
ln = line.strip()
|
||||||
|
return ln.startswith("#") or "pass" in ln
|
||||||
|
|
||||||
|
doc_regex = r'\s+("""[^"]+""")'
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for pkg_name in pkgs:
|
||||||
|
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||||
|
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||||
|
method_names = []
|
||||||
|
for name, test_fn in methods:
|
||||||
|
if not name.startswith("test_"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
source = inspect.getsource(test_fn)
|
||||||
|
|
||||||
|
# Attempt to ensure the test method is implemented.
|
||||||
|
impl = re.sub(doc_regex, r"", source).splitlines()[1:]
|
||||||
|
lines = [ln.strip() for ln in impl if not skip(ln)]
|
||||||
|
if not lines:
|
||||||
|
method_names.append(name)
|
||||||
|
|
||||||
|
num_methods = len(method_names)
|
||||||
|
if num_methods > 0:
|
||||||
|
methods = plural(num_methods, "method", show_n=False)
|
||||||
|
msg = f"Package {pkg_name} has empty or missing test {methods}."
|
||||||
|
names = ", ".join(method_names)
|
||||||
|
instr = [
|
||||||
|
f"Implement or remove the following {methods} from '{pkg_cls.__module__}': {names}"
|
||||||
|
]
|
||||||
|
errors.append(error_cls(msg, instr))
|
||||||
|
|
||||||
|
return errors
|
||||||
|
|
||||||
|
|
||||||
@package_https_directives
|
@package_https_directives
|
||||||
def _linting_package_file(pkgs, error_cls):
|
def _linting_package_file(pkgs, error_cls):
|
||||||
"""Check for correctness of links"""
|
"""Check for correctness of links"""
|
||||||
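Note: the `DeprecatedMagicGlobals` visitor added above only descends through a `Module -> ClassDef -> FunctionDef` nesting and treats names assigned inside a method as locals. A minimal sketch of driving it on an in-memory source string (the `pkg_source` snippet is illustrative, not part of the diff):

    import ast
    import textwrap

    pkg_source = textwrap.dedent(
        """
        class Demo:
            def install(self, spec, prefix):
                args = std_cmake_args + ["-DFOO=ON"]  # load of a magic global: reported
                std_pip_args = ["--no-build-isolation"]  # store makes it a local: not reported
        """
    )

    visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
    visitor.visit(ast.parse(pkg_source))
    print(visitor.references_to_globals)  # [('std_cmake_args', 4)]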
@@ -879,20 +1070,22 @@ def check_virtual_with_variants(spec, msg):

             # check variants
             dependency_variants = dep.spec.variants
-            for name, value in dependency_variants.items():
+            for name, variant in dependency_variants.items():
                 try:
-                    v, _ = dependency_pkg_cls.variants[name]
-                    v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
+                    spack.variant.prevalidate_variant_value(
+                        dependency_pkg_cls, variant, dep.spec, strict=True
+                    )
                 except Exception as e:
                     summary = (
                         f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
                     )

+                    error_msg = str(e)
                     if isinstance(e, KeyError):
                         error_msg = (
                             f"variant {str(e).strip()} does not exist in package {dep_name}"
+                            f" in package '{dep_name}'"
                         )
-                    error_msg += f" in package '{dep_name}'"

                     errors.append(
                         error_cls(summary=summary, details=[error_msg, f"in {filename}"])
@@ -904,39 +1097,38 @@ def check_virtual_with_variants(spec, msg):
 @package_directives
 def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     """Ensures that variant defaults are present and parsable from cli"""
+
+    def check_variant(pkg_cls, variant, vname):
+        # bool is a subclass of int in python. Permitting a default that is an instance
+        # of 'int' means both foo=false and foo=0 are accepted. Other falsish values are
+        # not allowed, since they can't be parsed from CLI ('foo=')
+        default_is_parsable = isinstance(variant.default, int) or variant.default
+
+        if not default_is_parsable:
+            msg = f"Variant '{vname}' of package '{pkg_cls.name}' has an unparsable default value"
+            return [error_cls(msg, [])]
+
+        try:
+            vspec = variant.make_default()
+        except spack.variant.MultipleValuesInExclusiveVariantError:
+            msg = f"Can't create default value for variant '{vname}' in package '{pkg_cls.name}'"
+            return [error_cls(msg, [])]
+
+        try:
+            variant.validate_or_raise(vspec, pkg_cls.name)
+        except spack.variant.InvalidVariantValueError:
+            msg = "Default value of variant '{vname}' in package '{pkg.name}' is invalid"
+            question = "Is it among the allowed values?"
+            return [error_cls(msg, [question])]
+
+        return []
+
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        for variant_name, entry in pkg_cls.variants.items():
-            variant, _ = entry
-            default_is_parsable = (
-                # Permitting a default that is an instance on 'int' permits
-                # to have foo=false or foo=0. Other falsish values are
-                # not allowed, since they can't be parsed from cli ('foo=')
-                isinstance(variant.default, int)
-                or variant.default
-            )
-            if not default_is_parsable:
-                error_msg = "Variant '{}' of package '{}' has a bad default value"
-                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
-                continue
-
-            try:
-                vspec = variant.make_default()
-            except spack.variant.MultipleValuesInExclusiveVariantError:
-                error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
-                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
-                continue
-
-            try:
-                variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
-            except spack.variant.InvalidVariantValueError:
-                error_msg = (
-                    "The default value of the variant '{}' in package '{}' failed validation"
-                )
-                question = "Is it among the allowed values?"
-                errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))
-
+        for vname in pkg_cls.variant_names():
+            for _, variant_def in pkg_cls.variant_definitions(vname):
+                errors.extend(check_variant(pkg_cls, variant_def, vname))
     return errors
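Note: the audit hunks above and below all track the same data-model change: `pkg_cls.variants` is no longer a flat name-to-definition mapping; names come from `variant_names()`, and each name may carry several conditional definitions via `variant_definitions(name)`, which yields `(when_spec, variant)` pairs. A rough sketch of the new iteration pattern (any real package name works in place of "zlib"):

    import spack.repo

    pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
    for vname in pkg_cls.variant_names():
        for when, vdef in pkg_cls.variant_definitions(vname):
            # vdef carries default, values and description; when is the condition
            print(vname, when, vdef.default, vdef.description)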
@@ -946,11 +1138,11 @@ def _ensure_variants_have_descriptions(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        for variant_name, entry in pkg_cls.variants.items():
-            variant, _ = entry
-            if not variant.description:
-                error_msg = "Variant '{}' in package '{}' is missing a description"
-                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+        for name in pkg_cls.variant_names():
+            for when, variant in pkg_cls.variant_definitions(name):
+                if not variant.description:
+                    msg = f"Variant '{name}' in package '{pkg_name}' is missing a description"
+                    errors.append(error_cls(msg, []))

     return errors
@@ -1007,29 +1199,26 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls


 def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
-    variant_exceptions = (
-        spack.variant.InconsistentValidationError,
-        spack.variant.MultipleValuesInExclusiveVariantError,
-        spack.variant.InvalidVariantValueError,
-        KeyError,
-    )
     errors = []
+    variant_names = pkg.variant_names()
+    summary = f"{pkg.name}: wrong variant in '{directive}' directive"
+    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
+
     for name, v in constraint.variants.items():
+        if name not in variant_names:
+            msg = f"variant {name} does not exist in {pkg.name}"
+            errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
+            continue
+
         try:
-            variant, _ = pkg.variants[name]
-            variant.validate_or_raise(v, pkg_cls=pkg)
-        except variant_exceptions as e:
-            summary = pkg.name + ': wrong variant in "{0}" directive'
-            summary = summary.format(directive)
-            filename = spack.repo.PATH.filename_for_package_name(pkg.name)
-            error_msg = str(e).strip()
-            if isinstance(e, KeyError):
-                error_msg = "the variant {0} does not exist".format(error_msg)
-            err = error_cls(summary=summary, details=[error_msg, "in " + filename])
-
-            errors.append(err)
+            spack.variant.prevalidate_variant_value(pkg, v, constraint, strict=True)
+        except (
+            spack.variant.InconsistentValidationError,
+            spack.variant.MultipleValuesInExclusiveVariantError,
+            spack.variant.InvalidVariantValueError,
+        ) as e:
+            msg = str(e).strip()
+            errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))

     return errors
@@ -1067,9 +1256,10 @@ def _extracts_errors(triggers, summary):
             for dname in dnames
         )

-        for vname, (variant, triggers) in pkg_cls.variants.items():
-            summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
-            errors.extend(_extracts_errors(triggers, summary))
+        for when, variants_by_name in pkg_cls.variants.items():
+            for vname, variant in variants_by_name.items():
+                summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
+                errors.extend(_extracts_errors([when], summary))

         for when, providers, details in _error_items(pkg_cls.provided):
             errors.extend(
@@ -33,9 +33,9 @@
 from llnl.util.symlink import readlink

 import spack.caches
-import spack.cmd
 import spack.config as config
 import spack.database as spack_db
+import spack.deptypes as dt
 import spack.error
 import spack.hash_types as ht
 import spack.hooks

@@ -44,9 +44,9 @@
 import spack.oci.image
 import spack.oci.oci
 import spack.oci.opener
+import spack.paths
 import spack.platforms
 import spack.relocate as relocate
-import spack.repo
 import spack.spec
 import spack.stage
 import spack.store

@@ -54,6 +54,7 @@
 import spack.util.archive
 import spack.util.crypto
 import spack.util.file_cache as file_cache
+import spack.util.filesystem as ssys
 import spack.util.gpg
 import spack.util.parallel
 import spack.util.path
@@ -105,7 +106,7 @@ class BuildCacheDatabase(spack_db.Database):
     record_fields = ("spec", "ref_count", "in_buildcache")

     def __init__(self, root):
-        super().__init__(root, lock_cfg=spack_db.NO_LOCK)
+        super().__init__(root, lock_cfg=spack_db.NO_LOCK, layout=None)
         self._write_transaction_impl = llnl.util.lang.nullcontext
         self._read_transaction_impl = llnl.util.lang.nullcontext
@@ -251,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

         spec_list = [
             s
-            for s in db.query_local(installed=any, in_buildcache=any)
+            for s in db.query_local(installed=any)
             if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
         ]
@@ -687,7 +688,7 @@ def get_buildfile_manifest(spec):
     # Non-symlinks.
     for rel_path in visitor.files:
         abs_path = os.path.join(root, rel_path)
-        m_type, m_subtype = fsys.mime_type(abs_path)
+        m_type, m_subtype = ssys.mime_type(abs_path)

         if relocate.needs_binary_relocation(m_type, m_subtype):
             # Why is this branch not part of needs_binary_relocation? :(
@@ -712,15 +713,32 @@ def get_buildfile_manifest(spec):
     return data


-def hashes_to_prefixes(spec):
-    """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
-    return {
-        s.dag_hash(): str(s.prefix)
+def deps_to_relocate(spec):
+    """Return the transitive link and direct run dependencies of the spec.
+
+    This is a special traversal for dependencies we need to consider when relocating a package.
+
+    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
+    we need to rewrite those locations when dependencies are in a different place at install time
+    than they were at build time.
+
+    This traversal covers transitive link dependencies and direct run dependencies because:
+
+    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
+       dependency libraries.
+    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
+       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
+
+    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
+    """
+    deps = [
+        s
         for s in itertools.chain(
             spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
         if not s.external
-    }
+    ]
+    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())


 def get_buildinfo_dict(spec):
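Note: `deps_to_relocate` keeps the first occurrence per DAG hash. The deduplication relies on the `key=` parameter of `llnl.util.lang.dedupe`, exactly as used in the hunk; a self-contained sketch with toy stand-ins:

    import itertools

    from llnl.util.lang import dedupe

    class FakeSpec:
        """Toy stand-in for a Spec; only dag_hash() matters here."""
        def __init__(self, h):
            self.h = h
        def dag_hash(self):
            return self.h

    link = [FakeSpec("a"), FakeSpec("b")]
    run = [FakeSpec("b"), FakeSpec("c")]
    deps = list(dedupe(itertools.chain(link, run), key=lambda s: s.dag_hash()))
    print([s.dag_hash() for s in deps])  # ['a', 'b', 'c']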
@@ -736,7 +754,7 @@ def get_buildinfo_dict(spec):
         "relocate_binaries": manifest["binary_to_relocate"],
         "relocate_links": manifest["link_to_relocate"],
         "hardlinks_deduped": manifest["hardlinks_deduped"],
-        "hash_to_prefix": hashes_to_prefixes(spec),
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
     }
@@ -788,7 +806,9 @@ def sign_specfile(key: str, specfile_path: str) -> str:
     return signed_specfile_path


-def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
+def _read_specs_and_push_index(
+    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
+):
     """Read all the specs listed in the provided list, using thread given thread parallelism,
     generate the index, and push it to the mirror.
@@ -812,7 +832,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         else:
             continue

-        db.add(fetched_spec, None)
+        db.add(fetched_spec)
         db.mark(fetched_spec, "in_buildcache", True)

     # Now generate the index, compute its hash, and push the two files to
@@ -1444,7 +1464,9 @@ def _oci_push_pkg_blob(
     filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

     # Create an oci.image.layer aka tarball of the package
-    compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
+    compressed_tarfile_checksum, tarfile_checksum = _do_create_tarball(
+        filename, spec.prefix, get_buildinfo_dict(spec)
+    )

     blob = spack.oci.oci.Blob(
         Digest.from_sha256(compressed_tarfile_checksum),
@@ -1627,7 +1649,6 @@ def _oci_push(
     Dict[str, spack.oci.oci.Blob],
     List[Tuple[Spec, BaseException]],
 ]:
-
     # Spec dag hash -> blob
     checksums: Dict[str, spack.oci.oci.Blob] = {}

@@ -1765,7 +1786,7 @@ def _oci_update_index(

     for spec_dict in spec_dicts:
         spec = Spec.from_dict(spec_dict)
-        db.add(spec, directory_layout=None)
+        db.add(spec)
         db.mark(spec, "in_buildcache", True)

     # Create the index.json file
@@ -2197,11 +2218,36 @@ def relocate_package(spec):
     # First match specific prefix paths. Possibly the *local* install prefix
     # of some dependency is in an upstream, so we cannot assume the original
     # spack store root can be mapped uniformly to the new spack store root.
-    for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
-        if dag_hash in hash_to_old_prefix:
-            old_dep_prefix = hash_to_old_prefix[dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
-            prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix
+    #
+    # If the spec is spliced, we need to handle the simultaneous mapping
+    # from the old install_tree to the new install_tree and from the build_spec
+    # to the spliced spec.
+    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
+    # by checking for spliced-in nodes by checking for nodes not in the build_spec
+    # without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
+    # in the context of the relevant root spec. This ensures that the analog for a spec s
+    # is the spec that s replaced when we spliced.
+    relocation_specs = deps_to_relocate(spec)
+    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
+    for s in relocation_specs:
+        analog = s
+        if id(s) not in build_spec_ids:
+            analogs = [
+                d
+                for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
+                if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
+            ]
+            if analogs:
+                # Prefer same-name analogs and prefer higher versions
+                # This matches the preferences in Spec.splice, so we will find same node
+                analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
+
+        lookup_dag_hash = analog.dag_hash()
+        if lookup_dag_hash in hash_to_old_prefix:
+            old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
+            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)

     # Only then add the generic fallback of install prefix -> install prefix.
     prefix_to_prefix_text[old_prefix] = new_prefix
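Note: the analog selection above relies on Python's tuple ordering in the `max` key: `False < True`, so a same-name analog always beats a differently-named one, and the version breaks ties among same-name candidates. A toy illustration of that ordering:

    candidates = [("libfoo", 2), ("libbar", 9), ("libfoo", 3)]
    # same-name match first (boolean), then highest version
    best = max(candidates, key=lambda c: (c[0] == "libfoo", c[1]))
    print(best)  # ('libfoo', 3)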
@@ -2516,7 +2562,13 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
     return pkg_prefix


-def install_root_node(spec, unsigned=False, force=False, sha256=None):
+def install_root_node(
+    spec: spack.spec.Spec,
+    unsigned=False,
+    force: bool = False,
+    sha256: Optional[str] = None,
+    allow_missing: bool = False,
+) -> None:
     """Install the root node of a concrete spec from a buildcache.

     Checking the sha256 sum of a node before installation is usually needed only
@@ -2525,11 +2577,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):

     Args:
         spec: spec to be installed (note that only the root node will be installed)
-        unsigned (bool): if True allows installing unsigned binaries
-        force (bool): force installation if the spec is already present in the
-            local store
-        sha256 (str): optional sha256 of the binary package, to be checked
-            before installation
+        unsigned: if True allows installing unsigned binaries
+        force: force installation if the spec is already present in the local store
+        sha256: optional sha256 of the binary package, to be checked before installation
+        allow_missing: when true, allows installing a node with missing dependencies
     """
     # Early termination
     if spec.external or spec.virtual:
@@ -2539,10 +2590,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
         warnings.warn("Package for spec {0} already installed.".format(spec.format()))
         return

-    download_result = download_tarball(spec, unsigned)
+    download_result = download_tarball(spec.build_spec, unsigned)
     if not download_result:
         msg = 'download of binary cache file for spec "{0}" failed'
-        raise RuntimeError(msg.format(spec.format()))
+        raise RuntimeError(msg.format(spec.build_spec.format()))

     if sha256:
         checker = spack.util.crypto.Checker(sha256)
@@ -2561,8 +2612,13 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
+        spec.package.windows_establish_runtime_linkage()
+        if spec.spliced:  # overwrite old metadata with new
+            spack.store.STORE.layout.write_spec(
+                spec, spack.store.STORE.layout.spec_file_path(spec)
+            )
         spack.hooks.post_install(spec, False)
-        spack.store.STORE.db.add(spec, spack.store.STORE.layout)
+        spack.store.STORE.db.add(spec, allow_missing=allow_missing)


 def install_single_spec(spec, unsigned=False, force=False):
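Note: with the new keyword, callers that tolerate truncated dependency graphs (such as the bootstrap path further down) can opt out of missing-dependency errors when registering the node in the database. A call sketch per the new signature, assuming `s` is a concrete spec obtained elsewhere:

    import spack.binary_distribution as bindist

    bindist.install_root_node(s, unsigned=True, force=True, allow_missing=True)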
@@ -2694,6 +2750,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

     for mirror in mirror_collection.values():
         fetch_url = mirror.fetch_url
+        # TODO: oci:// does not support signing.
+        if fetch_url.startswith("oci://"):
+            continue
         keys_url = url_util.join(
             fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
         )
@@ -9,6 +9,7 @@
     all_core_root_specs,
     ensure_clingo_importable_or_raise,
     ensure_core_dependencies,
+    ensure_file_in_path_or_raise,
     ensure_gpg_in_path_or_raise,
     ensure_patchelf_in_path_or_raise,
 )

@@ -19,6 +20,7 @@
     "is_bootstrapping",
     "ensure_bootstrap_configuration",
     "ensure_core_dependencies",
+    "ensure_file_in_path_or_raise",
     "ensure_gpg_in_path_or_raise",
     "ensure_clingo_importable_or_raise",
     "ensure_patchelf_in_path_or_raise",
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
"""Common basic functions used through the spack.bootstrap package"""
|
"""Common basic functions used through the spack.bootstrap package"""
|
||||||
import fnmatch
|
import fnmatch
|
||||||
|
import glob
|
||||||
import importlib
|
import importlib
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
@@ -60,10 +61,19 @@ def _try_import_from_store(
         python, *_ = candidate_spec.dependencies("python-venv")
     else:
         python, *_ = candidate_spec.dependencies("python")
-    module_paths = [
-        os.path.join(candidate_spec.prefix, python.package.purelib),
-        os.path.join(candidate_spec.prefix, python.package.platlib),
-    ]
+
+    # if python is installed, ask it for the layout
+    if python.installed:
+        module_paths = [
+            os.path.join(candidate_spec.prefix, python.package.purelib),
+            os.path.join(candidate_spec.prefix, python.package.platlib),
+        ]
+    # otherwise search for the site-packages directory
+    # (clingo from binaries with truncated python-venv runtime)
+    else:
+        module_paths = glob.glob(
+            os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
+        )
     path_before = list(sys.path)

     # NOTE: try module_paths first and last, last allows an existing version in path
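Note: the fallback branch above handles binaries whose python-venv runtime was truncated, where the Python package layout cannot be queried. The glob is purely path-based and easy to reproduce in isolation (the prefix here is illustrative, not from the diff):

    import glob
    import os

    prefix = "/path/to/clingo-prefix"
    module_paths = glob.glob(os.path.join(prefix, "lib", "python*", "site-packages"))
    # e.g. ['/path/to/clingo-prefix/lib/python3.11/site-packages']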
@@ -14,6 +14,7 @@
 import spack.compilers
 import spack.config
 import spack.environment
+import spack.modules
 import spack.paths
 import spack.platforms
 import spack.repo
@@ -37,21 +37,16 @@
 import spack.binary_distribution
 import spack.config
 import spack.detection
-import spack.environment
-import spack.modules
-import spack.paths
+import spack.mirror
 import spack.platforms
-import spack.platforms.linux
-import spack.repo
 import spack.spec
 import spack.store
 import spack.user_environment
-import spack.util.environment
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
-import spack.util.url
 import spack.version
+from spack.installer import PackageInstaller

 from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
 from .clingo import ClingoBootstrapConcretizer
@@ -96,12 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
         self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

         # Promote (relative) paths to file urls
-        url = conf["info"]["url"]
-        if spack.util.url.is_path_instead_of_url(url):
-            if not os.path.isabs(url):
-                url = os.path.join(self.metadata_dir, url)
-            url = spack.util.url.path_to_file_url(url)
-        self.url = url
+        self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url

     @property
     def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -180,7 +170,15 @@ def _install_by_hash(
     query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
     for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
         spack.binary_distribution.install_root_node(
-            match, unsigned=True, force=True, sha256=pkg_sha256
+            # allow_missing is true since when bootstrapping clingo we truncate runtime
+            # deps such as gcc-runtime, since we link libstdc++ statically, and the other
+            # further runtime deps are loaded by the Python interpreter. This just silences
+            # warnings about missing dependencies.
+            match,
+            unsigned=True,
+            force=True,
+            sha256=pkg_sha256,
+            allow_missing=True,
         )
@@ -283,7 +281,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

         # Install the spec that should make the module importable
         with spack.config.override(self.mirror_scope):
-            concrete_spec.package.do_install(fail_fast=True)
+            PackageInstaller([concrete_spec.package], fail_fast=True).install()

         if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
             self.last_search = info

@@ -306,7 +304,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
         msg = "[BOOTSTRAP] Try installing '{0}' from sources"
         tty.debug(msg.format(abstract_spec_str))
         with spack.config.override(self.mirror_scope):
-            concrete_spec.package.do_install()
+            PackageInstaller([concrete_spec.package], fail_fast=True).install()
         if _executables_in_store(executables, concrete_spec, query_info=info):
             self.last_search = info
             return True
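Note: both call sites now funnel through the same installer entry point instead of the old per-package `do_install`. The list argument suggests batching is part of the API: several build requests can be queued in one installer, as in this sketch (the spec names are hypothetical):

    from spack.installer import PackageInstaller

    PackageInstaller([spec_a.package, spec_b.package], fail_fast=True).install()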
@@ -472,7 +470,8 @@ def ensure_clingo_importable_or_raise() -> None:

 def gnupg_root_spec() -> str:
     """Return the root spec used to bootstrap GnuPG"""
-    return _root_spec("gnupg@2.3:")
+    root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg"
+    return _root_spec(f"{root_spec_name}@2.3:")


 def ensure_gpg_in_path_or_raise() -> None:
@@ -482,6 +481,19 @@ def ensure_gpg_in_path_or_raise() -> None:
     )


+def file_root_spec() -> str:
+    """Return the root spec used to bootstrap file"""
+    root_spec_name = "win-file" if IS_WINDOWS else "file"
+    return _root_spec(root_spec_name)
+
+
+def ensure_file_in_path_or_raise() -> None:
+    """Ensure file is in the PATH or raise"""
+    return ensure_executables_in_path_or_raise(
+        executables=["file"], abstract_spec=file_root_spec()
+    )
+
+
 def patchelf_root_spec() -> str:
     """Return the root spec used to bootstrap patchelf"""
     # 0.13.1 is the last version not to require C++17.
@@ -565,14 +577,15 @@ def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
         ensure_patchelf_in_path_or_raise()
-    if not IS_WINDOWS:
-        ensure_gpg_in_path_or_raise()
+    elif sys.platform == "win32":
+        ensure_file_in_path_or_raise()
+    ensure_gpg_in_path_or_raise()
     ensure_clingo_importable_or_raise()


 def all_core_root_specs() -> List[str]:
     """Return a list of all the core root specs that may be used to bootstrap Spack"""
-    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
+    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]


 def bootstrapping_sources(scope: Optional[str] = None):
@@ -589,7 +602,10 @@ def bootstrapping_sources(scope: Optional[str] = None):
         current = copy.copy(entry)
         metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
         metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
-        with open(metadata_yaml, encoding="utf-8") as stream:
-            current.update(spack.util.spack_yaml.load(stream))
-        list_of_sources.append(current)
+        try:
+            with open(metadata_yaml, encoding="utf-8") as stream:
+                current.update(spack.util.spack_yaml.load(stream))
+            list_of_sources.append(current)
+        except OSError:
+            pass
     return list_of_sources
@@ -14,9 +14,9 @@
 from llnl.util import tty

 import spack.environment
+import spack.spec
 import spack.tengine
-import spack.util.cpus
-import spack.util.executable
+import spack.util.path

 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:


 def _buildcache_requirements() -> List[RequiredResponseType]:
     _buildcache_exes = {
-        "file": _missing("file", "required to analyze files for buildcaches"),
+        "file": _missing("file", "required to analyze files for buildcaches", system_only=False),
         ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
     }
     if platform.system().lower() == "darwin":
|
|||||||
import multiprocessing
|
import multiprocessing
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import stat
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import types
|
import types
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from enum import Flag, auto
|
from enum import Flag, auto
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from typing import Dict, List, Set, Tuple
|
from multiprocessing.connection import Connection
|
||||||
|
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||||
|
|
||||||
|
import archspec.cpu
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.string import plural
|
from llnl.string import plural
|
||||||
@@ -51,8 +55,8 @@
 from llnl.util.lang import dedupe, stable_partition
 from llnl.util.symlink import symlink
 from llnl.util.tty.color import cescape, colorize
-from llnl.util.tty.log import MultiProcessFd

+import spack.build_systems._checks
 import spack.build_systems.cmake
 import spack.build_systems.meson
 import spack.build_systems.python
@@ -61,26 +65,21 @@
 import spack.config
 import spack.deptypes as dt
 import spack.error
-import spack.main
+import spack.multimethod
 import spack.package_base
 import spack.paths
 import spack.platforms
-import spack.repo
 import spack.schema.environment
 import spack.spec
 import spack.stage
 import spack.store
 import spack.subprocess_context
-import spack.user_environment
 import spack.util.executable
-import spack.util.path
-import spack.util.pattern
+import spack.util.libc
 from spack import traverse
 from spack.context import Context
-from spack.error import NoHeadersError, NoLibrariesError
+from spack.error import InstallError, NoHeadersError, NoLibrariesError
 from spack.install_test import spack_install_test_log
-from spack.installer import InstallError
-from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
     SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,

@@ -92,7 +91,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module, path_from_modules
+from spack.util.module_cmd import load_module

 #
 # This can be set by the user to globally disable parallel builds.
@@ -363,7 +362,7 @@ def set_compiler_environment_variables(pkg, env):
     _add_werror_handling(keep_werror, env)

     # Set the target parameters that the compiler will add
-    isa_arg = spec.architecture.target.optimization_flags(compiler)
+    isa_arg = optimization_flags(compiler, spec.target)
     env.set("SPACK_TARGET_ARGS", isa_arg)

     # Trap spack-tracked compiler flags as appropriate.
@@ -408,6 +407,65 @@ def set_compiler_environment_variables(pkg, env):
     return env


+def optimization_flags(compiler, target):
+    if spack.compilers.is_mixed_toolchain(compiler):
+        msg = (
+            "microarchitecture specific optimizations are not "
+            "supported yet on mixed compiler toolchains [check"
+            f" {compiler.name}@{compiler.version} for further details]"
+        )
+        tty.debug(msg)
+        return ""
+
+    # Try to check if the current compiler comes with a version number or
+    # has an unexpected suffix. If so, treat it as a compiler with a
+    # custom spec.
+    compiler_version = compiler.version
+    version_number, suffix = archspec.cpu.version_components(compiler.version)
+    if not version_number or suffix:
+        try:
+            compiler_version = compiler.real_version
+        except spack.util.executable.ProcessError as e:
+            # log this and just return compiler.version instead
+            tty.debug(str(e))
+
+    try:
+        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
+    except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
+        result = ""
+
+    return result
+
+
+class FilterDefaultDynamicLinkerSearchPaths:
+    """Remove rpaths to directories that are default search paths of the dynamic linker."""
+
+    def __init__(self, dynamic_linker: Optional[str]) -> None:
+        # Identify directories by (inode, device) tuple, which handles symlinks too.
+        self.default_path_identifiers: Set[Tuple[int, int]] = set()
+        if not dynamic_linker:
+            return
+        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
+            try:
+                s = os.stat(path)
+                if stat.S_ISDIR(s.st_mode):
+                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
+            except OSError:
+                continue
+
+    def is_dynamic_loader_default_path(self, p: str) -> bool:
+        try:
+            s = os.stat(p)
+            return (s.st_ino, s.st_dev) in self.default_path_identifiers
+        except OSError:
+            return False
+
+    def __call__(self, dirs: List[str]) -> List[str]:
+        if not self.default_path_identifiers:
+            return dirs
+        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
+
+
 def set_wrapper_variables(pkg, env):
     """Set environment variables used by the Spack compiler wrapper (which have the prefix
     `SPACK_`) and also add the compiler wrappers to PATH.
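Note: `FilterDefaultDynamicLinkerSearchPaths` compares `(st_ino, st_dev)` pairs, so a directory and a symlink to it (say `/lib64` vs `/usr/lib64` on merged-usr systems) resolve to the same identity. A usage sketch (the linker path is illustrative):

    filt = FilterDefaultDynamicLinkerSearchPaths("/lib64/ld-linux-x86-64.so.2")
    rpaths = filt(["/opt/spack/store/foo/lib", "/usr/lib64"])
    # entries that stat-match a default search path of that linker are dropped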
@@ -455,7 +513,7 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG, "TRUE")
     env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
-    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
+    env.set(SPACK_DEBUG_LOG_DIR, spack.paths.spack_working_dir)

     if spack.config.get("config:ccache"):
         # Enable ccache in the compiler wrapper
@@ -465,69 +523,71 @@ def set_wrapper_variables(pkg, env):
|
|||||||
env.set("CCACHE_DISABLE", "1")
|
env.set("CCACHE_DISABLE", "1")
|
||||||
|
|
||||||
# Gather information about various types of dependencies
|
# Gather information about various types of dependencies
|
||||||
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
|
rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))
|
||||||
rpath_deps = get_rpath_deps(pkg)
|
link_deps = pkg.spec.traverse(root=False, order="topo", deptype=dt.LINK)
|
||||||
|
external_link_deps, nonexternal_link_deps = stable_partition(link_deps, lambda d: d.external)
|
||||||
|
|
||||||
link_dirs = []
|
link_dirs = []
|
||||||
include_dirs = []
|
include_dirs = []
|
||||||
rpath_dirs = []
|
rpath_dirs = []
|
||||||
|
|
||||||
def _prepend_all(list_to_modify, items_to_add):
|
for dep in chain(external_link_deps, nonexternal_link_deps):
|
||||||
# Update the original list (creating a new list would be faster but
|
# TODO: is_system_path is wrong, but even if we knew default -L, -I flags from the compiler
|
||||||
# may not be convenient)
|
# and default search dirs from the dynamic linker, it's not obvious how to avoid a possibly
|
||||||
for item in reversed(list(items_to_add)):
|
# expensive search in `query.libs.directories` and `query.headers.directories`, which is
|
||||||
list_to_modify.insert(0, item)
|
# what this branch is trying to avoid.
|
||||||
|
if is_system_path(dep.prefix):
|
||||||
|
continue
|
||||||
|
# TODO: as of Spack 0.22, multiple instances of the same package may occur among the link
|
||||||
|
# deps, so keying by name is wrong. In practice it is not problematic: we obtain the same
|
||||||
|
# gcc-runtime / glibc here, and repeatedly add the same dirs that are later deduped.
|
||||||
|
query = pkg.spec[dep.name]
|
||||||
|
dep_link_dirs = []
|
||||||
|
try:
|
||||||
|
# Locating libraries can be time consuming, so log start and finish.
|
||||||
|
tty.debug(f"Collecting libraries for {dep.name}")
|
||||||
|
dep_link_dirs.extend(query.libs.directories)
|
||||||
|
tty.debug(f"Libraries for {dep.name} have been collected.")
|
||||||
|
except NoLibrariesError:
|
||||||
|
tty.debug(f"No libraries found for {dep.name}")
|
||||||
|
|
||||||
def update_compiler_args_for_dep(dep):
|
for default_lib_dir in ("lib", "lib64"):
|
||||||
if dep in link_deps and (not is_system_path(dep.prefix)):
|
default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
|
||||||
query = pkg.spec[dep.name]
|
if os.path.isdir(default_lib_prefix):
|
||||||
dep_link_dirs = list()
|
dep_link_dirs.append(default_lib_prefix)
|
||||||
try:
|
|
||||||
# In some circumstances (particularly for externals) finding
|
|
||||||
# libraries packages can be time consuming, so indicate that
|
|
||||||
             # we are performing this operation (and also report when it
             # finishes).
             tty.debug("Collecting libraries for {0}".format(dep.name))
             dep_link_dirs.extend(query.libs.directories)
             tty.debug("Libraries for {0} have been collected.".format(dep.name))
         except NoLibrariesError:
             tty.debug("No libraries found for {0}".format(dep.name))

-        for default_lib_dir in ["lib", "lib64"]:
-            default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
-            if os.path.isdir(default_lib_prefix):
-                dep_link_dirs.append(default_lib_prefix)
-
-        _prepend_all(link_dirs, dep_link_dirs)
-        if dep in rpath_deps:
-            _prepend_all(rpath_dirs, dep_link_dirs)
-
-        try:
-            _prepend_all(include_dirs, query.headers.directories)
-        except NoHeadersError:
-            tty.debug("No headers found for {0}".format(dep.name))
-
-    for dspec in pkg.spec.traverse(root=False, order="post"):
-        if dspec.external:
-            update_compiler_args_for_dep(dspec)
-
-    # Just above, we prepended entries for -L/-rpath for externals. We
-    # now do this for non-external packages so that Spack-built packages
-    # are searched first for libraries etc.
-    for dspec in pkg.spec.traverse(root=False, order="post"):
-        if not dspec.external:
-            update_compiler_args_for_dep(dspec)
-
-    # The top-level package is always RPATHed. It hasn't been installed yet
-    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
-    # not be created for the install).
-    for libdir in ["lib64", "lib"]:
+        link_dirs[:0] = dep_link_dirs
+        if dep.dag_hash() in rpath_hashes:
+            rpath_dirs[:0] = dep_link_dirs
+
+        try:
+            tty.debug(f"Collecting headers for {dep.name}")
+            include_dirs[:0] = query.headers.directories
+            tty.debug(f"Headers for {dep.name} have been collected.")
+        except NoHeadersError:
+            tty.debug(f"No headers found for {dep.name}")
+
+    # The top-level package is heuristically rpath'ed.
+    for libdir in ("lib64", "lib"):
         lib_path = os.path.join(pkg.prefix, libdir)
         rpath_dirs.insert(0, lib_path)

+    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
+        pkg.compiler.default_dynamic_linker
+    )
+
+    # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
+    # branch above). link_dirs should be filtered with entries from _parse_link_paths.
     link_dirs = list(dedupe(filter_system_paths(link_dirs)))
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
+    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
+
+    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
+    # just this filter.
+    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
+    if implicit_rpaths:
+        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))

 # Spack managed directories include the stage, store and upstream stores. We extend this with
 # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
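Note (not part of the diff): the hunk above adds `FilterDefaultDynamicLinkerSearchPaths`, a callable that drops rpath entries the dynamic linker would search anyway. The class name and call shape are from the diff; the body below is only a minimal illustrative sketch of the filtering idea, with hardcoded default paths standing in for whatever the real implementation derives from the linker.

```python
# Illustrative sketch only: the real class presumably derives the default
# search paths from the given dynamic linker rather than hardcoding them.
import os
from typing import List, Optional


class DefaultSearchPathFilter:
    """Drop directories that the dynamic linker already searches by default."""

    def __init__(self, default_dynamic_linker: Optional[str]) -> None:
        # Assumption: fall back to common defaults when no linker is known.
        self.default_paths = {"/lib", "/lib64", "/usr/lib", "/usr/lib64"}

    def __call__(self, dirs: List[str]) -> List[str]:
        # Compare on normalized real paths so symlinked variants also match.
        return [d for d in dirs if os.path.realpath(d) not in self.default_paths]


print(DefaultSearchPathFilter(None)(["/usr/lib", "/opt/spack/foo/lib"]))
# -> ['/opt/spack/foo/lib']
```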
@@ -557,14 +617,12 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     """
     module = ModuleChangePropagator(pkg)

+    jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
+    module.make_jobs = jobs
     if context == Context.BUILD:
-        module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
         module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
         module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

-    jobs = determine_number_of_jobs(parallel=pkg.parallel)
-    module.make_jobs = jobs
-
     # TODO: make these build deps that can be installed if not found.
     module.make = MakeExecutable("make", jobs)
     module.gmake = MakeExecutable("gmake", jobs)
@@ -732,21 +790,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


-def get_rpaths(pkg):
-    """Get a list of all the rpaths for a package."""
-    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
-    deps = get_rpath_deps(pkg)
-    rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
-    rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
-    # Second module is our compiler mod name. We use that to get rpaths from
-    # module show output.
-    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
-        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
-        if mod_rpath:
-            rpaths.append(mod_rpath)
-    return list(dedupe(filter_system_paths(rpaths)))
-
-
 def load_external_modules(pkg):
     """Traverse a package's spec DAG and load any external modules.

@@ -788,7 +831,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     # Platform specific setup goes before package specific setup. This is for setting
     # defaults like MACOSX_DEPLOYMENT_TARGET on macOS.
     platform = spack.platforms.by_name(pkg.spec.architecture.platform)
-    target = platform.target(pkg.spec.architecture.target)
     platform.setup_platform_environment(pkg, env_mods)

     tty.debug("setup_package: grabbing modifications from dependencies")
@@ -813,15 +855,8 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)

-        if target and target.module_name:
-            load_module(target.module_name)
-
         load_external_modules(pkg)

-    implicit_rpaths = pkg.compiler.implicit_rpaths()
-    if implicit_rpaths:
-        env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
-
     # Make sure nothing's strange about the Spack environment.
     validate(env_mods, tty.warn)
     env_mods.apply_modifications()
@@ -1011,6 +1046,12 @@ def set_all_package_py_globals(self):
             # This includes runtime dependencies, also runtime deps of direct build deps.
             set_package_py_globals(pkg, context=Context.RUN)

+        # Looping over the set of packages a second time
+        # ensures all globals are loaded into the module space prior to
+        # any package setup. This guarantees package setup methods have
+        # access to expected module level definitions such as "spack_cc"
+        for dspec, flag in chain(self.external, self.nonexternal):
+            pkg = dspec.package
             for spec in dspec.dependents():
                 # Note: some specs have dependents that are unreachable from the root, so avoid
                 # setting globals for those.
@@ -1020,6 +1061,15 @@ def set_all_package_py_globals(self):
                 pkg.setup_dependent_package(dependent_module, spec)
                 dependent_module.propagate_changes_to_mro()

+        if self.context == Context.BUILD:
+            pkg = self.specs[0].package
+            module = ModuleChangePropagator(pkg)
+            # std_cmake_args is not sufficiently static to be defined
+            # in set_package_py_globals and is deprecated so its handled
+            # here as a special case
+            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
+            module.propagate_changes_to_mro()
+
     def get_env_modifications(self) -> EnvironmentModifications:
         """Returns the environment variable modifications for the given input specs and context.
         Environment modifications include:
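Note (not part of the diff): the added comment above explains why globals are set in a first pass before any `setup_dependent_package` hook runs in a second pass. A self-contained sketch of that ordering guarantee, with placeholder package names:

```python
# Sketch of the two-pass pattern used above: pass 1 defines module-level
# globals for every package, pass 2 runs hooks that may rely on them.
from itertools import chain

external = ["ext-a"]          # hypothetical external packages
nonexternal = ["pkg-b", "pkg-c"]  # hypothetical Spack-built packages

module_globals = {}
for name in chain(external, nonexternal):  # pass 1: define globals
    module_globals[name] = {"spack_cc": "cc"}

for name in chain(external, nonexternal):  # pass 2: hooks can read them
    assert module_globals[name]["spack_cc"] == "cc"
print("all package modules initialized before any hook ran")
```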
@@ -1089,45 +1139,61 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
     env.prepend_path("PATH", bin_dir)


-def get_cmake_prefix_path(pkg):
-    # Note that unlike modifications_from_dependencies, this does not include
-    # any edits to CMAKE_PREFIX_PATH defined in custom
-    # setup_dependent_build_environment implementations of dependency packages
-    build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
-    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
-    build_link_deps = build_deps | link_deps
-    spack_built = []
-    externals = []
-    # modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
-    # prepending all externals and then all non-externals
-    for dspec in pkg.spec.traverse(root=False, order="post"):
-        if dspec in build_link_deps:
-            if dspec.external:
-                externals.insert(0, dspec)
-            else:
-                spack_built.insert(0, dspec)
-
-    ordered_build_link_deps = spack_built + externals
-    cmake_prefix_path_entries = []
-    for spec in ordered_build_link_deps:
-        cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)
-
-    return filter_system_paths(cmake_prefix_path_entries)
-
-
 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
+    function: Callable,
+    kwargs: Dict,
+    write_pipe: Connection,
+    input_pipe: Optional[Connection],
+    jsfd1: Optional[Connection],
+    jsfd2: Optional[Connection],
 ):
+    """Main entry point in the child process for Spack builds.
+
+    ``_setup_pkg_and_run`` is called by the child process created in
+    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
+    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
+
+    The child process is passed a ``write_pipe``, on which it's expected to send one of
+    the following:
+
+    * ``StopPhase``: error raised by a build process indicating it's stopping at a
+      particular build phase.
+
+    * ``BaseException``: any exception raised by a child build process, which will be
+      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
+      raised in the parent.
+
+    * The return value of ``function()``, which can be anything (except an exception).
+      This is returned to the caller.
+
+    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
+    does not close these file descriptors. Some ``multiprocessing`` backends will close
+    them automatically in the child if they are not passed at process creation time.
+
+    Arguments:
+        serialized_pkg: Spack package install context object (serialized form of the
+            package that we'll build in the child process).
+        function: function to call in the child process; serialized_pkg is passed to
+            this as the first argument.
+        kwargs: additional keyword arguments to pass to ``function()``.
+        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
+            child *must* send a result (or an error) back to parent on.
+        input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
+        jsfd1: gmake Jobserver file descriptor 1.
+        jsfd2: gmake Jobserver file descriptor 2.
+    """
+
     context: str = kwargs.get("context", "build")

     try:
-        # We are in the child process. Python sets sys.stdin to
-        # open(os.devnull) to prevent our process and its parent from
-        # simultaneously reading from the original stdin. But, we assume
-        # that the parent process is not going to read from it till we
-        # are done with the child, so we undo Python's precaution.
-        if input_multiprocess_fd is not None:
-            sys.stdin = os.fdopen(input_multiprocess_fd.fd)
+        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
+        # process and its parent from simultaneously reading from the original stdin. But, we
+        # assume that the parent process is not going to read from it till we are done with the
+        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
+        if input_pipe is not None:
+            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)

         pkg = serialized_pkg.restore()
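Note (not part of the diff): the new docstring above describes a one-shot protocol — the child always sends exactly one object (a result or an exception) over `write_pipe`. A minimal runnable sketch of that protocol, independent of Spack:

```python
# Minimal sketch of the parent/child Connection protocol described above:
# the child must always send exactly one object back, then close the pipe.
import multiprocessing


def child(write_pipe):
    try:
        write_pipe.send(42)  # the return value of function()
    except BaseException as e:
        write_pipe.send(e)  # errors travel over the same pipe
    finally:
        write_pipe.close()


if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    result = read_pipe.recv()  # 42, or an exception instance to re-raise
    p.join()
    print(result)
```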
@@ -1139,17 +1205,18 @@ def _setup_pkg_and_run(
         return_value = function(pkg, kwargs)
         write_pipe.send(return_value)

-    except StopPhase as e:
+    except spack.error.StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
         write_pipe.send(e)
-    except BaseException:
+    except BaseException as e:
         # catch ANYTHING that goes wrong in the child process
-        exc_type, exc, tb = sys.exc_info()
-
         # Need to unwind the traceback in the child because traceback
         # objects can't be sent to the parent.
-        tb_string = traceback.format_exc()
+        exc_type = type(e)
+        tb = e.__traceback__
+        tb_string = "".join(traceback.format_exception(exc_type, e, tb))

         # build up some context from the offending package so we can
         # show that, too.
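Note (not part of the diff): the hunk above formats the traceback inside the child because traceback objects are not picklable, while their string rendering is. A standalone sketch of that step:

```python
# Why the traceback is rendered to a string in the child: traceback objects
# cannot cross the process boundary, but strings can.
import traceback

try:
    1 / 0
except BaseException as e:
    exc_type = type(e)
    tb = e.__traceback__
    tb_string = "".join(traceback.format_exception(exc_type, e, tb))

print(tb_string.splitlines()[-1])  # -> ZeroDivisionError: division by zero
```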
@@ -1166,8 +1233,8 @@ def _setup_pkg_and_run(
         elif context == "test":
             logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

-        error_msg = str(exc)
-        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
+        error_msg = str(e)
+        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
                 "The '{}' package cannot find an attribute while trying to {}. "
@@ -1177,7 +1244,7 @@ def _setup_pkg_and_run(
                 "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
-            error_msg = "{}\n\n{}".format(str(exc), error_msg)
+            error_msg = "{}\n\n{}".format(str(e), error_msg)

         # make a pickleable exception to send to parent.
         msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1195,8 +1262,8 @@ def _setup_pkg_and_run(
     finally:
         write_pipe.close()
-        if input_multiprocess_fd is not None:
-            input_multiprocess_fd.close()
+        if input_pipe is not None:
+            input_pipe.close()


 def start_build_process(pkg, function, kwargs):
@@ -1223,23 +1290,9 @@ def child_fun():
     If something goes wrong, the child process catches the error and
     passes it to the parent wrapped in a ChildError. The parent is
     expected to handle (or re-raise) the ChildError.
-
-    This uses `multiprocessing.Process` to create the child process. The
-    mechanism used to create the process differs on different operating
-    systems and for different versions of Python. In some cases "fork"
-    is used (i.e. the "fork" system call) and some cases it starts an
-    entirely new Python interpreter process (in the docs this is referred
-    to as the "spawn" start method). Breaking it down by OS:
-
-    - Linux always uses fork.
-    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
-    - Windows always uses the "spawn" start method.
-
-    For more information on `multiprocessing` child process creation
-    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
     read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
-    input_multiprocess_fd = None
+    input_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None
@@ -1248,14 +1301,13 @@ def child_fun():
     try:
         # Forward sys.stdin when appropriate, to allow toggling verbosity
         if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
-            input_fd = os.dup(sys.stdin.fileno())
-            input_multiprocess_fd = MultiProcessFd(input_fd)
+            input_fd = Connection(os.dup(sys.stdin.fileno()))
         mflags = os.environ.get("MAKEFLAGS", False)
         if mflags:
             m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
             if m:
-                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
-                jobserver_fd2 = MultiProcessFd(int(m.group(2)))
+                jobserver_fd1 = Connection(int(m.group(1)))
+                jobserver_fd2 = Connection(int(m.group(2)))

     p = multiprocessing.Process(
         target=_setup_pkg_and_run,
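Note (not part of the diff): the hunk above inherits GNU make's jobserver file descriptors from `MAKEFLAGS`, which carries them as a pair of numbers (e.g. `--jobserver-auth=3,4`). A standalone sketch of the parsing step:

```python
# How the jobserver descriptors are recovered from MAKEFLAGS; the value
# below is an example, normally read from os.environ["MAKEFLAGS"].
import re

mflags = "-j8 --jobserver-auth=3,4"
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m:
    jobserver_fd1, jobserver_fd2 = int(m.group(1)), int(m.group(2))
    print(jobserver_fd1, jobserver_fd2)  # -> 3 4
```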
@@ -1264,7 +1316,7 @@ def child_fun():
             function,
             kwargs,
             write_pipe,
-            input_multiprocess_fd,
+            input_fd,
             jobserver_fd1,
             jobserver_fd2,
         ),
@@ -1284,8 +1336,8 @@ def child_fun():
     finally:
         # Close the input stream in the parent process
-        if input_multiprocess_fd is not None:
-            input_multiprocess_fd.close()
+        if input_fd is not None:
+            input_fd.close()

     def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
@@ -1300,7 +1352,7 @@ def exitcode_msg(p):
     p.join()

     # If returns a StopPhase, raise it
-    if isinstance(child_result, StopPhase):
+    if isinstance(child_result, spack.error.StopPhase):
         # do not print
         raise child_result
@@ -1509,17 +1561,6 @@ def _make_child_error(msg, module, name, traceback, log, log_type, context):
     return ChildError(msg, module, name, traceback, log, log_type, context)


-class StopPhase(spack.error.SpackError):
-    """Pickle-able exception to control stopped builds."""
-
-    def __reduce__(self):
-        return _make_stop_phase, (self.message, self.long_message)
-
-
-def _make_stop_phase(msg, long_msg):
-    return StopPhase(msg, long_msg)
-
-
 def write_log_summary(out, log_type, log, last=None):
     errors, warnings = parse_log_events(log)
     nerr = len(errors)
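Note (not part of the diff): `StopPhase` is removed here because, per the rewritten references above, it now lives in `spack.error`. Its `__reduce__` trick is what lets the exception survive the pickle round-trip through the pipe. A generic sketch of that pattern, with hypothetical names:

```python
# Sketch of the __reduce__ pattern used by StopPhase: pickle re-creates the
# exception by calling a module-level factory with the saved arguments.
import pickle


class ControlSignal(Exception):  # illustrative stand-in for StopPhase
    def __init__(self, message):
        super().__init__(message)
        self.message = message

    def __reduce__(self):
        return _make_control_signal, (self.message,)


def _make_control_signal(message):
    return ControlSignal(message)


restored = pickle.loads(pickle.dumps(ControlSignal("stop after configure")))
print(restored.message)  # -> stop after configure
```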
@@ -1553,21 +1594,21 @@ class ModuleChangePropagator:
     _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

-    def __init__(self, package):
+    def __init__(self, package: spack.package_base.PackageBase) -> None:
         self._set_self_attributes("package", package)
         self._set_self_attributes("current_module", package.module)

         #: Modules for the classes in the MRO up to PackageBase
         modules_in_mro = []
-        for cls in type(package).__mro__:
-            module = cls.module
+        for cls in package.__class__.__mro__:
+            module = getattr(cls, "module", None)

-            if module == self.current_module:
-                continue
-
-            if module == spack.package_base:
+            if module is None or module is spack.package_base:
                 break
+
+            if module is self.current_module:
+                continue

             modules_in_mro.append(module)
         self._set_self_attributes("modules_in_mro", modules_in_mro)
         self._set_self_attributes("_set_attributes", {})
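Note (not part of the diff): the rewritten `__init__` above walks the package class's MRO, collecting one module per class, stopping at the base package module and skipping the package's own module. A self-contained sketch of that walk, with strings standing in for module objects:

```python
# Sketch of the MRO walk above: stop at the base module, skip the current
# module, collect everything in between.
class Base:
    module = "base-module"       # stand-in for spack.package_base


class Middle(Base):
    module = "middle-module"


class Leaf(Middle):
    module = "leaf-module"


current_module = Leaf.module
modules_in_mro = []
for cls in Leaf.__mro__:
    module = getattr(cls, "module", None)
    if module is None or module == "base-module":
        break
    if module is current_module:
        continue
    modules_in_mro.append(module)

print(modules_in_mro)  # -> ['middle-module']
```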
@@ -8,7 +8,7 @@
 import llnl.util.lang

 import spack.builder
-import spack.installer
+import spack.error
 import spack.relocate
 import spack.spec
 import spack.store
@@ -34,7 +34,7 @@ def check_paths(path_list, filetype, predicate):
         if not predicate(abs_path):
             msg = "Install failed for {0}. No such {1} in prefix: {2}"
             msg = msg.format(pkg.name, filetype, path)
-            raise spack.installer.InstallError(msg)
+            raise spack.error.InstallError(msg)

     check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
     check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)
@@ -42,7 +42,7 @@ def check_paths(path_list, filetype, predicate):
     ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
     if all(map(ignore_file, os.listdir(pkg.prefix))):
         msg = "Install failed for {0}. Nothing was installed!"
-        raise spack.installer.InstallError(msg.format(pkg.name))
+        raise spack.error.InstallError(msg.format(pkg.name))


 def apply_macos_rpath_fixups(builder: spack.builder.Builder):
@@ -2,10 +2,11 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os

 import llnl.util.filesystem as fs

 import spack.directives
-import spack.package_base
 import spack.util.executable

 from .autotools import AutotoolsBuilder, AutotoolsPackage
@@ -46,18 +47,12 @@ class AspellDictPackage(AutotoolsPackage):
     #: Override the default autotools builder
     AutotoolsBuilder = AspellBuilder

-    def view_destination(self, view):
-        aspell_spec = self.spec["aspell"]
-        if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
-            raise spack.package_base.ExtensionError(
-                "aspell does not support non-global extensions"
-            )
-        aspell = aspell_spec.command
-        return aspell("dump", "config", "dict-dir", output=str).strip()
-
-    def view_source(self):
-        return self.prefix.lib
-
     def patch(self):
-        fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
-        fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
+        aspell_spec = self.spec["aspell"]
+        aspell = aspell_spec.command
+        dictdir = aspell("dump", "config", "dict-dir", output=str).strip()
+        datadir = aspell("dump", "config", "data-dir", output=str).strip()
+        dictdir = os.path.relpath(dictdir, aspell_spec.prefix)
+        datadir = os.path.relpath(datadir, aspell_spec.prefix)
+        fs.filter_file(r"^dictdir=.*$", f"dictdir=/{dictdir}", "configure")
+        fs.filter_file(r"^datadir=.*$", f"datadir=/{datadir}", "configure")
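Note (not part of the diff): the rewritten `patch()` asks the installed aspell for its dictionary and data directories instead of hardcoding `lib`, then rebases them against the aspell prefix. The key step is `os.path.relpath`, shown here with hypothetical paths:

```python
# The core of the new patch(): make aspell's reported absolute directory
# relative to the aspell prefix. Paths below are hypothetical.
import os

prefix = "/opt/spack/aspell-0.60.8"
dictdir = "/opt/spack/aspell-0.60.8/lib/aspell-0.60"  # from `aspell dump config dict-dir`
print(os.path.relpath(dictdir, prefix))  # -> lib/aspell-0.60
```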
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 import os
 import os.path
 import stat
@@ -14,6 +13,7 @@
 import spack.build_environment
 import spack.builder
+import spack.error
 import spack.package_base
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when
@@ -249,7 +249,7 @@ def runs_ok(script_abs_path):
     # An external gnuconfig may not not have a prefix.
     if gnuconfig_dir is None:
-        raise spack.build_environment.InstallError(
+        raise spack.error.InstallError(
             "Spack could not find substitutes for GNU config files because no "
             "prefix is available for the `gnuconfig` package. Make sure you set a "
             "prefix path instead of modules for external `gnuconfig`."
@@ -269,7 +269,7 @@ def runs_ok(script_abs_path):
         msg += (
             " or the `gnuconfig` package prefix is misconfigured as" " an external package"
         )
-        raise spack.build_environment.InstallError(msg)
+        raise spack.error.InstallError(msg)

     # Filter working substitutes
     candidates = [f for f in candidates if runs_ok(f)]
@@ -294,9 +294,7 @@ def runs_ok(script_abs_path):
         and set the prefix to the directory containing the `config.guess` and
         `config.sub` files.
         """
-        raise spack.build_environment.InstallError(
-            msg.format(", ".join(to_be_found), self.name)
-        )
+        raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))

     # Copy the good files over the bad ones
     for abs_path in to_be_patched:
@@ -549,13 +547,12 @@ def autoreconf(self, pkg, spec, prefix):
         tty.warn("* a custom AUTORECONF phase in the package *")
         tty.warn("*********************************************************")
         with fs.working_dir(self.configure_directory):
-            m = inspect.getmodule(self.pkg)
             # This line is what is needed most of the time
             # --install, --verbose, --force
             autoreconf_args = ["-ivf"]
             autoreconf_args += self.autoreconf_search_path_args
             autoreconf_args += self.autoreconf_extra_args
-            m.autoreconf(*autoreconf_args)
+            self.pkg.module.autoreconf(*autoreconf_args)

     @property
     def autoreconf_search_path_args(self):
@@ -579,7 +576,9 @@ def set_configure_or_die(self):
             raise RuntimeError(msg.format(self.configure_directory))

         # Monkey-patch the configure script in the corresponding module
-        inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)
+        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
+        globals_for_pkg.configure = Executable(self.configure_abs_path)
+        globals_for_pkg.propagate_changes_to_mro()

     def configure_args(self):
         """Return the list of all the arguments that must be passed to configure,
@@ -596,7 +595,7 @@ def configure(self, pkg, spec, prefix):
         options += self.configure_args()

         with fs.working_dir(self.build_directory, create=True):
-            inspect.getmodule(self.pkg).configure(*options)
+            pkg.module.configure(*options)

     def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""
@@ -604,12 +603,12 @@ def build(self, pkg, spec, prefix):
         params = ["V=1"]
         params += self.build_targets
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make(*params)
+            pkg.module.make(*params)

     def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make(*self.install_targets)
+            pkg.module.make(*self.install_targets)

 spack.builder.run_after("build")(execute_build_time_tests)

@@ -688,9 +687,8 @@ def _activate_or_not(
     variant = variant or name

-    # Defensively look that the name passed as argument is among
-    # variants
-    if variant not in self.pkg.variants:
+    # Defensively look that the name passed as argument is among variants
+    if not self.pkg.has_variant(variant):
         msg = '"{0}" is not a variant of "{1}"'
         raise KeyError(msg.format(variant, self.pkg.name))
@@ -699,27 +697,19 @@ def _activate_or_not(
     # Create a list of pairs. Each pair includes a configuration
     # option and whether or not that option is activated
-    variant_desc, _ = self.pkg.variants[variant]
-    if set(variant_desc.values) == set((True, False)):
+    vdef = self.pkg.get_variant(variant)
+    if set(vdef.values) == set((True, False)):
         # BoolValuedVariant carry information about a single option.
         # Nonetheless, for uniformity of treatment we'll package them
         # in an iterable of one element.
-        condition = "+{name}".format(name=variant)
-        options = [(name, condition in spec)]
+        options = [(name, f"+{variant}" in spec)]
     else:
-        condition = "{variant}={value}"
         # "feature_values" is used to track values which correspond to
         # features which can be enabled or disabled as understood by the
         # package's build system. It excludes values which have special
         # meanings and do not correspond to features (e.g. "none")
-        feature_values = (
-            getattr(variant_desc.values, "feature_values", None) or variant_desc.values
-        )
-
-        options = [
-            (value, condition.format(variant=variant, value=value) in spec)
-            for value in feature_values
-        ]
+        feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
+        options = [(value, f"{variant}={value}" in spec) for value in feature_values]

     # For each allowed value in the list of values
     for option_value, activated in options:
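Note (not part of the diff): the rewrite above drops the intermediate `condition` templates in favor of direct f-string spec queries. The two query shapes involved, shown with plain strings for clarity (in the real code they are tested for containment in a concrete `spack.spec.Spec`):

```python
# Sketch of the two spec-query strings built above.
variant = "shared"
value = "openmp"

bool_query = f"+{variant}"          # -> "+shared" (boolean variant enabled)
multi_query = f"{variant}={value}"  # -> "shared=openmp" (multi-valued variant)

# In the real code:
#     options = [(name, f"+{variant}" in spec)]
#     options = [(value, f"{variant}={value}" in spec) for value in feature_values]
print(bool_query, multi_query)
```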
@@ -10,7 +10,6 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

-import spack.build_environment
 import spack.builder

 from .cmake import CMakeBuilder, CMakePackage
@@ -89,7 +88,7 @@ def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
     if variant is None:
         variant = cmake_var.lower()

-    if variant not in self.pkg.variants:
+    if not self.pkg.has_variant(variant):
         raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

     if variant not in self.pkg.spec.variants:
@@ -297,18 +296,6 @@ def initconfig_hardware_entries(self):
     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
-        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
-        cmake_rpaths_path = ";".join(cmake_rpaths_env)
-        complete_rpath_list = cmake_rpaths_path
-        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
-            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
-            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
-            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
-
-        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
-            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
-            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
-            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
-
         return [
             "#------------------{0}".format("-" * 60),
@@ -318,8 +305,6 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
             cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
-            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
-            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
             self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import inspect
-
 import llnl.util.filesystem as fs

 import spack.builder
@@ -72,9 +70,7 @@ def check_args(self):
     def build(self, pkg, spec, prefix):
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(pkg).cargo(
-                "install", "--root", "out", "--path", ".", *self.build_args
-            )
+            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

     def install(self, pkg, spec, prefix):
         """Copy build files into package prefix."""
@@ -86,4 +82,4 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Run "cargo test"."""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).cargo("test", *self.check_args)
+            self.pkg.module.cargo("test", *self.check_args)
@@ -3,22 +3,24 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
-import inspect
 import os
 import pathlib
 import platform
 import re
 import sys
-from typing import List, Optional, Tuple
+from itertools import chain
+from typing import List, Optional, Set, Tuple

 import llnl.util.filesystem as fs
+from llnl.util.lang import stable_partition

-import spack.build_environment
 import spack.builder
 import spack.deptypes as dt
+import spack.error
 import spack.package_base
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when
+from spack.util.environment import filter_system_paths

 from ._checks import BaseBuilder, execute_build_time_tests
@@ -146,11 +148,30 @@ def _values(x):
         default=default,
         values=_values,
         description="the build system generator to use",
+        when="build_system=cmake",
     )
     for x in not_used:
         conflicts(f"generator={x}")


+def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
+    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
+    attribute of direct build/test and transitive link dependencies."""
+    # Add direct build/test deps
+    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
+    # Add transitive link deps
+    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
+    # Separate out externals so they do not shadow Spack prefixes
+    externals, spack_built = stable_partition(
+        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
+        lambda x: x.external,
+    )
+
+    return filter_system_paths(
+        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
+    )
+
+
 class CMakePackage(spack.package_base.PackageBase):
     """Specialized class for packages built using CMake
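Note (not part of the diff): the new `get_cmake_prefix_path` relies on `stable_partition` to keep topological order while pushing externals behind Spack-built prefixes. A self-contained sketch of that partitioning step (the helper below reimplements the idea for illustration; the real one comes from `llnl.util.lang`):

```python
# Sketch of the ordering used above: partition preserves input order, and
# chaining spack-built before externals keeps Spack prefixes first.
from itertools import chain
from typing import Callable, Iterable, List, Tuple


def stable_partition(iterable: Iterable, predicate: Callable) -> Tuple[List, List]:
    """Split into (matching, non-matching), preserving input order."""
    true_items, false_items = [], []
    for item in iterable:
        (true_items if predicate(item) else false_items).append(item)
    return true_items, false_items


deps = [("zlib", False), ("openssl", True), ("cmake", False)]  # (name, external)
externals, spack_built = stable_partition(deps, lambda d: d[1])
print([d[0] for d in chain(spack_built, externals)])  # -> ['zlib', 'cmake', 'openssl']
```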
@@ -345,7 +366,7 @@ def std_args(pkg, generator=None):
             msg = "Invalid CMake generator: '{0}'\n".format(generator)
             msg += "CMakePackage currently supports the following "
             msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
-            raise spack.package_base.InstallError(msg)
+            raise spack.error.InstallError(msg)

         try:
             build_type = pkg.spec.variants["build_type"].value
@@ -357,6 +378,16 @@ def std_args(pkg, generator=None):
             "-G",
             generator,
             define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
+            define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
+            # only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
+            define(
+                "CMAKE_INSTALL_RPATH",
+                [
+                    pathlib.Path(pkg.prefix, "lib").as_posix(),
+                    pathlib.Path(pkg.prefix, "lib64").as_posix(),
+                ],
+            ),
+            define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
             define("CMAKE_BUILD_TYPE", build_type),
         ]
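Note (not part of the diff): a sketch of the command-line flags the new `define()` entries above translate into. The helper below mimics the behavior of `CMakeBuilder.define` (booleans become `BOOL`, lists are semicolon-joined) purely for illustration, not as the actual implementation:

```python
# Illustrative stand-in for CMakeBuilder.define, to show the resulting flags.
def define(cmake_var, value):
    if isinstance(value, bool):
        kind, string = "BOOL", "ON" if value else "OFF"
    elif isinstance(value, (list, tuple)):
        kind, string = "STRING", ";".join(value)
    else:
        kind, string = "STRING", str(value)
    return f"-D{cmake_var}:{kind}={string}"


print(define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True))
# -> -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON
print(define("CMAKE_INSTALL_RPATH", ["/opt/pkg/lib", "/opt/pkg/lib64"]))
# -> -DCMAKE_INSTALL_RPATH:STRING=/opt/pkg/lib;/opt/pkg/lib64
```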
@@ -371,15 +402,6 @@ def std_args(pkg, generator=None):
         _conditional_cmake_defaults(pkg, args)
         _maybe_set_python_hints(pkg, args)

-        # Set up CMake rpath
-        args.extend(
-            [
-                define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
-                define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
-                define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
-            ]
-        )
-
         return args

     @staticmethod
@@ -505,7 +527,7 @@ def define_from_variant(self, cmake_var, variant=None):
         if variant is None:
             variant = cmake_var.lower()

-        if variant not in self.pkg.variants:
+        if not self.pkg.has_variant(variant):
             raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

         if variant not in self.pkg.spec.variants:
@@ -540,28 +562,35 @@ def cmake_args(self):
     def cmake(self, pkg, spec, prefix):
         """Runs ``cmake`` in the build directory"""

+        # skip cmake phase if it is an incremental develop build
+        if spec.is_develop and os.path.isfile(
+            os.path.join(self.build_directory, "CMakeCache.txt")
+        ):
+            return
+
         options = self.std_cmake_args
         options += self.cmake_args()
         options.append(os.path.abspath(self.root_cmakelists_dir))
         with fs.working_dir(self.build_directory, create=True):
-            inspect.getmodule(self.pkg).cmake(*options)
+            pkg.module.cmake(*options)

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                inspect.getmodule(self.pkg).make(*self.build_targets)
+                pkg.module.make(*self.build_targets)
             elif self.generator == "Ninja":
                 self.build_targets.append("-v")
-                inspect.getmodule(self.pkg).ninja(*self.build_targets)
+                pkg.module.ninja(*self.build_targets)

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                inspect.getmodule(self.pkg).make(*self.install_targets)
+                pkg.module.make(*self.install_targets)
             elif self.generator == "Ninja":
-                inspect.getmodule(self.pkg).ninja(*self.install_targets)
+                pkg.module.ninja(*self.install_targets)

 spack.builder.run_after("build")(execute_build_time_tests)

@@ -14,6 +14,7 @@
 import spack.compiler
 import spack.package_base
+import spack.util.executable

 # Local "type" for type hints
 Path = Union[str, pathlib.Path]
@@ -3,6 +3,9 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import re
+from typing import Iterable, List
+
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.multimethod import when
@@ -44,6 +47,7 @@ class CudaPackage(PackageBase):
         "87",
         "89",
         "90",
+        "90a",
     )

     # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -70,6 +74,27 @@ def cuda_flags(arch_list):
             for s in arch_list
         ]

+    @staticmethod
+    def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
+        """Adds a decimal place to each CUDA arch.
+
+        >>> compute_capabilities(['90', '90a'])
+        ['9.0', '9.0a']
+
+        Args:
+            arch_list: A list of integer strings, optionally followed by a suffix.
+
+        Returns:
+            A list of float strings, optionally followed by a suffix
+        """
+        pattern = re.compile(r"(\d+)")
+        capabilities = []
+        for arch in arch_list:
+            _, number, letter = re.split(pattern, arch)
+            number = "{0:.1f}".format(float(number) / 10.0)
+            capabilities.append(number + letter)
+        return capabilities
+
     depends_on("cuda", when="+cuda")

     # CUDA version vs Architecture
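Note (not part of the diff): the `re.split` call in the new `compute_capabilities` works because the capturing group keeps the digits as the middle element of the split result. A standalone check:

```python
# How the arch string is split above: re.split with a capturing group
# returns ['', digits, suffix] for inputs like "90" or "90a".
import re

pattern = re.compile(r"(\d+)")
for arch in ["90", "90a"]:
    _, number, letter = re.split(pattern, arch)
    print("{0:.1f}".format(float(number) / 10.0) + letter)
# -> 9.0
# -> 9.0a
```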
@@ -85,8 +110,8 @@ def cuda_flags(arch_list):
|
|||||||
|
|
||||||
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
|
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
|
||||||
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
|
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
|
||||||
depends_on("cuda@5.0:", when="cuda_arch=35")
|
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
|
||||||
depends_on("cuda@6.5:", when="cuda_arch=37")
|
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
|
||||||
|
|
||||||
depends_on("cuda@6.0:", when="cuda_arch=50")
|
depends_on("cuda@6.0:", when="cuda_arch=50")
|
||||||
depends_on("cuda@6.5:", when="cuda_arch=52")
|
depends_on("cuda@6.5:", when="cuda_arch=52")
|
||||||
@@ -106,6 +131,7 @@ def cuda_flags(arch_list):
|
|||||||
depends_on("cuda@11.8:", when="cuda_arch=89")
|
depends_on("cuda@11.8:", when="cuda_arch=89")
|
||||||
|
|
||||||
depends_on("cuda@12.0:", when="cuda_arch=90")
|
depends_on("cuda@12.0:", when="cuda_arch=90")
|
||||||
|
depends_on("cuda@12.0:", when="cuda_arch=90a")
|
||||||
|
|
||||||
# From the NVIDIA install guide we know of conflicts for particular
|
# From the NVIDIA install guide we know of conflicts for particular
|
||||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||||
@@ -124,7 +150,6 @@ def cuda_flags(arch_list):
|
|||||||
# minimum supported versions
|
# minimum supported versions
|
||||||
conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
|
conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
|
||||||
conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
|
conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
|
||||||
conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
|
|
||||||
conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
|
conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
|
||||||
|
|
||||||
# maximum supported version
|
# maximum supported version
|
||||||
@@ -145,7 +170,8 @@ def cuda_flags(arch_list):
|
|||||||
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
|
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
|
||||||
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
|
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
|
||||||
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
||||||
conflicts("%clang@18:", when="+cuda ^cuda@:12.6")
|
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
|
||||||
|
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
|
||||||
|
|
||||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||||
@@ -215,6 +241,11 @@ def cuda_flags(arch_list):
|
|||||||
conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
|
conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
|
||||||
conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
|
conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
|
||||||
|
|
||||||
|
# ARM
|
||||||
|
# https://github.com/spack/spack/pull/39666#issuecomment-2377609263
|
||||||
|
# Might need to be expanded to other gcc versions
|
||||||
|
conflicts("%gcc@13.2.0", when="+cuda ^cuda@:12.4 target=aarch64:")
|
||||||
|
|
||||||
# XL is mostly relevant for ppc64le Linux
|
# XL is mostly relevant for ppc64le Linux
|
||||||
conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
|
conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
|
||||||
conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
|
conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
|
||||||
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import inspect
-
 import llnl.util.filesystem as fs

 import spack.builder

@@ -46,16 +44,27 @@ class GoBuilder(BaseBuilder):
     +-----------------------------------------------+--------------------+
     | **Method**                                    | **Purpose**        |
     +===============================================+====================+
-    | :py:meth:`~.GoBuilder.build_args`             | Specify arguments  |
+    | :py:attr:`~.GoBuilder.build_args`             | Specify arguments  |
     |                                               | to ``go build``    |
     +-----------------------------------------------+--------------------+
-    | :py:meth:`~.GoBuilder.check_args`             | Specify arguments  |
+    | :py:attr:`~.GoBuilder.check_args`             | Specify arguments  |
     |                                               | to ``go test``     |
     +-----------------------------------------------+--------------------+
     """

     phases = ("build", "install")

+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("check", "installcheck")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = (
+        "build_args",
+        "check_args",
+        "build_directory",
+        "install_time_test_callbacks",
+    )
+
     #: Callback names for install-time test
     install_time_test_callbacks = ["check"]

@@ -82,7 +91,7 @@ def check_args(self):
     def build(self, pkg, spec, prefix):
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(pkg).go("build", *self.build_args)
+            pkg.module.go("build", *self.build_args)

     def install(self, pkg, spec, prefix):
         """Install built binaries into prefix bin."""

@@ -95,4 +104,4 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Run ``go test .`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).go("test", *self.check_args)
+            self.pkg.module.go("test", *self.check_args)
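The change that repeats through this and the following build-system hunks swaps `inspect.getmodule(self.pkg)` for the `module` attribute that every package object already carries; both resolve to the package's own Python module, where Spack's build environment injects executables such as `go`, `make`, and `ninja`. A rough before/after sketch (builder method bodies only, assuming a builder whose `pkg` attribute is a Spack package):

    import inspect


    def build_old(self, pkg, spec, prefix):
        # Before: look up the defining module through the stdlib.
        inspect.getmodule(self.pkg).go("build", *self.build_args)


    def build_new(self, pkg, spec, prefix):
        # After: use the module reference the package already holds.
        pkg.module.go("build", *self.build_args)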
@@ -22,9 +22,10 @@
     install,
 )

+import spack.builder
 import spack.error
 from spack.build_environment import dso_suffix
-from spack.package_base import InstallError
+from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.prefix import Prefix
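This import swap follows `InstallError` moving from `spack.package_base` to `spack.error`; call sites only need the new path. A hedged sketch of the only change required in dependent code (the `check_prefix` helper is illustrative):

    from spack.error import InstallError


    def check_prefix(prefix):
        # Raising and catching work exactly as before the move.
        if not prefix:
            raise InstallError("no installation prefix configured")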
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 from typing import List

 import llnl.util.filesystem as fs

@@ -103,12 +102,12 @@ def edit(self, pkg, spec, prefix):
     def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make(*self.build_targets)
+            pkg.module.make(*self.build_targets)

     def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make(*self.install_targets)
+            pkg.module.make(*self.install_targets)

     spack.builder.run_after("build")(execute_build_time_tests)
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 import os
 from typing import List

@@ -195,19 +194,19 @@ def meson(self, pkg, spec, prefix):
         options += self.std_meson_args
         options += self.meson_args()
         with fs.working_dir(self.build_directory, create=True):
-            inspect.getmodule(self.pkg).meson(*options)
+            pkg.module.meson(*options)

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         options = ["-v"]
         options += self.build_targets
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).ninja(*options)
+            pkg.module.ninja(*options)

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).ninja(*self.install_targets)
+            pkg.module.ninja(*self.install_targets)

     spack.builder.run_after("build")(execute_build_time_tests)
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 from typing import List  # novm

 import llnl.util.filesystem as fs

@@ -104,7 +103,7 @@ def msbuild_install_args(self):
     def build(self, pkg, spec, prefix):
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).msbuild(
+            pkg.module.msbuild(
                 *self.std_msbuild_args,
                 *self.msbuild_args(),
                 self.define_targets(*self.build_targets),

@@ -114,6 +113,6 @@ def install(self, pkg, spec, prefix):
         """Run "msbuild" on the install targets specified by the builder.
         This is INSTALL by default"""
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).msbuild(
+            pkg.module.msbuild(
                 *self.msbuild_install_args(), self.define_targets(*self.install_targets)
             )
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 from typing import List  # novm

 import llnl.util.filesystem as fs

@@ -132,9 +131,7 @@ def build(self, pkg, spec, prefix):
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).nmake(
-                *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
-            )
+            pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)

     def install(self, pkg, spec, prefix):
         """Run "nmake" on the install targets specified by the builder.

@@ -146,6 +143,4 @@ def install(self, pkg, spec, prefix):
         opts.append("/F{}".format(self.makefile_name))
         opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).nmake(
-                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
-            )
+            pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
-
 import spack.builder
 import spack.package_base
 from spack.directives import build_system, extends

@@ -47,7 +45,7 @@ class OctaveBuilder(BaseBuilder):

     def install(self, pkg, spec, prefix):
         """Install the package from the archive file"""
-        inspect.getmodule(self.pkg).octave(
+        pkg.module.octave(
             "--quiet",
             "--norc",
             "--built-in-docstrings-file=/dev/null",
@@ -15,7 +15,7 @@
 import spack.util.path
 from spack.build_environment import dso_suffix
 from spack.directives import conflicts, license, redistribute, variant
-from spack.package_base import InstallError
+from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 import os
 from typing import Iterable

@@ -134,7 +133,7 @@ def build_method(self):
     def build_executable(self):
         """Returns the executable method to build the perl package"""
         if self.build_method == "Makefile.PL":
-            build_executable = inspect.getmodule(self.pkg).make
+            build_executable = self.pkg.module.make
         elif self.build_method == "Build.PL":
             build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
         return build_executable

@@ -158,7 +157,7 @@ def configure(self, pkg, spec, prefix):
             options = ["Build.PL", "--install_base", prefix]
         options += self.configure_args()

-        inspect.getmodule(self.pkg).perl(*options)
+        pkg.module.perl(*options)

         # It is possible that the shebang in the Build script that is created from
         # Build.PL may be too long causing the build to fail. Patching the shebang
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import functools
-import inspect
 import operator
 import os
 import re

@@ -25,6 +24,8 @@
 import spack.detection
 import spack.multimethod
 import spack.package_base
+import spack.platforms
+import spack.repo
 import spack.spec
 import spack.store
 from spack.directives import build_system, depends_on, extends

@@ -228,7 +229,7 @@ def test_imports(self) -> None:

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = inspect.getmodule(self).python  # type: ignore[union-attr]
+        python = self.module.python
         for module in self.import_modules:
             with test_part(
                 self,

@@ -315,9 +316,9 @@ def get_external_python_for_prefix(self):
         )

         python_externals_detected = [
-            d.spec
-            for d in python_externals_detection.get("python", [])
-            if d.prefix == self.spec.external_path
+            spec
+            for spec in python_externals_detection.get("python", [])
+            if spec.external_path == self.spec.external_path
         ]
         if python_externals_detected:
             return python_externals_detected[0]

@@ -338,7 +339,7 @@ class PythonPackage(PythonExtension):
     legacy_buildsystem = "python_pip"

     #: Callback names for install-time test
-    install_time_test_callbacks = ["test"]
+    install_time_test_callbacks = ["test_imports"]

     build_system("python_pip")

@@ -428,7 +429,7 @@ class PythonPipBuilder(BaseBuilder):
     phases = ("install",)

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("test",)
+    legacy_methods = ("test_imports",)

     #: Same as legacy_methods, but the signature is different
     legacy_long_methods = ("install_options", "global_options", "config_settings")

@@ -437,7 +438,7 @@ class PythonPipBuilder(BaseBuilder):
     legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")

     #: Callback names for install-time test
-    install_time_test_callbacks = ["test"]
+    install_time_test_callbacks = ["test_imports"]

     @staticmethod
     def std_args(cls) -> List[str]:
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
-
 from llnl.util.filesystem import working_dir

 import spack.builder

@@ -66,17 +64,17 @@ def qmake_args(self):
     def qmake(self, pkg, spec, prefix):
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).qmake(*self.qmake_args())
+            pkg.module.qmake(*self.qmake_args())

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make()
+            pkg.module.make()

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make("install")
+            pkg.module.make("install")

     def check(self):
         """Search the Makefile for a ``check:`` target and runs it if found."""
@@ -2,10 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 from typing import Optional, Tuple

 import llnl.util.lang as lang
+from llnl.util.filesystem import mkdirp

 from spack.directives import extends

@@ -37,6 +37,7 @@ def configure_vars(self):

     def install(self, pkg, spec, prefix):
         """Installs an R package."""
+        mkdirp(pkg.module.r_lib_dir)

         config_args = self.configure_args()
         config_vars = self.configure_vars()

@@ -44,14 +45,14 @@ def install(self, pkg, spec, prefix):
         args = ["--vanilla", "CMD", "INSTALL"]

         if config_args:
-            args.append("--configure-args={0}".format(" ".join(config_args)))
+            args.append(f"--configure-args={' '.join(config_args)}")

         if config_vars:
-            args.append("--configure-vars={0}".format(" ".join(config_vars)))
+            args.append(f"--configure-vars={' '.join(config_vars)}")

-        args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])
+        args.extend([f"--library={pkg.module.r_lib_dir}", self.stage.source_path])

-        inspect.getmodule(self.pkg).R(*args)
+        pkg.module.R(*args)


 class RPackage(Package):

@@ -80,27 +81,21 @@ class RPackage(Package):
     @lang.classproperty
     def homepage(cls):
         if cls.cran:
-            return "https://cloud.r-project.org/package=" + cls.cran
+            return f"https://cloud.r-project.org/package={cls.cran}"
         elif cls.bioc:
-            return "https://bioconductor.org/packages/" + cls.bioc
+            return f"https://bioconductor.org/packages/{cls.bioc}"

     @lang.classproperty
     def url(cls):
         if cls.cran:
-            return (
-                "https://cloud.r-project.org/src/contrib/"
-                + cls.cran
-                + "_"
-                + str(list(cls.versions)[0])
-                + ".tar.gz"
-            )
+            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"

     @lang.classproperty
     def list_url(cls):
         if cls.cran:
-            return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"
+            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"

     @property
     def git(self):
         if self.bioc:
-            return "https://git.bioconductor.org/packages/" + self.bioc
+            return f"https://git.bioconductor.org/packages/{self.bioc}"
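The `RPackage` hunks are a mechanical conversion from string concatenation and `str.format` to f-strings, with no behavioral change. For example, with a CRAN name of `ggplot2` (an arbitrary example value) both spellings of `list_url` agree:

    cran = "ggplot2"  # illustrative value only
    assert f"https://cloud.r-project.org/src/contrib/Archive/{cran}/" == (
        "https://cloud.r-project.org/src/contrib/Archive/" + cran + "/"
    )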
@@ -11,9 +11,9 @@

 import spack.builder
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
+from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
 from spack.package_base import PackageBase
-from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import glob
-import inspect

 import spack.builder
 import spack.package_base

@@ -52,10 +51,10 @@ def build(self, pkg, spec, prefix):
         gemspecs = glob.glob("*.gemspec")
         rakefiles = glob.glob("Rakefile")
         if gemspecs:
-            inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
+            pkg.module.gem("build", "--norc", gemspecs[0])
         elif rakefiles:
-            jobs = inspect.getmodule(self.pkg).make_jobs
-            inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
+            jobs = pkg.module.make_jobs
+            pkg.module.rake("package", "-j{0}".format(jobs))
         else:
             # Some Ruby packages only ship `*.gem` files, so nothing to build
             pass

@@ -70,6 +69,6 @@ def install(self, pkg, spec, prefix):
         # if --install-dir is not used, GEM_PATH is deleted from the
         # environement, and Gems required to build native extensions will
         # not be found. Those extensions are built during `gem install`.
-        inspect.getmodule(self.pkg).gem(
+        pkg.module.gem(
             "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
         )
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
-
 import spack.builder
 import spack.package_base
 from spack.directives import build_system, depends_on

@@ -63,8 +61,7 @@ def build_args(self, spec, prefix):

     def build(self, pkg, spec, prefix):
         """Build the package."""
-        args = self.build_args(spec, prefix)
-
-        inspect.getmodule(self.pkg).scons(*args)
+        pkg.module.scons(*self.build_args(spec, prefix))

     def install_args(self, spec, prefix):
         """Arguments to pass to install."""

@@ -72,9 +69,7 @@ def install_args(self, spec, prefix):

     def install(self, pkg, spec, prefix):
         """Install the package."""
-        args = self.install_args(spec, prefix)
-
-        inspect.getmodule(self.pkg).scons("install", *args)
+        pkg.module.scons("install", *self.install_args(spec, prefix))

     def build_test(self):
         """Run unit tests after build.
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
 import os
 import re

@@ -86,14 +85,13 @@ def import_modules(self):

     def python(self, *args, **kwargs):
         """The python ``Executable``."""
-        inspect.getmodule(self).python(*args, **kwargs)
+        self.pkg.module.python(*args, **kwargs)

     def test_imports(self):
         """Attempts to import modules of the installed package."""

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = inspect.getmodule(self).python
         for module in self.import_modules:
             with spack.install_test.test_part(
                 self,

@@ -101,7 +99,7 @@ def test_imports(self):
                 purpose="checking import of {0}".format(module),
                 work_dir="spack-test",
             ):
-                python("-c", "import {0}".format(module))
+                self.python("-c", "import {0}".format(module))


 @spack.builder.builder("sip")

@@ -136,7 +134,7 @@ def configure(self, pkg, spec, prefix):
         """Configure the package."""

         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
-        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
+        args = ["--verbose", "--target-dir", pkg.module.python_platlib]
         args.extend(self.configure_args())

         # https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766

@@ -155,7 +153,7 @@ def build(self, pkg, spec, prefix):
         args = self.build_args()

         with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make(*args)
+            pkg.module.make(*args)

     def build_args(self):
         """Arguments to pass to build."""

@@ -166,7 +164,7 @@ def install(self, pkg, spec, prefix):
         args = self.install_args()

         with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make("install", *args)
+            pkg.module.make("install", *args)

     def install_args(self):
         """Arguments to pass to install."""
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import inspect
-
 from llnl.util.filesystem import working_dir

 import spack.builder

@@ -90,11 +88,11 @@ def build_directory(self):

     def python(self, *args, **kwargs):
         """The python ``Executable``."""
-        inspect.getmodule(self.pkg).python(*args, **kwargs)
+        self.pkg.module.python(*args, **kwargs)

     def waf(self, *args, **kwargs):
         """Runs the waf ``Executable``."""
-        jobs = inspect.getmodule(self.pkg).make_jobs
+        jobs = self.pkg.module.make_jobs

         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
@@ -6,13 +6,13 @@
 import collections.abc
 import copy
 import functools
-import inspect
 from typing import List, Optional, Tuple

 from llnl.util import lang

-import spack.build_environment
+import spack.error
 import spack.multimethod
+import spack.repo

 #: Builder classes, as registered by the "builder" decorator
 BUILDER_CLS = {}

@@ -75,6 +75,14 @@ def __call__(self, spec, prefix):
         return self.phase_fn(self.builder.pkg, spec, prefix)


+def get_builder_class(pkg, name: str) -> Optional[type]:
+    """Return the builder class if a package module defines it."""
+    cls = getattr(pkg.module, name, None)
+    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
+        return cls
+    return None
+
+
 def _create(pkg):
     """Return a new builder object for the package object being passed as argument.

@@ -97,13 +105,13 @@ class hierarchy (look at AspellDictPackage for an example of that)
     Args:
         pkg (spack.package_base.PackageBase): package object for which we need a builder
     """
-    package_module = inspect.getmodule(pkg)
     package_buildsystem = buildsystem_name(pkg)
     default_builder_cls = BUILDER_CLS[package_buildsystem]
     builder_cls_name = default_builder_cls.__name__
-    builder_cls = getattr(package_module, builder_cls_name, None)
-    if builder_cls:
-        return builder_cls(pkg)
+    builder_class = get_builder_class(pkg, builder_cls_name)
+
+    if builder_class:
+        return builder_class(pkg)

     # Specialized version of a given buildsystem can subclass some
     # base classes and specialize certain phases or methods or attributes.
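The new `get_builder_class` helper narrows what `_create` used to do through `inspect.getmodule`: a builder class found on the package's module only counts if that module lives under Spack's repo namespace, so an unrelated same-named global can no longer shadow the registered default. A condensed sketch of the resulting resolution order, assuming `BUILDER_CLS` and `buildsystem_name` as defined in this file:

    def resolve_builder(pkg):
        # Default builder registered for the package's build system ...
        default_cls = BUILDER_CLS[buildsystem_name(pkg)]
        # ... unless the package.py itself defines one with the same name.
        custom_cls = get_builder_class(pkg, default_cls.__name__)
        return (custom_cls or default_cls)(pkg)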
@@ -463,15 +471,13 @@ def _on_phase_start(self, instance):
         # If a phase has a matching stop_before_phase attribute,
         # stop the installation process raising a StopPhase
         if getattr(instance, "stop_before_phase", None) == self.name:
-            raise spack.build_environment.StopPhase(
-                "Stopping before '{0}' phase".format(self.name)
-            )
+            raise spack.error.StopPhase("Stopping before '{0}' phase".format(self.name))

     def _on_phase_exit(self, instance):
         # If a phase has a matching last_phase attribute,
         # stop the installation process raising a StopPhase
         if getattr(instance, "last_phase", None) == self.name:
-            raise spack.build_environment.StopPhase("Stopping at '{0}' phase".format(self.name))
+            raise spack.error.StopPhase("Stopping at '{0}' phase".format(self.name))

     def copy(self):
         return copy.deepcopy(self)

@@ -525,10 +531,6 @@ def stage(self):
     def prefix(self):
         return self.pkg.prefix

-    def test(self):
-        # Defer tests to virtual and concrete packages
-        pass
-
     def setup_build_environment(self, env):
         """Sets up the build environment for a package.
@@ -5,15 +5,12 @@

 """Caches used by Spack to store data"""
 import os
-from typing import Union

 import llnl.util.lang
 from llnl.util.filesystem import mkdirp

 import spack.config
-import spack.error
 import spack.fetch_strategy
-import spack.mirror
 import spack.paths
 import spack.util.file_cache
 import spack.util.path

@@ -34,12 +31,8 @@ def _misc_cache():
     return spack.util.file_cache.FileCache(path)


-FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
-
 #: Spack's cache for small data
-MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
-    llnl.util.lang.Singleton(_misc_cache)
-)
+MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache)  # type: ignore


 def fetch_cache_location():

@@ -76,6 +69,4 @@ def store(self, fetcher, relative_dest):


 #: Spack's local cache for downloaded source archives
-FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
-    llnl.util.lang.Singleton(_fetch_cache)
-)
+FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache)  # type: ignore
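Both cache globals keep the lazy `llnl.util.lang.Singleton` proxy; the annotation simply changes to the concrete type callers actually see, with `# type: ignore` absorbing the proxy/instance mismatch. The pattern in isolation (the `_expensive` factory is illustrative; `Singleton` defers it until first use):

    import llnl.util.lang


    def _expensive():
        # Runs once, on first attribute access through the proxy.
        return {"created": True}


    CACHE: dict = llnl.util.lang.Singleton(_expensive)  # type: ignore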
@@ -10,6 +10,7 @@
 import os
 import re
 import shutil
+import ssl
 import stat
 import subprocess
 import sys

@@ -19,26 +20,27 @@
 from collections import defaultdict, namedtuple
 from typing import Dict, List, Optional, Set, Tuple
 from urllib.error import HTTPError, URLError
-from urllib.parse import urlencode
-from urllib.request import HTTPHandler, Request, build_opener
+from urllib.parse import quote, urlencode, urlparse
+from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener

 import ruamel.yaml

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import memoized
+from llnl.util.lang import Singleton, memoized
 from llnl.util.tty.color import cescape, colorize

 import spack
 import spack.binary_distribution as bindist
+import spack.concretize
 import spack.config as cfg
-import spack.environment as ev
+import spack.error
 import spack.main
 import spack.mirror
+import spack.package_base
 import spack.paths
 import spack.repo
 import spack.spec
-import spack.stage
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml

@@ -50,6 +52,31 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp


+def _urlopen():
+    error_handler = web_util.SpackHTTPDefaultErrorHandler()
+
+    # One opener with HTTPS ssl enabled
+    with_ssl = build_opener(
+        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
+    )
+
+    # One opener with HTTPS ssl disabled
+    without_ssl = build_opener(
+        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
+    )
+
+    # And dynamically dispatch based on the config:verify_ssl.
+    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
+        opener = with_ssl if verify_ssl else without_ssl
+        timeout = timeout or spack.config.get("config:connect_timeout", 1)
+        return opener.open(fullurl, data, timeout)
+
+    return dispatch_open
+
+
+_dyn_mapping_urlopener = Singleton(_urlopen)
+
 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
 JOB_RETRY_CONDITIONS = [
     # "always",
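`_dyn_mapping_urlopener` hides two pre-built openers, one verifying TLS and one not, behind a single callable so the `verify_ssl` decision is made per request rather than at import time. Later call sites look roughly like this (the URL is a placeholder):

    from urllib.request import Request

    request = Request("https://mapping.example.invalid/allocate", method="GET")
    response = _dyn_mapping_urlopener(request, verify_ssl=True, timeout=5)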
@@ -69,8 +96,6 @@

 TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-# TODO: Remove this in Spack 0.23
-SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
 JOB_NAME_FORMAT = (
     "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
 )

@@ -175,11 +200,11 @@ def _remove_satisfied_deps(deps, satisfied_list):
     return nodes, edges, stages


-def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
+def _print_staging_summary(spec_labels, stages, rebuild_decisions):
     if not stages:
         return

-    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
+    mirrors = spack.mirror.MirrorCollection(binary=True)
     tty.msg("Checked the following mirrors for binaries:")
     for m in mirrors.values():
         tty.msg(f"  {m.fetch_url}")

@@ -226,35 +251,36 @@ def _spec_matches(spec, match_string):
     return spec.intersects(match_string)


-def _format_job_needs(
-    dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
-):
+def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions):
     needs_list = []
     for dep_job in dep_jobs:
         dep_spec_key = _spec_ci_label(dep_job)
         rebuild = rebuild_decisions[dep_spec_key].rebuild

         if not prune_dag or rebuild:
-            needs_list.append(
-                {
-                    "job": get_job_name(dep_job, build_group),
-                    "artifacts": enable_artifacts_buildcache,
-                }
-            )
+            needs_list.append({"job": get_job_name(dep_job, build_group), "artifacts": False})
     return needs_list


 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking
     for changed packages and spack core modules."""
+    rev1 = None
+    rev2 = None
+
+    # Note: git_dir may be a file in a worktree. If it exists, attempt to use git
+    # to determine if there is a revision
     git_dir = os.path.join(spack.paths.prefix, ".git")
-    if os.path.exists(git_dir) and os.path.isdir(git_dir):
-        # TODO: This will only find changed packages from the last
-        # TODO: commit. While this may work for single merge commits
-        # TODO: when merging the topic branch into the base, it will
-        # TODO: require more thought outside of that narrow case.
-        return "HEAD^", "HEAD"
-    return None, None
+    if os.path.exists(git_dir):
+        # The default will only find changed packages from the last
+        # commit. When the commit is a merge commit, this is will return all of the
+        # changes on the topic.
+        # TODO: Handle the case where the clone is not shallow clone of a merge commit
+        # using `git merge-base`
+        rev1 = "HEAD^"
+        rev2 = "HEAD"
+
+    return rev1, rev2


 def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
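After this rewrite `get_change_revisions` accepts a `.git` that is a plain file (as in worktrees) and returns `("HEAD^", "HEAD")` when a revision pair is available, or `(None, None)` otherwise; callers treat the latter as "not a usable git checkout, skip pruning". A sketch of the caller-side guard used later in this file:

    rev1, rev2 = get_change_revisions()
    if rev1 and rev2:
        # Only compute affected packages when both revisions exist.
        affected_pkgs = compute_affected_packages(rev1, rev2)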
@@ -373,7 +399,7 @@ class SpackCI:
     used by the CI generator(s).
     """

-    def __init__(self, ci_config, spec_labels, stages):
+    def __init__(self, ci_config, spec_labels=None, stages=None):
         """Given the information from the ci section of the config
         and the staged jobs, set up meta data needed for generating Spack
         CI IR.

@@ -384,12 +410,6 @@ def __init__(self, ci_config, spec_labels, stages):

         self.ir = {
             "jobs": {},
-            "temporary-storage-url-prefix": self.ci_config.get(
-                "temporary-storage-url-prefix", None
-            ),
-            "enable-artifacts-buildcache": self.ci_config.get(
-                "enable-artifacts-buildcache", False
-            ),
             "rebuild-index": self.ci_config.get("rebuild-index", True),
             "broken-specs-url": self.ci_config.get("broken-specs-url", None),
             "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),

@@ -397,17 +417,29 @@ def __init__(self, ci_config, spec_labels, stages):
         }
         jobs = self.ir["jobs"]

-        for spec, dag_hash in _build_jobs(spec_labels, stages):
-            jobs[dag_hash] = self.__init_job(spec)
+        if spec_labels and stages:
+            for spec, dag_hash in _build_jobs(spec_labels, stages):
+                jobs[dag_hash] = self.__init_job(spec)

         for name in self.named_jobs:
             # Skip the special named jobs
             if name not in ["any", "build"]:
                 jobs[name] = self.__init_job("")

-    def __init_job(self, spec):
+    def __init_job(self, release_spec):
         """Initialize job object"""
-        return {"spec": spec, "attributes": {}}
+        job_object = {"spec": release_spec, "attributes": {}}
+        if release_spec:
+            job_vars = job_object["attributes"].setdefault("variables", {})
+            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
+            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
+            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
+            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
+            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
+            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
+            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
+
+        return job_object

     def __is_named(self, section):
         """Check if a pipeline-gen configuration section is for a named job,
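With this change every spec-backed job carries its identifying data as GitLab variables from the moment `__init_job` creates it. For a concrete spec the returned object has roughly this shape (all values are illustrative):

    job_object = {
        "spec": release_spec,
        "attributes": {
            "variables": {
                "SPACK_JOB_SPEC_DAG_HASH": "abcdef1...",
                "SPACK_JOB_SPEC_PKG_NAME": "zlib",
                "SPACK_JOB_SPEC_PKG_VERSION": "1.3",
                "SPACK_JOB_SPEC_COMPILER_NAME": "gcc",
                "SPACK_JOB_SPEC_COMPILER_VERSION": "13.2.0",
                "SPACK_JOB_SPEC_ARCH": "linux-ubuntu22.04-x86_64",
                "SPACK_JOB_SPEC_VARIANTS": "+shared",
            }
        },
    }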
@@ -500,6 +532,7 @@ def generate_ir(self):
         for section in reversed(pipeline_gen):
             name = self.__is_named(section)
             has_submapping = "submapping" in section
+            has_dynmapping = "dynamic-mapping" in section
             section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)

             if name:

@@ -542,6 +575,108 @@ def _apply_section(dest, src):
                     job["attributes"] = self.__apply_submapping(
                         job["attributes"], job["spec"], section
                     )
+            elif has_dynmapping:
+                mapping = section["dynamic-mapping"]
+
+                dynmap_name = mapping.get("name")
+
+                # Check if this section should be skipped
+                dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
+                if dynmap_name and dynmap_skip:
+                    if re.match(dynmap_skip, dynmap_name):
+                        continue
+
+                # Get the endpoint
+                endpoint = mapping["endpoint"]
+                endpoint_url = urlparse(endpoint)
+
+                # Configure the request header
+                header = {"User-Agent": web_util.SPACK_USER_AGENT}
+                header.update(mapping.get("header", {}))
+
+                # Expand header environment variables
+                # ie. if tokens are passed
+                for value in header.values():
+                    value = os.path.expandvars(value)
+
+                verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
+                timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
+
+                required = mapping.get("require", [])
+                allowed = mapping.get("allow", [])
+                ignored = mapping.get("ignore", [])
+
+                # required keys are implicitly allowed
+                allowed = sorted(set(allowed + required))
+                ignored = sorted(set(ignored))
+                required = sorted(set(required))
+
+                # Make sure required things are not also ignored
+                assert not any([ikey in required for ikey in ignored])
+
+                def job_query(job):
+                    job_vars = job["attributes"]["variables"]
+                    query = (
+                        "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
+                        # The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
+                        " {SPACK_JOB_SPEC_VARIANTS}"
+                        " arch={SPACK_JOB_SPEC_ARCH}"
+                        "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
+                    ).format_map(job_vars)
+                    return f"spec={quote(query)}"
+
+                for job in jobs.values():
+                    if not job["spec"]:
+                        continue
+
+                    # Create request for this job
+                    query = job_query(job)
+                    request = Request(
+                        endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
+                    )
+                    try:
+                        response = _dyn_mapping_urlopener(
+                            request, verify_ssl=verify_ssl, timeout=timeout
+                        )
+                    except Exception as e:
+                        # For now just ignore any errors from dynamic mapping and continue
+                        # This is still experimental, and failures should not stop CI
+                        # from running normally
+                        tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
+                        tty.warn(f"{e}")
+                        continue
+
+                    config = json.load(codecs.getreader("utf-8")(response))
+
+                    # Strip ignore keys
+                    if ignored:
+                        for key in ignored:
+                            if key in config:
+                                config.pop(key)
+
+                    # Only keep allowed keys
+                    clean_config = {}
+                    if allowed:
+                        for key in allowed:
+                            if key in config:
+                                clean_config[key] = config[key]
+                    else:
+                        clean_config = config
+
+                    # Verify all of the required keys are present
+                    if required:
+                        missing_keys = []
+                        for key in required:
+                            if key not in clean_config.keys():
+                                missing_keys.append(key)
+
+                        if missing_keys:
+                            tty.warn(f"Response missing required keys: {missing_keys}")
+
+                    if clean_config:
+                        job["attributes"] = spack.config.merge_yaml(
+                            job.get("attributes", {}), clean_config
+                        )

         for _, job in jobs.items():
             if job["spec"]:
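The new `dynamic-mapping` branch builds one GET request per job from the `SPACK_JOB_SPEC_*` variables above, then filters the JSON response through the `allow`/`ignore`/`require` lists before merging it into the job attributes. Using the illustrative variables from the previous sketch, `job_query` would produce:

    from urllib.parse import quote

    query = "zlib@1.3 +shared arch=linux-ubuntu22.04-x86_64%gcc@13.2.0"
    assert f"spec={quote(query)}" == (
        "spec=zlib%401.3%20%2Bshared%20arch%3Dlinux-ubuntu22.04-x86_64%25gcc%4013.2.0"
    )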
@@ -558,14 +693,13 @@ def generate_gitlab_ci_yaml(
|
|||||||
prune_dag=False,
|
prune_dag=False,
|
||||||
check_index_only=False,
|
check_index_only=False,
|
||||||
artifacts_root=None,
|
artifacts_root=None,
|
||||||
remote_mirror_override=None,
|
|
||||||
):
|
):
|
||||||
"""Generate a gitlab yaml file to run a dynamic child pipeline from
|
"""Generate a gitlab yaml file to run a dynamic child pipeline from
|
||||||
the spec matrix in the active environment.
|
the spec matrix in the active environment.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
env (spack.environment.Environment): Activated environment object
|
env (spack.environment.Environment): Activated environment object
|
||||||
which must contain a gitlab-ci section describing how to map
|
which must contain a ci section describing how to map
|
||||||
specs to runners
|
specs to runners
|
||||||
print_summary (bool): Should we print a summary of all the jobs in
|
print_summary (bool): Should we print a summary of all the jobs in
|
||||||
the stages in which they were placed.
|
the stages in which they were placed.
|
||||||
@@ -580,39 +714,60 @@ def generate_gitlab_ci_yaml(
         artifacts_root (str): Path where artifacts like logs, environment
             files (spack.yaml, spack.lock), etc should be written. GitLab
             requires this to be within the project directory.
-        remote_mirror_override (str): Typically only needed when one spack.yaml
-            is used to populate several mirrors with binaries, based on some
-            criteria. Spack protected pipelines populate different mirrors based
-            on branch name, facilitated by this option. DEPRECATED
     """
+    rev1, rev2 = get_change_revisions()
+    tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}")
+
+    # Get the joined "ci" config with all of the current scopes resolved
+    ci_config = cfg.get("ci")
+    spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
+
+    changed = rev1 and rev2
+    affected_pkgs = None
+    if spack_prune_untouched and changed:
+        affected_pkgs = compute_affected_packages(rev1, rev2)
+        tty.debug("affected pkgs:")
+        if affected_pkgs:
+            for p in affected_pkgs:
+                tty.debug(f" {p}")
+        else:
+            tty.debug(" no affected packages...")
+
+        possible_builds = spack.package_base.possible_dependencies(*env.user_specs)
+        changed = any((spec in p for p in possible_builds.values()) for spec in affected_pkgs)
+
+    if not changed:
+        spack_ci = SpackCI(ci_config)
+        spack_ci_ir = spack_ci.generate_ir()
+
+        # No jobs should be generated.
+        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
+        # If this job fails ignore the status and carry on
+        noop_job["retry"] = 0
+        noop_job["allow_failure"] = True
+
+        tty.msg("Skipping concretization, generating no-op job")
+        output_object = {"no-specs-to-rebuild": noop_job}
+
+        # Ensure the child pipeline always runs
+        output_object["workflow"] = {"rules": [{"when": "always"}]}
+
+        with open(output_file, "w") as f:
+            ruamel.yaml.YAML().dump(output_object, f)
+
+        return
+
     with spack.concretize.disable_compiler_existence_check():
         with env.write_transaction():
             env.concretize()
             env.write()

-    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
-
-    # Get the joined "ci" config with all of the current scopes resolved
-    ci_config = cfg.get("ci")
-
-    config_deprecated = False
     if not ci_config:
-        tty.warn("Environment does not have `ci` a configuration")
-        gitlabci_config = yaml_root.get("gitlab-ci")
-        if not gitlabci_config:
-            tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")
-
-        tty.warn(
-            "The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
-            "To update run \n\t$ spack env update /path/to/ci/spack.yaml",
-        )
-        translate_deprecated_config(gitlabci_config)
-        ci_config = gitlabci_config
-        config_deprecated = True
+        raise SpackCIError("Environment does not have a `ci` configuration")

     # Default target is gitlab...and only target is gitlab
     if not ci_config.get("target", "gitlab") == "gitlab":
-        tty.die('Spack CI module only generates target "gitlab"')
+        raise SpackCIError('Spack CI module only generates target "gitlab"')

     cdash_config = cfg.get("cdash")
     cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
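For orientation: the early-exit path added in this hunk writes a complete child pipeline containing a single no-op job. Below is a minimal, self-contained sketch of what ends up in the output file, assuming ruamel.yaml is installed; the job attributes are illustrative stand-ins for Spack's spack_ci_ir["jobs"]["noop"]["attributes"].

import sys

import ruamel.yaml

# Illustrative stand-in for the no-op job attributes from Spack's CI IR
noop_job = {"script": ['echo "no specs to rebuild"']}
noop_job["retry"] = 0             # never retry a job whose outcome does not matter
noop_job["allow_failure"] = True  # if this job fails, ignore the status and carry on

output_object = {"no-specs-to-rebuild": noop_job}
# The "always" workflow rule keeps GitLab from dropping the child pipeline
output_object["workflow"] = {"rules": [{"when": "always"}]}

ruamel.yaml.YAML().dump(output_object, sys.stdout)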
@@ -631,20 +786,13 @@ def generate_gitlab_ci_yaml(
     dependent_depth = None

     prune_untouched_packages = False
-    spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
     if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
         # Requested to prune untouched packages, but assume we won't do that
         # unless we're actually in a git repo.
-        rev1, rev2 = get_change_revisions()
-        tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}")
-        if rev1 and rev2:
+        if changed:
             # If the stack file itself did not change, proceed with pruning
             if not get_stack_changed(env.manifest_path, rev1, rev2):
                 prune_untouched_packages = True
-                affected_pkgs = compute_affected_packages(rev1, rev2)
-                tty.debug("affected pkgs:")
-                for p in affected_pkgs:
-                    tty.debug(f" {p}")
                 affected_specs = get_spec_filter_list(
                     env, affected_pkgs, dependent_traverse_depth=dependent_depth
                 )
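Condensed, the pruning gate in this hunk is a three-part predicate: pruning was requested via SPACK_PRUNE_UNTOUCHED, both change revisions were resolved, and the stack file itself did not change. A small sketch of that decision in isolation (the helper name and signature are stand-ins, not Spack's API):

import os

def should_prune_untouched(rev1, rev2, stack_changed):
    # Stand-in for the gating logic in generate_gitlab_ci_yaml
    requested = os.environ.get("SPACK_PRUNE_UNTOUCHED", "")
    if requested.lower() != "true":
        return False
    if not (rev1 and rev2):  # not in a git repo, or revisions unresolved
        return False
    return not stack_changed  # never prune when the stack file itself changed

# e.g. should_prune_untouched("HEAD~1", "HEAD", stack_changed=False) -> True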
@@ -673,12 +821,6 @@ def generate_gitlab_ci_yaml(
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

     copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
-    if copy_only_pipeline and config_deprecated:
-        tty.warn(
-            "SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
-            "deprecated ci configuration, a no-op pipeline will be generated\n",
-            "instead.",
-        )

     def ensure_expected_target_path(path):
         """Returns passed paths with all Windows path separators exchanged
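Judging from the docstring above, ensure_expected_target_path normalizes Windows path separators so that paths embedded in the generated YAML are POSIX-style. A plausible sketch of that behavior; the actual body may differ in detail:

def ensure_expected_target_path(path):
    # Swap Windows separators for the forward slashes GitLab YAML expects
    if path:
        path = path.replace("\\", "/")
    return path

assert ensure_expected_target_path("concrete_env\\spack.yaml") == "concrete_env/spack.yaml"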
@@ -697,38 +839,16 @@ def ensure_expected_target_path(path):
         return path

     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
-    deprecated_mirror_config = False
     buildcache_destination = None
-    if "buildcache-destination" in pipeline_mirrors:
-        if remote_mirror_override:
-            tty.die(
-                "Using the deprecated --buildcache-destination cli option and "
-                "having a mirror named 'buildcache-destination' at the same time "
-                "is not allowed"
-            )
-        buildcache_destination = pipeline_mirrors["buildcache-destination"]
-    else:
-        deprecated_mirror_config = True
-        # TODO: This will be an error in Spack 0.23
+    if "buildcache-destination" not in pipeline_mirrors:
+        raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")

-    # TODO: Remove this block in spack 0.23
-    remote_mirror_url = None
-    if deprecated_mirror_config:
-        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
-            tty.die("spack ci generate requires an env containing a mirror")
-
-        ci_mirrors = yaml_root["mirrors"]
-        mirror_urls = [url for url in ci_mirrors.values()]
-        remote_mirror_url = mirror_urls[0]
+    buildcache_destination = pipeline_mirrors["buildcache-destination"]

     spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
     if spack_buildcache_copy:
         buildcache_copies = {}
-        buildcache_copy_src_prefix = (
-            buildcache_destination.fetch_url
-            if buildcache_destination
-            else remote_mirror_override or remote_mirror_url
-        )
+        buildcache_copy_src_prefix = buildcache_destination.fetch_url
         buildcache_copy_dest_prefix = spack_buildcache_copy

     # Check for a list of "known broken" specs that we should not bother
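The net effect of this hunk is a hard requirement: generation now refuses to proceed unless the mirror collection contains an entry named 'buildcache-destination', where it previously fell back to deprecated override options. A sketch of the invariant, with a plain dict standing in for spack.mirror.MirrorCollection and a local exception standing in for the SpackCIError introduced later in this diff:

class SpackCIError(Exception):
    """Local stand-in for the SpackCIError defined in spack.ci."""

def require_buildcache_destination(pipeline_mirrors):
    # pipeline_mirrors behaves like a mapping from mirror name to mirror object
    if "buildcache-destination" not in pipeline_mirrors:
        raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")
    return pipeline_mirrors["buildcache-destination"]

# require_buildcache_destination({}) raises; with the key present it returns that mirror.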
@@ -738,55 +858,10 @@ def ensure_expected_target_path(path):
     if "broken-specs-url" in ci_config:
         broken_specs_url = ci_config["broken-specs-url"]

-    enable_artifacts_buildcache = False
-    if "enable-artifacts-buildcache" in ci_config:
-        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
-        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]
-
     rebuild_index_enabled = True
     if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
         rebuild_index_enabled = False

-    temp_storage_url_prefix = None
-    if "temporary-storage-url-prefix" in ci_config:
-        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
-        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
-
-    # If a remote mirror override (alternate buildcache destination) was
-    # specified, add it here in case it has already built hashes we might
-    # generate.
-    # TODO: Remove this block in Spack 0.23
-    mirrors_to_check = None
-    if deprecated_mirror_config and remote_mirror_override:
-        if spack_pipeline_type == "spack_protected_branch":
-            # Overriding the main mirror in this case might result
-            # in skipping jobs on a release pipeline because specs are
-            # up to date in develop. Eventually we want to notice and take
-            # advantage of this by scheduling a job to copy the spec from
-            # develop to the release, but until we have that, this makes
-            # sure we schedule a rebuild job if the spec isn't already in
-            # override mirror.
-            mirrors_to_check = {"override": remote_mirror_override}
-
-        # If we have a remote override and we want generate pipeline using
-        # --check-index-only, then the override mirror needs to be added to
-        # the configured mirrors when bindist.update() is run, or else we
-        # won't fetch its index and include in our local cache.
-        spack.mirror.add(
-            spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
-            cfg.default_modify_scope(),
-        )
-
-    # TODO: Remove this block in Spack 0.23
-    shared_pr_mirror = None
-    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
-        stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
-        shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
-        spack.mirror.add(
-            spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
-            cfg.default_modify_scope(),
-        )
-
     pipeline_artifacts_dir = artifacts_root
     if not pipeline_artifacts_dir:
         proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
@@ -795,9 +870,8 @@ def ensure_expected_target_path(path):
     pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
    concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")

-    # Now that we've added the mirrors we know about, they should be properly
-    # reflected in the environment manifest file, so copy that into the
-    # concrete environment directory, along with the spack.lock file.
+    # Copy the environment manifest file into the concrete environment directory,
+    # along with the spack.lock file.
     if not os.path.exists(concrete_env_dir):
         os.makedirs(concrete_env_dir)
     shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
@@ -822,18 +896,12 @@ def ensure_expected_target_path(path):
     env_includes.extend(include_scopes)
     env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

-    if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
-        env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
-        translate_deprecated_config(env_yaml_root["spack"]["ci"])
-
     with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
         fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))

     job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
     job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
     job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
-    # TODO: Remove this line in Spack 0.23
-    local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
     user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

     # We communicate relative paths to the downstream jobs to avoid issues in
@@ -847,8 +915,6 @@ def ensure_expected_target_path(path):
     rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
     rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
     rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
-    # TODO: Remove this line in Spack 0.23
-    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
     rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

     # Speed up staging by first fetching binary indices from all mirrors
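The relative-path bookkeeping here exists because the absolute CI_PROJECT_DIR prefix differs between the job that generates the pipeline and the runners that later consume its artifacts. A tiny illustration with an assumed directory layout:

import os

ci_project_dir = "/builds/group/project"  # assumed CI_PROJECT_DIR value
job_log_dir = os.path.join(ci_project_dir, "artifacts", "logs")
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
assert rel_job_log_dir == os.path.join("artifacts", "logs")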
@@ -910,7 +976,7 @@ def ensure_expected_target_path(path):
             continue

         up_to_date_mirrors = bindist.get_mirrors_for_spec(
-            spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
+            spec=release_spec, index_only=check_index_only
         )

         spec_record.rebuild = not up_to_date_mirrors
@@ -952,36 +1018,16 @@ def main_script_replacements(cmd):

             job_name = get_job_name(release_spec, build_group)

-            job_vars = job_object.setdefault("variables", {})
-            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
-            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
-            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
-            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
-            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
-            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
-            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
-
             job_object["needs"] = []
             if spec_label in dependencies:
-                if enable_artifacts_buildcache:
-                    # Get dependencies transitively, so they're all
-                    # available in the artifacts buildcache.
-                    dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
-                else:
-                    # In this case, "needs" is only used for scheduling
-                    # purposes, so we only get the direct dependencies.
-                    dep_jobs = []
-                    for dep_label in dependencies[spec_label]:
-                        dep_jobs.append(spec_labels[dep_label])
+                # In this case, "needs" is only used for scheduling
+                # purposes, so we only get the direct dependencies.
+                dep_jobs = []
+                for dep_label in dependencies[spec_label]:
+                    dep_jobs.append(spec_labels[dep_label])

                 job_object["needs"].extend(
-                    _format_job_needs(
-                        dep_jobs,
-                        build_group,
-                        prune_dag,
-                        rebuild_decisions,
-                        enable_artifacts_buildcache,
-                    )
+                    _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions)
                 )

             rebuild_spec = spec_record.rebuild
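With the artifacts-buildcache path gone, every generated job's needs list is built from direct dependencies only, since needs is used purely for scheduling. A self-contained sketch of the new shape, with strings standing in for spec objects and made-up labels:

def direct_dep_jobs(spec_label, dependencies, spec_labels):
    # Collect only the direct dependencies recorded for this job's label
    dep_jobs = []
    for dep_label in dependencies.get(spec_label, []):
        dep_jobs.append(spec_labels[dep_label])
    return dep_jobs

spec_labels = {"zlib/abcd": "zlib-spec", "cmake/ef01": "cmake-spec"}
dependencies = {"curl/2345": ["zlib/abcd"]}
assert direct_dep_jobs("curl/2345", dependencies, spec_labels) == ["zlib-spec"]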
@@ -1038,6 +1084,7 @@ def main_script_replacements(cmd):

             # Let downstream jobs know whether the spec needed rebuilding, regardless
             # whether DAG pruning was enabled or not.
+            job_vars = job_object["variables"]
             job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)

             if cdash_handler:
@@ -1062,19 +1109,6 @@ def main_script_replacements(cmd):
                 },
             )

-            # TODO: Remove this block in Spack 0.23
-            if enable_artifacts_buildcache:
-                bc_root = os.path.join(local_mirror_dir, "build_cache")
-                job_object["artifacts"]["paths"].extend(
-                    [
-                        os.path.join(bc_root, p)
-                        for p in [
-                            bindist.tarball_name(release_spec, ".spec.json"),
-                            bindist.tarball_directory_name(release_spec),
-                        ]
-                    ]
-                )
-
             job_object["stage"] = stage_name
             job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
             job_object["interruptible"] = True
@@ -1089,15 +1123,7 @@ def main_script_replacements(cmd):
             job_id += 1

     if print_summary:
-        _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)
+        _print_staging_summary(spec_labels, stages, rebuild_decisions)

-    # Clean up remote mirror override if enabled
-    # TODO: Remove this block in Spack 0.23
-    if deprecated_mirror_config:
-        if remote_mirror_override:
-            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
-        if spack_pipeline_type == "spack_pull_request":
-            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
-
     tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
@@ -1114,12 +1140,7 @@ def main_script_replacements(cmd):
     # warn only if there was actually a CDash configuration.
     tty.warn("Unable to populate buildgroup without CDash credentials")

-    service_job_retries = {
-        "max": 2,
-        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
-    }
-
-    if copy_only_pipeline and not config_deprecated:
+    if copy_only_pipeline:
         stage_names.append("copy")
         sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
         sync_job["stage"] = "copy"
@@ -1129,17 +1150,12 @@ def main_script_replacements(cmd):
         if "variables" not in sync_job:
             sync_job["variables"] = {}

-        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
-            buildcache_destination.fetch_url
-            if buildcache_destination
-            else remote_mirror_override or remote_mirror_url
-        )
+        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = buildcache_destination.fetch_url

-        if "buildcache-source" in pipeline_mirrors:
-            buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
-        else:
-            # TODO: Remove this condition in Spack 0.23
-            buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
+        if "buildcache-source" not in pipeline_mirrors:
+            raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")
+
+        buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
         sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
         sync_job["dependencies"] = []
@@ -1147,27 +1163,6 @@ def main_script_replacements(cmd):
         job_id += 1

     if job_id > 0:
-        # TODO: Remove this block in Spack 0.23
-        if temp_storage_url_prefix:
-            # There were some rebuild jobs scheduled, so we will need to
-            # schedule a job to clean up the temporary storage location
-            # associated with this pipeline.
-            stage_names.append("cleanup-temp-storage")
-            cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])
-
-            cleanup_job["stage"] = "cleanup-temp-storage"
-            cleanup_job["when"] = "always"
-            cleanup_job["retry"] = service_job_retries
-            cleanup_job["interruptible"] = True
-
-            cleanup_job["script"] = _unpack_script(
-                cleanup_job["script"],
-                op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
-            )
-
-            cleanup_job["dependencies"] = []
-            output_object["cleanup"] = cleanup_job
-
         if (
             "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
             and spack_pipeline_type == "spack_protected_branch"
@@ -1184,11 +1179,9 @@ def main_script_replacements(cmd):
             signing_job["interruptible"] = True
             if "variables" not in signing_job:
                 signing_job["variables"] = {}
-            signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
-                buildcache_destination.push_url  # need the s3 url for aws s3 sync
-                if buildcache_destination
-                else remote_mirror_override or remote_mirror_url
-            )
+            signing_job["variables"][
+                "SPACK_BUILDCACHE_DESTINATION"
+            ] = buildcache_destination.push_url
             signing_job["dependencies"] = []

             output_object["sign-pkgs"] = signing_job
@@ -1199,16 +1192,17 @@ def main_script_replacements(cmd):
             final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

             final_job["stage"] = "stage-rebuild-index"
-            target_mirror = remote_mirror_override or remote_mirror_url
-            if buildcache_destination:
-                target_mirror = buildcache_destination.push_url
+            target_mirror = buildcache_destination.push_url
             final_job["script"] = _unpack_script(
                 final_job["script"],
                 op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
             )

             final_job["when"] = "always"
-            final_job["retry"] = service_job_retries
+            final_job["retry"] = {
+                "max": 2,
+                "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
+            }
             final_job["interruptible"] = True
             final_job["dependencies"] = []
@@ -1219,25 +1213,19 @@ def main_script_replacements(cmd):
     # Capture the version of Spack used to generate the pipeline, that can be
     # passed to `git checkout` for version consistency. If we aren't in a Git
     # repository, presume we are a Spack release and use the Git tag instead.
-    spack_version = spack.main.get_version()
-    version_to_clone = spack.main.get_spack_commit() or f"v{spack.spack_version}"
+    spack_version = spack.get_version()
+    version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"

     output_object["variables"] = {
         "SPACK_ARTIFACTS_ROOT": rel_artifacts_root,
         "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
         "SPACK_VERSION": spack_version,
         "SPACK_CHECKOUT_VERSION": version_to_clone,
-        # TODO: Remove this line in Spack 0.23
-        "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
         "SPACK_JOB_LOG_DIR": rel_job_log_dir,
         "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
         "SPACK_JOB_TEST_DIR": rel_job_test_dir,
-        # TODO: Remove this line in Spack 0.23
-        "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
         "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
         "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
-        # TODO: Remove this line in Spack 0.23
-        "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
         "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
         "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
         "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
@@ -1246,10 +1234,6 @@ def main_script_replacements(cmd):
     for item, val in output_vars.items():
         output_vars[item] = ensure_expected_target_path(val)

-    # TODO: Remove this block in Spack 0.23
-    if deprecated_mirror_config and remote_mirror_override:
-        (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override
-
     spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
     if spack_stack_name:
         output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
@@ -1272,17 +1256,12 @@ def main_script_replacements(cmd):
     else:
         # No jobs were generated
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
-        noop_job["retry"] = service_job_retries
+        # If this job fails ignore the status and carry on
+        noop_job["retry"] = 0
+        noop_job["allow_failure"] = True

-        if copy_only_pipeline and config_deprecated:
-            tty.debug("Generating no-op job as copy-only is unsupported here.")
-            noop_job["script"] = [
-                'echo "copy-only pipelines are not supported with deprecated ci configs"'
-            ]
-            output_object = {"unsupported-copy": noop_job}
-        else:
-            tty.debug("No specs to rebuild, generating no-op job")
-            output_object = {"no-specs-to-rebuild": noop_job}
+        tty.debug("No specs to rebuild, generating no-op job")
+        output_object = {"no-specs-to-rebuild": noop_job}

     # Ensure the child pipeline always runs
     output_object["workflow"] = {"rules": [{"when": "always"}]}
@@ -2320,83 +2299,6 @@ def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optiona
     reporter.test_skipped_report(report_dir, spec, reason)


-def translate_deprecated_config(config):
-    # Remove all deprecated keys from config
-    mappings = config.pop("mappings", [])
-    match_behavior = config.pop("match_behavior", "first")
-
-    build_job = {}
-    if "image" in config:
-        build_job["image"] = config.pop("image")
-    if "tags" in config:
-        build_job["tags"] = config.pop("tags")
-    if "variables" in config:
-        build_job["variables"] = config.pop("variables")
-
-    # Scripts always override in old CI
-    if "before_script" in config:
-        build_job["before_script:"] = config.pop("before_script")
-    if "script" in config:
-        build_job["script:"] = config.pop("script")
-    if "after_script" in config:
-        build_job["after_script:"] = config.pop("after_script")
-
-    signing_job = None
-    if "signing-job-attributes" in config:
-        signing_job = {"signing-job": config.pop("signing-job-attributes")}
-
-    service_job_attributes = None
-    if "service-job-attributes" in config:
-        service_job_attributes = config.pop("service-job-attributes")
-
-    # If this config already has pipeline-gen do not more
-    if "pipeline-gen" in config:
-        return True if mappings or build_job or signing_job or service_job_attributes else False
-
-    config["target"] = "gitlab"
-
-    config["pipeline-gen"] = []
-    pipeline_gen = config["pipeline-gen"]
-
-    # Build Job
-    submapping = []
-    for section in mappings:
-        submapping_section = {"match": section["match"]}
-        if "runner-attributes" in section:
-            remapped_attributes = {}
-            if match_behavior == "first":
-                for key, value in section["runner-attributes"].items():
-                    # Scripts always override in old CI
-                    if key == "script":
-                        remapped_attributes["script:"] = value
-                    elif key == "before_script":
-                        remapped_attributes["before_script:"] = value
-                    elif key == "after_script":
-                        remapped_attributes["after_script:"] = value
-                    else:
-                        remapped_attributes[key] = value
-            else:
-                # Handle "merge" behavior be allowing scripts to merge in submapping section
-                remapped_attributes = section["runner-attributes"]
-            submapping_section["build-job"] = remapped_attributes
-
-        if "remove-attributes" in section:
-            # Old format only allowed tags in this section, so no extra checks are needed
-            submapping_section["build-job-remove"] = section["remove-attributes"]
-        submapping.append(submapping_section)
-    pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})
-
-    if build_job:
-        pipeline_gen.append({"build-job": build_job})
-
-    # Signing Job
-    if signing_job:
-        pipeline_gen.append(signing_job)
-
-    # Service Jobs
-    if service_job_attributes:
-        pipeline_gen.append({"reindex-job": service_job_attributes})
-        pipeline_gen.append({"noop-job": service_job_attributes})
-        pipeline_gen.append({"cleanup-job": service_job_attributes})
-
-    return True
+class SpackCIError(spack.error.SpackError):
+    def __init__(self, msg):
+        super().__init__(msg)
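Structurally, the removed translator is replaced by a single dedicated error type, and the former tty.die/tty.warn call sites in this diff now raise it instead. A minimal sketch of defining and raising such an error, assuming spack.error.SpackError accepts the message as its first argument (which the super().__init__(msg) call above implies):

import spack.error

class SpackCIError(spack.error.SpackError):
    def __init__(self, msg):
        super().__init__(msg)

# Callers turn hard process exits into exceptions a single handler can report:
# raise SpackCIError('Spack CI module only generates target "gitlab"')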
@@ -17,7 +17,7 @@
 from llnl.util.tty.colify import colify
 from llnl.util.tty.color import colorize

-import spack.config
+import spack.config  # breaks a cycle.
 import spack.environment as ev
 import spack.error
 import spack.extensions
@@ -173,10 +173,29 @@ def parse_specs(
     arg_string = " ".join([quote_kvp(arg) for arg in args])

     specs = spack.parser.parse(arg_string)
-    for spec in specs:
-        if concretize:
-            spec.concretize(tests=tests)
-    return specs
+    if not concretize:
+        return specs
+
+    to_concretize = [(s, None) for s in specs]
+    return _concretize_spec_pairs(to_concretize, tests=tests)
+
+
+def _concretize_spec_pairs(to_concretize, tests=False):
+    """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
+
+    Any spec with a concrete spec associated with it will concretize to that spec. Any spec
+    with ``None`` for its concrete spec will be newly concretized. This method respects unification
+    rules from config."""
+    unify = spack.config.get("concretizer:unify", False)
+
+    concretize_method = spack.concretize.concretize_separately  # unify: false
+    if unify is True:
+        concretize_method = spack.concretize.concretize_together
+    elif unify == "when_possible":
+        concretize_method = spack.concretize.concretize_together_when_possible
+
+    concretized = concretize_method(*to_concretize, tests=tests)
+    return [concrete for _, concrete in concretized]


 def matching_spec_from_env(spec):
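_concretize_spec_pairs dispatches on the concretizer:unify setting, which may be False, True, or "when_possible". Here is the dispatch in isolation, with stub callables standing in for Spack's three concretization strategies:

def pick_concretize_method(unify):
    # Mirrors the selection logic in _concretize_spec_pairs, with stubs
    separately = lambda *pairs, tests=False: "separately"
    together = lambda *pairs, tests=False: "together"
    when_possible = lambda *pairs, tests=False: "when_possible"

    method = separately  # unify: false is the default
    if unify is True:
        method = together
    elif unify == "when_possible":
        method = when_possible
    return method

assert pick_concretize_method(False)() == "separately"
assert pick_concretize_method("when_possible")() == "when_possible"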
@@ -192,6 +211,22 @@ def matching_spec_from_env(spec):
     return spec.concretized()


+def matching_specs_from_env(specs):
+    """
+    Same as ``matching_spec_from_env`` but respects spec unification rules.
+
+    For each spec, if there is a matching spec in the environment it is used. If no
+    matching spec is found, this will return the given spec but concretized in the
+    context of the active environment and other given specs, with unification rules applied.
+    """
+    env = ev.active_environment()
+    spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
+    additional_concrete_specs = (
+        [(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
+    )
+    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
+
+
 def disambiguate_spec(spec, env, local=False, installed=True, first=False):
     """Given a spec, figure out which installed package it refers to.
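matching_specs_from_env feeds the environment's already-concretized specs in as (concrete, concrete) pairs so unification can see them, then slices the result back down to the caller's own specs. The slicing pattern on its own, with an identity-style "concretizer" for illustration:

def concretize_with_context(pairs, context_pairs, concretize):
    # The extra context participates in concretization but is not returned
    results = concretize(pairs + context_pairs)
    return results[: len(pairs)]

doubled = lambda xs: [x * 2 for x in xs]
assert concretize_with_context([1, 2], [3, 4], doubled) == [2, 4]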
@@ -11,6 +11,7 @@
 import llnl.util.tty.color as color

 import spack.platforms
+import spack.spec

 description = "print architecture information about this machine"
 section = "system"
@@ -18,12 +19,23 @@


 def setup_parser(subparser):
+    # DEPRECATED: equivalent to --generic --target
     subparser.add_argument(
-        "-g", "--generic-target", action="store_true", help="show the best generic target"
+        "-g",
+        "--generic-target",
+        action="store_true",
+        help="show the best generic target (deprecated)",
     )
     subparser.add_argument(
         "--known-targets", action="store_true", help="show a list of all known targets and exit"
     )
+    target_type = subparser.add_mutually_exclusive_group()
+    target_type.add_argument(
+        "--family", action="store_true", help="print generic ISA (x86_64, aarch64, ppc64le, ...)"
+    )
+    target_type.add_argument(
+        "--generic", action="store_true", help="print feature level (x86_64_v3, armv8.4a, ...)"
+    )
     parts = subparser.add_mutually_exclusive_group()
     parts2 = subparser.add_mutually_exclusive_group()
     parts.add_argument(
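The new --family and --generic flags surface two attributes of archspec's microarchitecture objects. A quick sketch of what they resolve to on the host, assuming the archspec package is importable (the printed values are examples):

import archspec.cpu

host = archspec.cpu.host()
print(host)          # e.g. icelake
print(host.family)   # generic ISA, e.g. x86_64
print(host.generic)  # feature level, e.g. x86_64_v4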
@@ -79,6 +91,7 @@ def display_target_group(header, target_group):

 def arch(parser, args):
     if args.generic_target:
+        # TODO: add deprecation warning in 0.24
         print(archspec.cpu.host().generic)
         return
@@ -95,6 +108,10 @@ def arch(parser, args):
     host_platform = spack.platforms.host()
     host_os = host_platform.operating_system(os_args)
     host_target = host_platform.target(target_args)
+    if args.family:
+        host_target = host_target.family
+    elif args.generic:
+        host_target = host_target.generic
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))

     if args.platform:
@@ -115,15 +115,11 @@ def audit(parser, args):
 def _process_reports(reports):
     for check, errors in reports:
         if errors:
-            msg = "{0}: {1} issue{2} found".format(
-                check, len(errors), "" if len(errors) == 1 else "s"
-            )
-            header = "@*b{" + msg + "}"
-            print(cl.colorize(header))
+            status = f"{len(errors)} issue{'' if len(errors) == 1 else 's'} found"
+            print(cl.colorize(f"{check}: @*r{{{status}}}"))
+            numdigits = len(str(len(errors)))
             for idx, error in enumerate(errors):
-                print(str(idx + 1) + ". " + str(error))
+                print(f"{idx + 1:>{numdigits}}. {error}")
             raise SystemExit(1)
         else:
-            msg = "{0}: 0 issues found.".format(check)
-            header = "@*b{" + msg + "}"
-            print(cl.colorize(header))
+            print(cl.colorize(f"{check}: @*g{{passed}}"))
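The reworked audit output right-aligns error indices to the width of the largest index so long lists line up. The alignment trick in isolation:

errors = [f"issue {i}" for i in range(1, 13)]
numdigits = len(str(len(errors)))  # width of the largest index, here 2
for idx, error in enumerate(errors):
    print(f"{idx + 1:>{numdigits}}. {error}")
# prints " 1. issue 1" ... "12. issue 12", single digits padded to width 2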
@@ -16,11 +16,11 @@
 import spack.bootstrap.config
 import spack.bootstrap.core
 import spack.config
-import spack.main
 import spack.mirror
 import spack.spec
 import spack.stage
 import spack.util.path
+import spack.util.spack_yaml
 from spack.cmd.common import arguments

 description = "manage bootstrap configuration"
@@ -23,14 +23,9 @@
 import spack.error
 import spack.mirror
 import spack.oci.oci
-import spack.oci.opener
-import spack.relocate
-import spack.repo
 import spack.spec
 import spack.stage
 import spack.store
-import spack.user_environment
-import spack.util.crypto
 import spack.util.parallel
 import spack.util.url as url_util
 import spack.util.web as web_util
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import spack.cmd
+import spack.spec
 from spack.cmd.common import arguments

 description = "change an existing spec in an environment"
@@ -15,7 +15,6 @@
 import spack.repo
 import spack.spec
 import spack.stage
-import spack.util.crypto
 import spack.util.web as web_util
 from spack.cmd.common import arguments
 from spack.package_base import (
Some files were not shown because too many files have changed in this diff.