Compare commits: develop-20...cleanup_do (658 commits)
Commit SHA1s, in page order:

2f7d8947ba 9a604bf615 f1487caa92 de3fb78477 ac0ed2c4cc 66a93b5433 b7993317ea 66622ec4d0
9b2cd1b208 9888683a21 fb46c7a72d c0196cde39 d091172d67 ab51369087 1cea82b629 2abb711337
6f948eb847 93bf0634f3 badb3cedcd be918817d6 41d9f687f6 9642b04513 bf16f0bf74 ad518d975c
a76e3f2030 1809b81e1d a02b40b670 6d8fdbcf82 3dadf569a4 751585f1e3 f6d6a5a480 57a1ebc77e
acdcd1016a e7c9bb5258 e083acdc5d 99fd37931c 00e68af794 e33cbac01f ada4c208d4 91310d3ae6
def1613741 ac703bc88d f0f5ffa9de 65929888de 2987efa93c 37de92e7a2 42fd1cafe6 370694f112
fc7125fdf3 3fed708618 0614ded2ef e38e51a6bc c44c938caf cdaacce4db b98e5886e5 09a88ad3bd
4d91d3f77f b748907a61 cbd9fad66e 82dd33c04c 31b2b790e7 9fd698edcb 247446a8f3 993f743245
786f8dfcce 4691301eba a55073e7b0 484c9cf47c 9ed5e1de8e 4eb7b998e8 3b423a67a2 b803dabb2c
33dd894eff f458392c1b 8c962a94b0 8b165c2cfe 01edde35be 84d33fccce c4a5a996a5 6961514122
a9e6074996 30db764449 f5b8b0ac5d 913dcd97bc 68570b7587 2da4366ba6 2713b0c216 16b01c5661
ebd4ef934c 97b5ec6e4f 4c9bc8d879 825fd1ccf6 33109ce9b9 fb5910d139 fa6b8a4ceb 97acf2614a
e99bf48d28 b97015b791 1884520f7b 7fbfb0f6dc 11d276ab6f da1d533877 c6997e11a7 4322cf56b1
907a37145f 4778d2d332 eb256476d2 ff26d2f833 ed916ffe6c 4fbdf2f2c0 60ba61f6b2 0a4563fd02
754408ca2b 0d817878ea bf11fb037b 074b845cd3 dd26732897 3665c5c01b 73219e4b02 57a90c91a4
8f4a0718bf 9049ffdc7a d1f313342e e62cf9c45b ee2723dc46 d09b185522 a31c525778 2aa5a16433
0c164d2740 801390f6be c601692bc7 2b9c6790f2 09ae2516d5 eb9ff5d7a7 dadb30f0e2 d45f682573
b7601f3042 6b5a479d1e 1297dd7fbc 75c169d870 afe431cfb5 14bc900e9d e42e541605 9310fcabd8
6822f99cc6 703cd6a313 5b59a53545 b862eec6bc dcc199ae63 f8da72cffe 8650ba3cea 54aaa95a35
5a29c9d82b c8873ea35c c7659df4af 0de6c17477 6924c530e2 38c8069ab4 5cc07522ab 575a006ca3
23ac56edfb 8c3068809f 2214fc855d d44bdc40c9 e952f6be8e b95936f752 8d0856d1cc 10f7014add
c9ed91758d 2c1d74db9b 5b93466340 1ee344c75c 754011643c 2148292bdb cf3576a9bb a86f164835
2782ae6d7e b1fd6dbb6d 18936771ff 9a94ea7dfe a93bd6cee4 4c247e206c fcdaccfeb6 2fc056e27c
417c48b07a f05033b0d2 d63f06e4b7 8296aaf175 86ebcabd46 87329639f2 0acd6ae7b2 395c911689
2664303d7a ff9568fa2f 632c009569 55918c31d2 b8461f3d2d 133895e785 19e3ab83cf e42a4a8bac
1462c35761 0cf8cb70f4 7b2450c22a 8f09f523cc 24d3ed8c18 492c52089f 5df7dc88fc 4a75c3c87a
35aa02771a b38a29f4df 9a25a58219 c0c9743300 a69af3c71f cb92d70d6d 76ed4578e7 504cc808d6
8076134c91 b4b3320f71 e35bc1f82d 0de1ddcbe8 e3aca49e25 94c29e1cfc 0c00a297e1 c6a1ec996c
0437c5314e ffde309a99 a08b4ae538 404b1c6c19 275339ab4c 877930c4ef 89d0215d5b f003d8c0c3
6ab92b119d f809b56f81 ec058556ad ce78e8a1f8 c3435b4e7d 02d2c4a9ff 9d03170cb2 8892c878ce
cbf4d3967a 8bc0b2e086 354615d491 9ac261af58 34b2f28a5e 8a10eff757 44d09f2b2b 161b2d7cb0
4de5b664cd 5d0c6c3350 8391c8eb87 3108849121 52471bab02 b8e3246e89 60cb628283 5bca7187a5
65daf17b54 d776dead56 741a4a5d4f dbe7b6bc6b ffc904aa6b f889b2a95e 7f609ba934 ffd7830bfa
20a6b22f78 1bff2f7034 ca48233ef7 c302049b5d 360dbe41f7 ea1aa0714b 7af1a3d240 962115b386
f81ca0cd89 25a5585f7d e81ce18cad d48d993ae7 fbd5c3d589 11aa02b37a b9ebf8cc9c 1229d5a3cc
be5a096665 32ce278a51 e83536de38 ff058377c5 e855bb011d dbab4828ed fac92dceca 035b890b17
2a7e5cafa1 49845760b6 ce6255c0bb f0d54ba39d 2ec4281c4f e80d75cbe3 47e70c5c3a fea2171672
12a475e648 c348891c07 019e90ab36 19137b2653 2761e650fa 84ea389017 17f07523f5 bd2ddb8909
f5db757e66 277f8596de c8bebff7f5 61d2d21acc 7b27aed4c8 ad0b256407 a2a3a83a26 7d86670826
ae306b73c3 b63cbe4e6e ef220daaca e86a3b68f7 7319408bc7 b34159348f f13d998d21 2912d4a661
8e2ec58859 01eb26578b fe0a8a1735 d523f12e99 1b0631b69e 65bb3a12ea 5ac2b8a178 b063765c2e
4511052d26 3804d128e7 f09ce00fe1 cdde7c3ccf c52c0a482f 8a75cdad9a e0eea48ccf 61cbfc1da0
d8c8074762 faeef6272d f6ad1e23f8 a0173a5a94 225be45687 3581821d3c 79ad6f6b48 6320993409
1472dcace4 755c113c16 43bcb5056f fd1c95a432 5b5be0582f aed1a3f980 978be305a7 7ddb40a804
37664b36da f33912d707 e785d3716e 328787b017 67a40c6cc4 eccf97af33 e63e8b5efa bb25210b62
f8ab94061f ed15b73c3b 1f6da280b7 1ad5739094 06f33dcdbb 582254f891 31694fe9bd a53a14346e
c102ff953b 59a2a87937 d86feeac54 43e26b330c 9c8b5f58c0 50aa5a7b24 ffab156366 e147679d40
ef9bb7ebe5 fc443ea30e b601bace24 cbad3d464a b56e792295 5b279c0732 149753a52e b582eacbc1
037196c2bd d9e8c5f13e 275d1d88f4 a07d42d35b 19ad29a690 4187c57250 590be9bba1 3edd68d981
5ca0e94bdd f6c9d98c8f 9854c9e5f2 e5a602c1bb 37fe3b4984 a00fddef4e 260b36e272 117480dba9
bc75f23927 b0f1a0eb7c 4d616e1168 4de8344c16 411ea019f1 296f99d800 ca4df91e7d 9b8c06a049
011ff48f82 adcd05b365 dc160e3a52 ba953352a1 d47e726b76 89ab47284f 31bdcd7dc6 f2bd11cbf4
f69e8297a7 c9377d9437 899004e29a df6427d259 31cfcafeba 230bc7010a 957c0cc9da 99e4d6b446
7acd0cd86c d3378ffd25 2356ccc816 1d25275bd1 7678635d36 b2e28a0b08 53385f12da cfae194fbd
88c193b83a c006cb573a d8d41e9b0e c6bfe7c6bd 4432f5a1fe b9e0914ab2 49a8e84588 d36452cf4e
580cc3c91b 9ba7af404a 2da812cbad 420266c5c4 049ade024a 75c71f7291 0a7533a609 7ecdc175ff
962262a1d3 adaa0a4863 5f56eee8b0 aa6caf9ee6 1eb2cb97ad 178a8bbdc5 e4c233710c b661acfa9b
7bddcd27d2 5d2c67ec83 62fd5d12c2 64a7525e3f bfe434cbd5 39063baf18 f4a4acd272 8d2a059279
34c89c0f7b e1ea9e12a6 5611523baf 4ff07c3918 49489a4815 fb53d31d09 80b9807e10 b573ec3920
cbdc07248f db6a2523d9 c710a1597f 8c70912b11 64f90c38be d2f1e29927 57586df91a c00f36b5e2
2a7dd29f95 58e2f7a54f e3afe9a364 b0314faa3d 2099e9f5cd 5947c13570 1259992159 0477875667
4d5844b460 fc79c37e2d 1d76ed7aa4 237f886e5d 834ed2f117 73069045ae e0efd2bea2 b9873c5cea
2f711bda5f f8381c9a63 c8f61c8662 507965cbc6 1f6ce56d3b 3918f83ddc d4dc13fffb 5008519a56
dad5ff8796 a24220b53f 2186ff720e 65d61e12c9 05f3fef72c 21c2eedb80 66a3c7bc42 8b3d3ac2de
b5610cdb8b 6c6b262140 796e372bde 78740942f9 02a991688f a8029c8ec4 adb8f37fc5 81b41d5948
0ff980ae87 74a93c04d8 b72c7deacb b061bbbb8f bbfad7e979 3a9963b497 8ac00aa58f 13f80ff142
e8291cbd74 0dded55f39 a4ca6452c0 36761715fd 02b116bd56 d4d7d5830d 389b1824e9 e65be13056
1580c1745c cf54ef0fd3 b8b02e0691 8d986b8a99 4b836cb795 d5966e676d e187508485 80982149d5
a1f2e794c7 dbe323c631 77ddafaaac 17efd6153c 93f356c1cc 386d115333 6b512210d4 ba215ca824
629a3e9396 08b07b9b27 3a38122764 25ab7cc16d 41773383ec 9855fbf7f1 5ef9d7e3ed 5a4b7d3d44
9b40c1e89d edff99aab3 22043617aa 7df23c7471 ef87a9a052 af62a062cc e6114f544d 8d651625f7
9346306b79 f3a3e85bb9 caaaba464e 8fae388f57 a332e0c143 bc662b8764 7a8955597d bcf9c646cf
a76fffe8ff 26c8714a24 0776ff05d2 d3beef6584 bdd06cb176 f639c4f1e6 f18a106759 5b01ddf832
c1fc98eef8 e9831985e4 30e9545d3e ce0910a82c afc01f9570 fc3a484a8c de0d5ba883 f756ab156c
540de118c1 675be13a7b 3342866e0e 39ff675898 f807337273 8e4e3c9060 6d67992191 0f3fea511e
a0611650e2 5959be577f 9b5e508d15 66a30aef98 b117074df4 9f4be17451 d70e9e131d d7643d4f88
73b6aa9b92 6d51d94dab 1a965e9ec2 a9e9b901d1 95b46dca3d 7f6ae2a51e 489d5b0f21 f884817009
a30704fdad 57eb21ac3d f48c36fc2c a09b9f0659 92d940b7f4 d8c7cbe8f0 717d4800e1 c77916146c
f5135018dd adfb3a77ad d5ccf8203d 416943f7ed 519684978b c9de1cbcda eedc41405b e6f48ceaf5
2ba583e7eb 741b6bc0e4 ff98c15065 625d032e80 5227f5f387 170e322701 cb673862d1 31d6e7a901
79db34574b b3831d4e8c 35f0feba00 9a04a94a26 a87fc566ec c8f6a19fc0 365892be4c 70acce1aad
48e2dd8038 2844f7425b f75760d4f2 b8e3f35a8b f610c3e4d0 a0b925dae3 c99518709a d67b5b300c
9bcca28afd b07d1e0194
```diff
@@ -5,7 +5,7 @@ coverage:
   status:
     project:
       default:
-        threshold: 0.2%
+        threshold: 2.0%

 ignore:
   - lib/spack/spack/test/.*
```
`.github/workflows/audit.yaml` (6 changes)

```diff
@@ -28,8 +28,8 @@ jobs:
     run:
       shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
@@ -66,7 +66,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          spack -d audit externals
          ./share/spack/qa/validate_last_exit.ps1
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
        with:
          name: coverage-audits-${{ matrix.system.os }}
```
`.github/workflows/bin/bootstrap-test.sh` (2 changes)

```diff
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.4
+$PYTHON bin/spack bootstrap disable github-actions-v0.5
 $PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
```
`.github/workflows/bootstrap.yml` (51 changes)

```diff
@@ -37,14 +37,14 @@ jobs:
            make patch unzip which xz python3 python3-devel tree \
            cmake bison
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.6
          spack bootstrap disable github-actions-v0.5
-         spack bootstrap disable github-actions-v0.4
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -60,17 +60,17 @@ jobs:
        run: |
          brew install cmake bison tree
      - name: Checkout
-       uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: "3.12"
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.6
          spack bootstrap disable github-actions-v0.5
-         spack bootstrap disable github-actions-v0.4
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          tree $HOME/.spack/bootstrap/store/
@@ -83,22 +83,22 @@ jobs:
     steps:
       - name: Setup macOS
         if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: brew install tree gawk
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
+        run: |
+          brew install tree gawk
+          sudo rm -rf $(command -v gpg gpg2)
+      - name: Remove system executables
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
+        run: |
+          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+            sudo rm $(command -v gpg gpg2 patchelf)
+          done
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
+         spack bootstrap disable github-actions-v0.6
          spack bootstrap disable github-actions-v0.5
-         spack bootstrap disable github-actions-v0.4
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

@@ -110,19 +110,17 @@ jobs:
     steps:
       - name: Setup macOS
         if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: brew install tree
-      - name: Remove system executables
-        run: |
+        run: |
+          brew install tree
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Setup Ubuntu
         if: ${{ matrix.runner == 'ubuntu-latest' }}
         run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+            sudo rm $(command -v gpg gpg2 patchelf)
+          done
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: |
            3.8
@@ -130,15 +128,16 @@ jobs:
            3.10
            3.11
            3.12
+           3.13
      - name: Set bootstrap sources
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable spack-install
      - name: Bootstrap clingo
        run: |
          set -e
-         for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
+         for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            if [[ -d "$ver_dir" ]] ; then
@@ -172,10 +171,10 @@ jobs:
     runs-on: "windows-latest"
     steps:
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: "3.12"
      - name: Setup Windows
@@ -185,8 +184,8 @@ jobs:
      - name: Bootstrap clingo
        run: |
          ./share/spack/setup-env.ps1
+         spack bootstrap disable github-actions-v0.6
          spack bootstrap disable github-actions-v0.5
-         spack bootstrap disable github-actions-v0.4
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          ./share/spack/qa/validate_last_exit.ps1
```
`.github/workflows/build-containers.yml` (8 changes)

```diff
@@ -55,7 +55,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -87,7 +87,7 @@ jobs:
          fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: dockerfiles_${{ matrix.dockerfile[0] }}
          path: dockerfiles
@@ -113,7 +113,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@32945a339266b759abcbdc89316275140b0fc960
+        uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +126,7 @@ jobs:
     needs: deploy-images
     steps:
       - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@50769540e7f4bd5e21e526ee35c689e35e0d6874
+        uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: dockerfiles
          pattern: dockerfiles_*
```
`.github/workflows/ci.yaml` (28 changes)

```diff
@@ -24,7 +24,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
@@ -83,10 +83,17 @@ jobs:

   all-prechecks:
     needs: [ prechecks ]
+    if: ${{ always() }}
     runs-on: ubuntu-latest
     steps:
       - name: Success
-        run: "true"
+        run: |
+          if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
+            echo "Unit tests failed."
+            exit 1
+          else
+            exit 0
+          fi

   coverage:
     needs: [ unit-tests, prechecks ]
@@ -94,8 +101,19 @@ jobs:
     secrets: inherit

   all:
-    needs: [ coverage, bootstrap ]
+    needs: [ unit-tests, coverage, bootstrap ]
+    if: ${{ always() }}
     runs-on: ubuntu-latest
+    # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
     steps:
-      - name: Success
-        run: "true"
+      - name: Status summary
+        run: |
+          if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
+            echo "Unit tests failed."
+            exit 1
+          elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
+            echo "Bootstrap tests failed."
+            exit 1
+          else
+            exit 0
+          fi
```
`.github/workflows/coverage.yml` (4 changes)

```diff
@@ -8,8 +8,8 @@ jobs:
   upload:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.11'
          cache: 'pip'
```
`.github/workflows/nightly-win-builds.yml` (4 changes)

```diff
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: 3.9
      - name: Install Python packages
```
```diff
@@ -1,7 +1,7 @@
-black==24.8.0
+black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20240513
+types-six==1.16.21.20241105
 vermin==1.6.0
```
`.github/workflows/unit_tests.yaml` (51 changes)

```diff
@@ -40,10 +40,10 @@ jobs:
           on_develop: false

     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -80,7 +80,7 @@ jobs:
        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
          path: coverage
@@ -89,10 +89,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -113,7 +113,7 @@ jobs:
        COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-shell
          path: coverage
@@ -130,7 +130,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -149,32 +149,33 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
-         python-version: '3.11'
+         python-version: '3.13'
      - name: Install System packages
        run: |
          sudo apt-get -y update
-         sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
+         sudo apt-get -y install coreutils gfortran graphviz gnupg2
      - name: Install Python packages
        run: |
-         pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+         pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/bin/setup_git.sh
      - name: Run unit tests (full suite with coverage)
        env:
          COVERAGE: true
          COVERAGE_FILE: coverage/.coverage-clingo-cffi
        run: |
-         share/spack/qa/run-unit-tests
-     - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+         . share/spack/setup-env.sh
+         spack bootstrap disable spack-install
+         spack bootstrap disable github-actions-v0.5
+         spack bootstrap disable github-actions-v0.6
+         spack bootstrap status
+         spack solve zlib
+         spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
+     - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-clingo-cffi
          path: coverage
@@ -187,10 +188,10 @@ jobs:
        os: [macos-13, macos-14]
        python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
@@ -212,7 +213,7 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
          path: coverage
@@ -225,10 +226,10 @@ jobs:
       powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -243,7 +244,7 @@ jobs:
       run: |
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml
         ./share/spack/qa/validate_last_exit.ps1
-      - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-windows
          path: coverage
```
`.github/workflows/valid-style.yml` (22 changes)

```diff
@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+     - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -70,7 +70,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -85,7 +85,7 @@ jobs:
          source share/spack/setup-env.sh
          spack debug report
          spack -d bootstrap now --dev
-         spack style -t black
+         spack -d style -t black
          spack unit-test -V
   import-check:
     runs-on: ubuntu-latest
@@ -98,14 +98,14 @@ jobs:
       # PR: use the base of the PR as the old commit
       - name: Checkout PR base commit
         if: github.event_name == 'pull_request'
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          ref: ${{ github.event.pull_request.base.sha }}
          path: old
       # not a PR: use the previous commit as the old commit
      - name: Checkout previous commit
        if: github.event_name != 'pull_request'
-       uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 2
          path: old
@@ -114,14 +114,14 @@ jobs:
        run: git -C old reset --hard HEAD^

      - name: Checkout new commit
-       uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          path: new
      - name: Install circular import checker
-       uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          repository: haampie/circular-import-fighter
-         ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
+         ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
          path: circular-import-fighter
      - name: Install dependencies
        working-directory: circular-import-fighter
```
```diff
@@ -14,3 +14,26 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt
+
+search:
+  ranking:
+    spack.html: -10
+    spack.*.html: -10
+    llnl.html: -10
+    llnl.*.html: -10
+    _modules/*: -10
+    command_index.html: -9
+    basic_usage.html: 5
+    configuration.html: 5
+    config_yaml.html: 5
+    packages_yaml.html: 5
+    build_settings.html: 5
+    environments.html: 5
+    containers.html: 5
+    mirrors.html: 5
+    module_file_support.html: 5
+    repositories.html: 5
+    binary_caches.html: 5
+    chain.html: 5
+    pipelines.html: 5
+    packaging_guide.html: 5
```
```diff
@@ -1,71 +1,11 @@
 @ECHO OFF
 setlocal EnableDelayedExpansion
 :: (c) 2021 Lawrence Livermore National Laboratory
 :: To use this file independently of Spack's installer, execute this script in its directory, or add the
 :: associated bin directory to your PATH. Invoke to launch Spack Shell.
 ::
 :: source_dir/spack/bin/spack_cmd.bat
 ::
-pushd %~dp0..
-set SPACK_ROOT=%CD%
-pushd %CD%\..
-set spackinstdir=%CD%
-popd
-
-
-:: Check if Python is on the PATH
-if not defined python_pf_ver (
-  (for /f "delims=" %%F in ('where python.exe') do (
-    set "python_pf_ver=%%F"
-    goto :found_python
-  ) ) 2> NUL
-)
-:found_python
-if not defined python_pf_ver (
-  :: If not, look for Python from the Spack installer
-  :get_builtin
-  (for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
-    set "python_ver=%%g")) 2> NUL
-
-  if not defined python_ver (
-    echo Python was not found on your system.
-    echo Please install Python or add Python to your PATH.
-  ) else (
-    set "py_path=!spackinstdir!\!python_ver!"
-    set "py_exe=!py_path!\python.exe"
-  )
-  goto :exitpoint
-) else (
-  :: Python is already on the path
-  set "py_exe=!python_pf_ver!"
-  (for /F "tokens=* USEBACKQ" %%F in (
-    `"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
-  if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
-  goto :exitpoint
-)
-:exitpoint
-
-set "PATH=%SPACK_ROOT%\bin\;%PATH%"
-if defined py_path (
-  set "PATH=%py_path%;%PATH%"
-)
-
-if defined py_exe (
-  "%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
-)
-
-set "EDITOR=notepad"
-
-DOSKEY spacktivate=spack env activate $*
-
-@echo **********************************************************************
-@echo ** Spack Package Manager
-@echo **********************************************************************
-
-IF "%1"=="" GOTO CONTINUE
-set
-GOTO:EOF
-
-:continue
-set PROMPT=[spack] %PROMPT%
-%comspec% /k
+call "%~dp0..\share\spack\setup-env.bat"
+pushd %SPACK_ROOT%
+%comspec% /K
```
```diff
@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
-  - name: 'github-actions-v0.5'
+  - name: github-actions-v0.6
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.6
+  - name: github-actions-v0.5
     metadata: $spack/share/spack/bootstrap/github-actions-v0.5
-  - name: 'github-actions-v0.4'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
-  - name: 'spack-install'
+  - name: spack-install
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
+    github-actions-v0.6: true
     github-actions-v0.5: true
-    github-actions-v0.4: true
     spack-install: true
```
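These source names are the same ones driven from the command line throughout the workflows in this diff. As a sketch of day-to-day usage (subcommands taken from the workflow snippets above):

```console
$ spack bootstrap status                         # show which sources Spack would use right now
$ spack bootstrap disable github-actions-v0.4    # stop using a retired source
$ spack bootstrap disable spack-install          # force bootstrapping from binary mirrors only
```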
```diff
@@ -39,11 +39,19 @@ concretizer:
   # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
   duplicates:
     # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "minimal": allows the duplication of 'build-tools' nodes only
+    # (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
-  # Option to specify compatiblity between operating systems for reuse of compilers and packages
-  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
-  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
+  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
+  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}
+
+  # Option to specify whether to support splicing. Splicing allows for
+  # the relinking of concrete package dependencies in order to better
+  # reuse already built packages with ABI compatible dependencies
+  splice:
+    explicit: []
+    automatic: false
```
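As a sketch of how these keys might be set together in a user's `concretizer.yaml` — the values below are illustrative choices, not Spack's defaults:

```yaml
# Hypothetical concretizer.yaml overrides built from the options above.
concretizer:
  duplicates:
    strategy: minimal        # alternatives: "none", or experimental "full"
  os_compatible:
    sonoma: [monterey]       # directional: sonoma hosts may reuse monterey builds
    monterey: [sonoma]       # the reverse must be stated explicitly
  splice:
    explicit: []             # no configured replacements
    automatic: false         # leave automatic ABI splicing off
```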
```diff
@@ -40,9 +40,9 @@ packages:
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
-    libgfortran: [ gcc-runtime ]
+    libgfortran: [gcc-runtime]
     libglx: [mesa+glx]
-    libifcore: [ intel-oneapi-runtime ]
+    libifcore: [intel-oneapi-runtime]
     libllvm: [llvm]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
```
```diff
@@ -1359,6 +1359,10 @@ For example, for the ``stackstart`` variant:
    mpileaks stackstart==4 # variant will be propagated to dependencies
    mpileaks stackstart=4  # only mpileaks will have this variant value

+Spack also allows variants to be propagated from a package that does
+not have that variant.
+
+
 ^^^^^^^^^^^^^^
 Compiler Flags
 ^^^^^^^^^^^^^^
```
```diff
@@ -166,3 +166,106 @@ while `py-numpy` still needs an older version:

 Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
 default behavior is ``duplicates:strategy:minimal``.
+
+--------
+Splicing
+--------
+
+The ``splice`` key covers config attributes for splicing specs in the solver.
+
+"Splicing" is a method for replacing a dependency with another spec
+that provides the same package or virtual. There are two types of
+splices, referring to different behaviors for shared dependencies
+between the root spec and the new spec replacing a dependency:
+"transitive" and "intransitive". A "transitive" splice is one that
+resolves all conflicts by taking the dependency from the new node. An
+"intransitive" splice is one that resolves all conflicts by taking the
+dependency from the original root. From a theory perspective, hybrid
+splices are possible but are not modeled by Spack.
+
+All spliced specs retain a ``build_spec`` attribute that points to the
+original Spec before any splice occurred. The ``build_spec`` for a
+non-spliced spec is itself.
+
+The figure below shows examples of transitive and intransitive splices:
+
+.. figure:: images/splices.png
+   :align: center
+
+The concretizer can be configured to explicitly splice particular
+replacements for a target spec. Splicing will allow the user to make
+use of generically built public binary caches, while swapping in
+highly optimized local builds for performance critical components
+and/or components that interact closely with the specific hardware
+details of the system. The most prominent candidate for splicing is
+MPI providers. MPI packages have relatively well-understood ABI
+characteristics, and most High Performance Computing facilities deploy
+highly optimized MPI packages tailored to their particular
+hardware. The following config block configures Spack to replace
+whatever MPI provider each spec was concretized to use with the
+particular package of ``mpich`` with the hash that begins ``abcdef``.
+
+.. code-block:: yaml
+
+   concretizer:
+     splice:
+       explicit:
+       - target: mpi
+         replacement: mpich/abcdef
+         transitive: false
+
+.. warning::
+
+   When configuring an explicit splice, you as the user take on the
+   responsibility for ensuring ABI compatibility between the specs
+   matched by the target and the replacement you provide. If they are
+   not compatible, Spack will not warn you and your application will
+   fail to run.
+
+The ``target`` field of an explicit splice can be any abstract
+spec. The ``replacement`` field must be a spec that includes the hash
+of a concrete spec, and the replacement must either be the same
+package as the target, provide the virtual that is the target, or
+provide a virtual that the target provides. The ``transitive`` field
+is optional -- by default, splices will be transitive.
+
+.. note::
+
+   With explicit splices configured, it is possible for Spack to
+   concretize to a spec that does not satisfy the input. For example,
+   with the config above ``hdf5 ^mvapich2`` will concretize to use
+   ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
+   will warn the user in this case, but will not fail the
+   concretization.
+
+.. _automatic_splicing:
+
+^^^^^^^^^^^^^^^^^^
+Automatic Splicing
+^^^^^^^^^^^^^^^^^^
+
+The Spack solver can be configured to do automatic splicing for
+ABI-compatible packages. Automatic splices are enabled in the concretizer
+config section
+
+.. code-block:: yaml
+
+   concretizer:
+     splice:
+       automatic: True
+
+Packages can include ABI-compatibility information using the
+``can_splice`` directive. See :ref:`the packaging
+guide<abi_compatibility>` for instructions on specifying ABI
+compatibility using the ``can_splice`` directive.
+
+.. note::
+
+   The ``can_splice`` directive is experimental and may be changed in
+   future versions.
+
+When automatic splicing is enabled, the concretizer will combine any
+number of ABI-compatible specs if possible to reuse installed packages
+and packages available from binary caches. The end result of these
+specs is equivalent to a series of transitive/intransitive splices,
+but the series may be non-obvious.
```
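A hedged sketch of what the ``can_splice`` directive described above might look like inside a ``package.py`` — the package, version numbers, and checksum here are hypothetical:

```python
# Hypothetical package.py sketch; not a real recipe from the Spack repo.
from spack.package import *


class Zlib(Package):
    """Illustrative package declaring ABI compatibility for splicing."""

    homepage = "https://zlib.net"

    version("1.1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    # Declares that this package at 1.1.0 can be spliced in for an
    # installed zlib@1.0.0 (i.e., it is ABI-compatible with it).
    can_splice("zlib@1.0.0", when="@1.1.0")
```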
```diff
@@ -210,16 +210,18 @@ def setup(sphinx):
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BaseBuilder"),
+    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
     ("py:class", "spack.spec.ArchSpec"),
     ("py:class", "spack.spec.InstallStatus"),
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
     ("py:class", "spack.traverse.EdgeAndDepth"),
+    ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
+    ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
 ]
```
```diff
@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
 higher-precedence scopes first. So, settings in a higher-precedence file
 can override those with the same key in a lower-precedence one. For
 list-valued settings, Spack *prepends* higher-precedence settings to
-lower-precedence settings. Completely ignoring higher-level configuration
+lower-precedence settings. Completely ignoring lower-precedence configuration
 options is supported with the ``::`` notation for keys (see
 :ref:`config-overrides` below).
@@ -511,6 +511,7 @@ Spack understands over a dozen special variables. These are:
 * ``$target_family``. The target family for the current host, as
   detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
 * ``$date``: the current date in the format YYYY-MM-DD
+* ``$spack_short_version``: the Spack version truncated to the first components.


 Note that, as with shell variables, you can write these as ``$varname``
```
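A minimal sketch combining the two features touched above — the ``::`` override notation and a special variable; the path layout is illustrative, not a recommended default:

```yaml
# Hypothetical config.yaml in a high-precedence scope. The trailing "::"
# discards the config section from all lower-precedence scopes instead of
# merging with it; $spack and $spack_short_version expand as described above.
config::
  install_tree:
    root: $spack/opt/spack-$spack_short_version
```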
```diff
@@ -184,7 +184,7 @@ Style Tests

 Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
 `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
-`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
+`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
 a series of style guides for Python that provide suggestions for everything
 from variable naming to indentation. In order to limit the number of PRs that
 were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
```
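For reference, a local run might look like the following, mirroring the CI invocation in the `valid-style.yml` changes above (the plain ``spack style`` form is assumed to run all configured tools):

```console
$ spack style            # run the configured style tools over the working tree
$ spack style -t black   # restrict the run to one tool, as the CI job does
```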
```diff
@@ -333,13 +333,9 @@ inserting them at different places in the spack code base. Whenever a hook
 type triggers by way of a function call, we find all the hooks of that type,
 and run them.

-Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
-types of hooks in the ``__init__.py``, and then python files in that folder
-can use hook functions. The files are automatically parsed, so if you write
-a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
-you can then write hook functions in that file that will be automatically detected,
-and run whenever your hook is called. This section will cover the basic kind
-of hooks, and how to write them.
+Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
+This module has to be registered in ``__init__.py`` so that Spack is aware of it.
+This section will cover the basic kind of hooks, and how to write them.

 ^^^^^^^^^^^^^^
 Types of Hooks
```
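A minimal sketch of such a hook module, assuming ``post_install`` is among the hook types Spack dispatches (the module name is hypothetical, and it would still need to be registered in ``__init__.py`` as described above):

```python
# lib/spack/spack/hooks/myintegration.py -- hypothetical hook module.


def post_install(spec, explicit):
    """Run by Spack's hook dispatcher after a package install finishes.

    ``spec`` is the installed concrete spec; ``explicit`` is True when the
    user requested the package directly rather than as a dependency.
    """
    if explicit:
        print(f"installed {spec.name}@{spec.version} at {spec.prefix}")
```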
```diff
@@ -712,27 +708,27 @@ Release branches
 ^^^^^^^^^^^^^^^^

 There are currently two types of Spack releases: :ref:`major releases
-<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
-<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
+<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
+<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
 diagram of how Spack release branches work::

-   o branch: develop (latest version, v0.19.0.dev0)
+   o branch: develop (latest version, v0.23.0.dev0)
    |
    o
-   | o branch: releases/v0.18, tag: v0.18.1
+   | o branch: releases/v0.22, tag: v0.22.1
    o |
-   | o tag: v0.18.0
+   | o tag: v0.22.0
    o |
    | o
    |/
    o
    |
    o
-   | o branch: releases/v0.17, tag: v0.17.2
+   | o branch: releases/v0.21, tag: v0.21.2
    o |
-   | o tag: v0.17.1
+   | o tag: v0.21.1
    o |
-   | o tag: v0.17.0
+   | o tag: v0.21.0
    o |
    | o
    |/
@@ -743,8 +739,8 @@ requests target ``develop``. The ``develop`` branch will report that its
 version is that of the next **major** release with a ``.dev0`` suffix.

 Each Spack release series also has a corresponding branch, e.g.
-``releases/v0.18`` has ``0.18.x`` versions of Spack, and
-``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
+``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
+``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
 tagged version on a release branch. Minor releases are back-ported from
 develop onto release branches. This is typically done by cherry-picking
 bugfix commits off of ``develop``.
```
```diff
@@ -774,27 +770,40 @@ for more details.
 Scheduling work for releases
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-We schedule work for releases by creating `GitHub projects
-<https://github.com/spack/spack/projects>`_. At any time, there may be
-several open release projects. For example, below are two releases (from
-some past version of the page linked above):
+We schedule work for **major releases** through `milestones
+<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
+<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
+<https://github.com/spack/spack/labels>`_.

-.. image:: images/projects.png
+There is only one milestone open at a time. Its name corresponds to the next major version, for
+example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
+core developers, so that they are not forgotten at the time of release. The milestone is closed
+when the release is made, and a new milestone is created for the next major release.

-This image shows one release in progress for ``0.15.1`` and another for
-``0.16.0``. Each of these releases has a project board containing issues
-and pull requests. GitHub shows a status bar with completed work in
-green, work in progress in purple, and work not started yet in gray, so
-it's fairly easy to see progress.
+Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
+assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
+issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
+Important issues should be assigned to the next milestone as well, so they appear at the top of
+the project board.

-Spack's project boards are not firm commitments so we move work between
-releases frequently. If we need to make a release and some tasks are not
-yet done, we will simply move them to the next minor or major release, rather
-than delaying the release to complete them.
+Spack's milestones are not firm commitments so we move work between releases frequently. If we
+need to make a release and some tasks are not yet done, we will simply move them to the next major
+release milestone, rather than delaying the release to complete them.

-For more on using GitHub project boards, see `GitHub's documentation
-<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
+^^^^^^^^^^^^^^^^^^^^^
+Backporting bug fixes
+^^^^^^^^^^^^^^^^^^^^^
+
+When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
+(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
+backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
+fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
+that the bug fix should be backported to.
+
+Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
+This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
+branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
+Typically there are one or two backport pull requests open at any given time.
```
.. _major-releases:
|
||||
|
||||
@@ -802,25 +811,21 @@ For more on using GitHub project boards, see `GitHub's documentation
Making major releases
^^^^^^^^^^^^^^^^^^^^^

-Assuming a project board has already been created and all required work
-completed, the steps to make the major release are:
+Assuming all required work from the milestone is completed, the steps to make the major release
+are:

-#. Create two new project boards:
+#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
+   release.

-   * One for the next major release
-   * One for the next point release
+#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.

-#. Move any optional tasks that are not done to one of the new project boards.
-
-   In general, small bugfixes should go to the next point release. Major
-   features, refactors, and changes that could affect concretization should
-   go in the next major release.
+#. Move any optional tasks that are not done to the next milestone.

#. Create a branch for the release, based on ``develop``:

   .. code-block:: console

-      $ git checkout -b releases/v0.15 develop
+      $ git checkout -b releases/v0.23 develop

   For a version ``vX.Y.Z``, the branch's name should be
   ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -856,8 +861,8 @@ completed, the steps to make the major release are:

   Create a pull request targeting the ``develop`` branch, bumping the major
   version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
-   For instance when you have just released ``v0.15.0``, set the version
-   to ``(0, 16, 0, 'dev0')`` on ``develop``.
+   For instance when you have just released ``v0.23.0``, set the version
+   to ``(0, 24, 0, 'dev0')`` on ``develop``.
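
   A minimal sketch of what the bumped file might contain, assuming the tuple-based
   form implied by the example above (variable names are an assumption; check the
   actual ``lib/spack/spack/__init__.py``):

   .. code-block:: python

      # lib/spack/spack/__init__.py -- hypothetical sketch of the version bump
      spack_version_info = (0, 24, 0, "dev0")
      spack_version = ".".join(str(s) for s in spack_version_info)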

#. Follow the steps in :ref:`publishing-releases`.

@@ -866,82 +871,52 @@ completed, the steps to make the major release are:
#. Follow the steps in :ref:`announcing-releases`.


-.. _point-releases:
+.. _patch-releases:

^^^^^^^^^^^^^^^^^^^^^
-Making point releases
+Making patch releases
^^^^^^^^^^^^^^^^^^^^^

-Assuming a project board has already been created and all required work
-completed, the steps to make the point release are:
+To make the patch release process both efficient and transparent, we use a *backports pull request*
+which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
+cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
+this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
+changes are fresh in the mind of the developer.

-#. Create a new project board for the next point release.
+The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
+is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.

-#. Move any optional tasks that are not done to the next project board.
+Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
+on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
+squashed), cherry-pick each associated commit individually. Never force push to the
+``backports/vX.Y.Z`` branch.
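
As an illustration, the cherry-picking workflow might look like this (a sketch;
``v0.23.1`` and the commit hash are hypothetical placeholders):

.. code-block:: console

   $ git checkout backports/v0.23.1
   $ git cherry-pick 7e46da7        # squashed commit of the labelled PR on develop
   $ git push origin backports/v0.23.1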

-#. Check out the release branch (it should already exist).
+.. warning::

-   For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
-   For ``v0.15.1``, you would check out ``releases/v0.15``:
+   Sometimes you may **still** get merge conflicts even if you have
+   cherry-picked all the commits in order. This generally means there
+   is some other intervening pull request that the one you're trying
+   to pick depends on. In these cases, you'll need to make a judgment
+   call regarding those pull requests. Consider the number of affected
+   files and/or the resulting differences.

-   .. code-block:: console
+   1. If the changes are small, you might just cherry-pick it.

-      $ git checkout releases/v0.15
+   2. If the changes are large, then you may decide that this fix is not
+      worth including in a patch release, in which case you should remove
+      the label from the pull request. Remember that large, manual backports
+      are seldom the right choice for a patch release.

-#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
-   in the project, create it. This pull request ought to be created as early as
-   possible when working on a release project, so that we can build the release
-   commits incrementally, and identify potential conflicts at an early stage.
+When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
+release as follows:

-#. Cherry-pick each pull request in the ``Done`` column of the release
-   project board onto the ``Backports vX.Y.Z`` pull request.
+#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
+   release.

-   This is **usually** fairly simple since we squash the commits from the
-   vast majority of pull requests. That means there is only one commit
-   per pull request to cherry-pick. For example, `this pull request
-   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
-   they were squashed into a single commit on merge. You can see the
-   commit that was created here:
+#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.

-   .. image:: images/pr-commit.png
-
-   You can easily cherry-pick it like this (assuming you already have the
-   release branch checked out):
-
-   .. code-block:: console
-
-      $ git cherry-pick 7e46da7
-
-   For pull requests that were rebased (or not squashed), you'll need to
-   cherry-pick each associated commit individually.
-
-   .. warning::
-
-      It is important to cherry-pick commits in the order they happened,
-      otherwise you can get conflicts while cherry-picking. When
-      cherry-picking look at the merge date,
-      **not** the number of the pull request or the date it was opened.
-
-      Sometimes you may **still** get merge conflicts even if you have
-      cherry-picked all the commits in order. This generally means there
-      is some other intervening pull request that the one you're trying
-      to pick depends on. In these cases, you'll need to make a judgment
-      call regarding those pull requests. Consider the number of affected
-      files and/or the resulting differences.
-
-      1. If the dependency changes are small, you might just cherry-pick it,
-         too. If you do this, add the task to the release board.
-
-      2. If the changes are large, then you may decide that this fix is not
-         worth including in a point release, in which case you should remove
-         the task from the release project.
-
-      3. You can always decide to manually back-port the fix to the release
-         branch if neither of the above options makes sense, but this can
-         require a lot of work. It's seldom the right choice.
-
-#. When all the commits from the project board are cherry-picked into
-   the ``Backports vX.Y.Z`` pull request, you can push a commit to:
+#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
+   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:

   1. Bump the version in ``lib/spack/spack/__init__.py``.
   2. Update ``CHANGELOG.md`` with a list of the changes.
@@ -950,20 +925,22 @@ completed, the steps to make the point release are:
      release branch. See `the changelog from 0.14.1
      <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

-#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
-   is needed to keep track in the release branch of all the commits that were
-   cherry-picked.
-
-#. Make sure CI passes on the release branch, including:
+#. Make sure CI passes on the **backports pull request**, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

-   If CI does not pass, you'll need to figure out why, and make changes
-   to the release branch until it does. You can make more commits, modify
-   or remove cherry-picked commits, or cherry-pick **more** from
-   ``develop`` to make this happen.
+#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
+   is needed to keep track in the release branch of all the commits that were
+   cherry-picked.
+
+#. Make sure CI passes on the last commit of the **release branch**.
+
+#. In the rare case you need to include additional commits in the patch release after the backports
+   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
+   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
+   repeat the process. Avoid repeated force pushes to the release branch.

#. Follow the steps in :ref:`publishing-releases`.

@@ -1038,25 +1015,31 @@ Updating `releases/latest`

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
-release is currently ``0.15.3``:
+release is currently ``0.22.3``:

-* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
-  it with ``releases/latest``, as these are higher than ``0.15.3``.
+* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
+  it with ``releases/latest``, as these are higher than ``0.22.3``.

* If you are making a new release of an **older** major version of
-  Spack, e.g. ``0.14.4``, then you should not tag it as
+  Spack, e.g. ``0.21.4``, then you should not tag it as
  ``releases/latest`` (as there are newer major versions).

-To tag ``releases/latest``, do this:
+To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

.. code-block:: console

-   $ git checkout releases/vX.Y   # vX.Y is the new release's branch
-   $ git tag --force releases/latest
-   $ git push --force --tags
+   $ git fetch --force git@github.com:spack/spack vX.Y.Z

-The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
-``releases/latest`` tag with the new one.
+Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
+
+.. code-block:: console
+
+   $ git tag --force releases/latest vX.Y.Z
+   $ git push --force git@github.com:spack/spack releases/latest
+
+The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
+tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
+local tags.


.. _announcing-releases:

@@ -425,9 +425,13 @@ Developing Packages in a Spack Environment

The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
-will configure Spack to install the package from local source. By
-default, it will also clone the package to a subdirectory in the
-environment. This package will have a special variant ``dev_path``
+will configure Spack to install the package from local source.
+If a version is not provided from the command line interface then spack
+will automatically pick the highest version the package has defined.
+This means any infinity versions (``develop``, ``main``, ``stable``) will be
+preferred in this selection process.
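
A minimal sketch of both forms (``mypkg`` is a hypothetical package):

.. code-block:: console

   $ spack env activate .
   $ spack develop mypkg@2.1     # explicit version
   $ spack develop mypkg         # highest version, preferring develop/main/stable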
+By default, ``spack develop`` will also clone the package to a subdirectory in the
+environment for the local source. This package will have a special variant ``dev_path``
set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
@@ -669,6 +673,9 @@ them to the environment.
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
+Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
+schemes). Spack-specific, environment and user path variables may be
+used in these paths. See :ref:`config-file-variables` for more information.
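
A minimal sketch of an environment that includes configuration from several
sources (the paths and URL are hypothetical):

.. code-block:: yaml

   spack:
     include:
     - relative/path/to/config.yaml
     - /absolute/path/to/settings.yaml
     - https://example.com/shared/packages.yaml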

^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence
@@ -1035,7 +1042,7 @@ file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
-excludes those built with the PGI compiler at version 18.5.
+excludes those built with the GCC compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install

@@ -1049,7 +1056,7 @@ directories.

   mpis:
     root: /path/to/view
     select: [^mpi]
-    exclude: ['%pgi@18.5']
+    exclude: ['%gcc@18.5']
    projections:
      all: '{name}/{version}-{compiler.name}'
    link: all
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
.. code-block:: console

   apt update
-  apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
+  apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

.. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

   dnf install epel-release
   dnf group install "Development Tools"
-  dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
+  dnf install gcc-gfortran redhat-lsb-core python3 unzip

.. tab-item:: macOS Brew

   .. code-block:: console

      brew update
-     brew install curl gcc git gnupg zip
+     brew install gcc git zip

------------
Installation
@@ -283,10 +283,6 @@ compilers`` or ``spack compiler list``:
   intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
   -- clang -------------------------------------------------------
   clang@3.4  clang@3.3  clang@3.2  clang@3.1
-  -- pgi ---------------------------------------------------------
-  pgi@14.3-0  pgi@13.2-0  pgi@12.1-0  pgi@10.9-0  pgi@8.0-1
-  pgi@13.10-0  pgi@13.1-1  pgi@11.10-0  pgi@10.2-0  pgi@7.1-3
-  pgi@13.6-0  pgi@12.8-0  pgi@11.1-0  pgi@9.0-4  pgi@7.0-6

Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
@@ -806,65 +802,6 @@ flags to the ``icc`` command:
   spec: intel@15.0.24.4.9.3


-^^^
-PGI
-^^^
-
-PGI comes with two sets of compilers for C++ and Fortran,
-distinguishable by their names. "Old" compilers:
-
-.. code-block:: yaml
-
-   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
-   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
-   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
-   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
-
-"New" compilers:
-
-.. code-block:: yaml
-
-   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
-   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
-   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
-   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
-
-Older installations of PGI contain just the old compilers; whereas
-newer installations contain the old and the new. The new compiler is
-considered preferable, as some packages
-(``hdf``) will not build with the old compiler.
-
-When auto-detecting a PGI compiler, there are cases where Spack will
-find the old compilers, when you really want it to find the new
-compilers. It is best to check this ``compilers.yaml``; and if the old
-compilers are being used, change ``pgf77`` and ``pgf90`` to
-``pgfortran``.
-
-Other issues:
-
-* There are reports that some packages will not build with PGI,
-  including ``libpciaccess`` and ``openssl``. A workaround is to
-  build these packages with another compiler and then use them as
-  dependencies for PGI-built packages. For example:
-
-  .. code-block:: console
-
-     $ spack install openmpi%pgi ^libpciaccess%gcc
-
-
-* PGI requires a license to use; see :ref:`licensed-compilers` for more
-  information on installation.
-
-.. note::
-
-   It is believed the problem with HDF 4 is that everything is
-   compiled with the ``F77`` compiler, but at some point some Fortran
-   90 code slipped in there. So compilers that can handle both FORTRAN
-   77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
-   compilers specific to one or the other (``pgf77``, ``pgf90``) won't
-   work.


^^^
NAG
^^^

Binary file not shown. (Before: 44 KiB)
Binary file not shown. (Before: 68 KiB)
BIN lib/spack/docs/images/splices.png (new file; binary not shown. After: 358 KiB)
@@ -12,10 +12,6 @@

Spack
===================

-.. epigraph::
-
-   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
-
Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,
@@ -457,11 +457,11 @@ For instance, the following config options,
   tcl:
     all:
       suffixes:
-        ^python@3.12: 'python-3.12'
+        ^python@3: 'python{^python.version}'
        ^openblas: 'openblas'

-will add a ``python-3.12`` version string to any packages compiled with
-Python matching the spec, ``python@3.12``. This is useful to know which
+will add a ``python-3.12.1`` version string to any packages compiled with
+Python matching the spec, ``python@3``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.
@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
   This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
   large repositories that have separate portions that can be built independently.
   If paths provided are directories then all the subdirectories and associated files
   will also be cloned.

Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1367,8 +1367,8 @@ Submodules
   git-submodule``.

Sparse-Checkout
   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
   sparse-checkout feature. This will only clone the paths that are specified in the
   ``git_sparse_paths`` attribute for the package along with the files in the top level directory.
   This feature allows you to only clone what you need from a large repository.
   Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
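
   A minimal package-level sketch (the repository URL and paths are hypothetical):

   .. code-block:: python

      class Mylib(AutotoolsPackage):
          git = "https://example.com/mylib.git"

          # clone only these paths (plus the top-level files)
          git_sparse_paths = ["src", "include", "docs"]

          version("develop", branch="main")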
@@ -1928,71 +1928,29 @@ to the empty list.
   String. A URL pointing to license setup instructions for the software.
   Defaults to the empty string.

-For example, let's take a look at the package for the PGI compilers.
+For example, let's take a look at the Arm Forge package.

.. code-block:: python

   # Licensing
   license_required = True
-   license_comment = "#"
-   license_files = ["license.dat"]
-   license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
-   license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"
+   license_comment = "#"
+   license_files = ["licences/Licence"]
+   license_vars = [
+       "ALLINEA_LICENSE_DIR",
+       "ALLINEA_LICENCE_DIR",
+       "ALLINEA_LICENSE_FILE",
+       "ALLINEA_LICENCE_FILE",
+   ]
+   license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"

-As you can see, PGI requires a license. Its license manager, FlexNet, uses
-the ``#`` symbol to denote a comment. It expects the license file to be
-named ``license.dat`` and to be located directly in the installation prefix.
-If you would like the installation file to be located elsewhere, simply set
-``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
-further instructions on installation and licensing, see the URL provided.
+Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
+It expects the license file to be named ``License`` and to be located in a ``licenses`` directory
+in the installation prefix.

-Let's walk through a sample PGI installation to see exactly what Spack is
-and isn't capable of. Since PGI does not provide a download URL, it must
-be downloaded manually. It can either be added to a mirror or located in
-the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
-for instructions on setting up a mirror.
-
-After running ``spack install pgi``, the first thing that will happen is
-Spack will create a global license file located at
-``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
-file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
-this:
-
-.. code-block:: sh
-
-   # A license is required to use pgi.
-   #
-   # The recommended solution is to store your license key in this global
-   # license file. After installation, the following symlink(s) will be
-   # added to point to this file (relative to the installation prefix):
-   #
-   #   license.dat
-   #
-   # Alternatively, use one of the following environment variable(s):
-   #
-   #   PGROUPD_LICENSE_FILE
-   #   LM_LICENSE_FILE
-   #
-   # If you choose to store your license in a non-standard location, you may
-   # set one of these variable(s) to the full pathname to the license file, or
-   # port@host if you store your license keys on a dedicated license server.
-   # You will likely want to set this variable in a module file so that it
-   # gets loaded every time someone tries to use pgi.
-   #
-   # For further information on how to acquire a license, please refer to:
-   #
-   #   http://www.pgroup.com/doc/pgiinstall.pdf
-   #
-   # You may enter your license below.
-
-You can add your license directly to this file, or tell FlexNet to use a
-license stored on a separate license server. Here is an example that
-points to a license server called licman1:
-
-.. code-block:: none
-
-   SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
-   USE_SERVER
+If you would like the installation file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
+one of the other license variables after installation. For further instructions on installation and
+licensing, see the URL provided.

If your package requires the license to install, you can reference the
location of this global license using ``self.global_license_file``.
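
For example, a package might point a license environment variable at the global
license file during the build. A sketch (the variable name is the Arm Forge one
used above; adapt it to your package):

.. code-block:: python

   def setup_build_environment(self, env):
       # assumption: the build reads ALLINEA_LICENSE_FILE at install time
       env.set("ALLINEA_LICENSE_FILE", self.global_license_file)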
@@ -2392,7 +2350,7 @@ by the ``--jobs`` option:
.. code-block:: python
-   :emphasize-lines: 7, 11
+   :linenos:


   class Xios(Package):
      ...
      def install(self, spec, prefix):
@@ -2503,15 +2461,14 @@ with. For example, suppose that in the ``libdwarf`` package you write:

   depends_on("libelf@0.8")

-Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
-You can also specify a requirement for a particular variant or for
-specific compiler flags:
+Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
+includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
+restrictions, you can also specify variants if this package requires
+optional features of the dependency.

.. code-block:: python

-   depends_on("libelf@0.8+debug")
-   depends_on("libelf debug=True")
-   depends_on("libelf cppflags='-fPIC'")
+   depends_on("libelf@0.8 +parser +pic")

Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the
@@ -2519,46 +2476,82 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.

-^^^^^^^^^^^^^^
-Version ranges
-^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Specifying backward and forward compatibility
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Although some packages require a specific version for their dependencies,
-most can be built with a range of versions. For example, if you are
-writing a package for a legacy Python module that only works with Python
-2.4 through 2.6, this would look like:
+Packages are often compatible with a range of versions of their
+dependencies. This is typically referred to as backward and forward
+compatibility. Spack allows you to specify this in the ``depends_on``
+directive using version ranges.
+
+**Backwards compatibility** means that the package requires at least a
+certain version of its dependency:

.. code-block:: python

-   depends_on("python@2.4:2.6")
+   depends_on("python@3.10:")

-Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
-greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
-you want to specify that a package works with any version of Python 3 (or
-higher), this would look like:
+In this case, the package requires Python 3.10 or newer.
+
+Commonly, packages drop support for older versions of a dependency as
+they release new versions. In Spack you can conveniently add every
+backward compatibility rule as a separate line:

.. code-block:: python

-   depends_on("python@3:")
+   # backward compatibility with Python
+   depends_on("python@3.8:")
+   depends_on("python@3.9:", when="@1.2:")
+   depends_on("python@3.10:", when="@1.4:")

-Here we leave out the upper bound. If you want to say that a package
-requires Python 2, you can similarly leave out the lower bound:
+This means that in general we need Python 3.8 or newer; from version
+1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
+need Python 3.10 or newer. Notice that it's fine to have overlapping
+ranges in the ``when`` clauses.
+
+**Forward compatibility** means that the package requires at most a
+certain version of its dependency. Forward compatibility rules are
+necessary when there are breaking changes in the dependency that the
+package cannot handle. In Spack we often add forward compatibility
+bounds only at the time a new, breaking version of a dependency is
+released. As with backward compatibility, it is typical to see a list
+of forward compatibility bounds in a package file as separate lines:

.. code-block:: python

-   depends_on("python@:2")
+   # forward compatibility with Python
+   depends_on("python@:3.12", when="@:1.10")
+   depends_on("python@:3.13", when="@:1.12")

-Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
-``@:3`` means "up to and including any 3.x version".
+Notice how the ``:`` now appears before the version number both in the
+dependency and in the ``when`` clause. This tells Spack that in general
+we need Python 3.13 or older up to version ``1.12.x``, and up to version
+``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
+with Python 3.13 was added in version 1.11, while version 1.13 added forward
+compatibility with Python 3.14.

-You can also simply write
+Notice that a version range ``@:3.12`` includes *any* patch version
+number ``3.12.x``, which is often useful when specifying forward compatibility
+bounds.
+
+So far we have seen open-ended version ranges, which is by far the most
+common use case. It is also possible to specify both a lower and an upper bound
+on the version of a dependency, like this:

.. code-block:: python

-   depends_on("python@2.7")
+   depends_on("python@3.10:3.12")

-to tell Spack that the package needs Python 2.7.x. This is equivalent to
-``@2.7:2.7``.
+There is a short syntax to specify that a package is compatible with say any
+``3.x`` version:
+
+.. code-block:: python
+
+   depends_on("python@3")
+
+The above is equivalent to ``depends_on("python@3:3")``, which means at least
+Python version 3 and at most any version ``3.x.y``.

In very rare cases, you may need to specify an exact version, for example
if you need to distinguish between ``3.2`` and ``3.2.1``:
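
A minimal sketch of the exact-version syntax, assuming Spack's ``@=`` operator
for pinning a single version:

.. code-block:: python

   depends_on("python@=3.2")    # exactly 3.2, not 3.2.1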
@@ -2932,9 +2925,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:

-1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
+1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
-3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
+3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
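
A generic sketch of overriding the first of these in a package (``Mypackage``
and the variable are hypothetical; ``env`` is the ``EnvironmentModifications``
object Spack passes in):

.. code-block:: python

   class Mypackage(Package):
       def setup_build_environment(self, env):
           # applied only while this package itself is being built
           env.set("MYPKG_BUILD_MODE", "release")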

The Qt package, for instance, uses this call:
@@ -5385,7 +5378,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming

@@ -5702,7 +5695,7 @@ subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

@@ -7078,6 +7071,46 @@ might write:
      CXXFLAGS += -I$DWARF_PREFIX/include
      CXXFLAGS += -L$DWARF_PREFIX/lib

+.. _abi_compatibility:
+
+----------------------------
+Specifying ABI Compatibility
+----------------------------
+
+Packages can include ABI-compatibility information using the
+``can_splice`` directive. For example, if ``Foo`` version 1.1 can
+always replace version 1.0, then the package could have:
+
+.. code-block:: python
+
+   can_splice("foo@1.0", when="@1.1")
+
+For virtual packages, packages can also specify ABI-compatibility with
+other packages providing the same virtual. For example, ``zlib-ng``
+could specify:
+
+.. code-block:: python
+
+   can_splice("zlib@1.3.1", when="@2.2+compat")
+
+Some packages have ABI-compatibility that is dependent on matching
+variant values, either for all variants or for some set of
+ABI-relevant variants. In those cases, it is not necessary to specify
+the full combinatorial explosion. The ``match_variants`` keyword can
+cover all single-value variants.
+
+.. code-block:: python
+
+   can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar, as long as they're the same
+   can_splice("foo@1.2", when="@1.3", match_variants="*")  # any variant values if all single-value variants match
+
+The concretizer will use ABI compatibility to determine automatic
+splices when :ref:`automatic splicing<automatic_splicing>` is enabled.
+
+.. note::
+
+   The ``can_splice`` directive is experimental, and may be replaced
+   by a higher-level interface in future versions of Spack.

.. _package_class_structure:

@@ -59,7 +59,7 @@ Functional Example
------------------

The simplest fully functional standalone example of a working pipeline can be
-examined live at this example `project <https://gitlab.com/scott.wittenburg/spack-pipeline-demo>`_
+examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
on gitlab.com.

Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the
@@ -67,39 +67,46 @@ pipeline:

.. code-block:: yaml

-   stages: [generate, build]
+   stages: [ "generate", "build" ]

   variables:
-     SPACK_REPO: https://github.com/scottwittenburg/spack.git
-     SPACK_REF: pipelines-reproducible-builds
+     SPACK_REPOSITORY: "https://github.com/spack/spack.git"
+     SPACK_REF: "develop-2024-10-06"
+     SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
+     SPACK_BACKTRACE: 1

   generate-pipeline:
-     stage: generate
     tags:
-       - docker
+       - saas-linux-small-amd64
+     stage: generate
     image:
-       name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
-       entrypoint: [""]
-     before_script:
-       - git clone ${SPACK_REPO}
-       - pushd spack && git checkout ${SPACK_REF} && popd
-       - . "./spack/share/spack/setup-env.sh"
+       name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
     script:
+       - git clone ${SPACK_REPOSITORY}
+       - cd spack && git checkout ${SPACK_REF} && cd ../
+       - . "./spack/share/spack/setup-env.sh"
       - spack --version
       - spack env activate --without-view .
-       - spack -d ci generate
+       - spack -d -v --color=always
+         ci generate
         --check-index-only
         --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
-         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
+         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
     artifacts:
       paths:
         - "${CI_PROJECT_DIR}/jobs_scratch_dir"

-   build-jobs:
+   build-pipeline:
     stage: build
     trigger:
       include:
-         - artifact: "jobs_scratch_dir/pipeline.yml"
+         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
           job: generate-pipeline
       strategy: depend
+     needs:
+       - artifacts: True
+         job: generate-pipeline


The key thing to note above is that there are two jobs: The first job to run,
``generate-pipeline``, runs the ``spack ci generate`` command to generate a
@@ -114,82 +121,93 @@ And here's the spack environment built by the pipeline represented as a
   spack:
     view: false
     concretizer:
-       unify: false
+       unify: true
+       reuse: false

     definitions:
     - pkgs:
       - zlib
-       - bzip2
-     - arch:
-       - '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'
+       - bzip2 ~debug
+     - compiler:
+       - '%gcc'

     specs:
     - matrix:
       - - $pkgs
-       - - $arch
-
-   mirrors: { "mirror": "s3://spack-public/mirror" }
+       - - $compiler

   ci:
-     enable-artifacts-buildcache: True
-     rebuild-index: False
+     target: gitlab

     pipeline-gen:
     - any-job:
-         before_script:
-           - git clone ${SPACK_REPO}
-           - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
-           - . "./spack/share/spack/setup-env.sh"
-     - build-job:
-         tags: [docker]
+         tags:
+         - saas-linux-small-amd64
         image:
-           name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
-           entrypoint: [""]
+           name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
+         before_script:
+         - git clone ${SPACK_REPOSITORY}
+         - cd spack && git checkout ${SPACK_REF} && cd ../
+         - . "./spack/share/spack/setup-env.sh"
+         - spack --version
+         - export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
+         - spack config blame mirrors

The elements of this file important to spack ci pipelines are described in more
detail below, but there are a couple of things to note about the above working
example:

.. note::
-   There is no ``script`` attribute specified here. The reason for this is
-   Spack CI will automatically generate reasonable default scripts. More
-   detail on what is in these scripts can be found below.
+   The use of ``reuse: false`` in spack environments used for pipelines is
+   almost always what you want, as without it your pipelines will not rebuild
+   packages even if package hashes have changed. This is due to the concretizer
+   strongly preferring known hashes when ``reuse: true``.

-Also notice the ``before_script`` section. It is required when using any of the
-default scripts to source the ``setup-env.sh`` script in order to inform
-the default scripts where to find the ``spack`` executable.
+The ``ci`` section in the above environment file contains the bare minimum
+configuration required for ``spack ci generate`` to create a working pipeline.
+The ``target: gitlab`` tells spack that the desired pipeline output is for
+gitlab. However, this isn't strictly required, as currently gitlab is the
+only possible output format for pipelines. The ``pipeline-gen`` section
+contains the key information needed to specify attributes for the generated
+jobs. Notice that it contains a list which has only a single element in
+this case. In real pipelines it will almost certainly have more elements,
+and in those cases, order is important: spack starts at the bottom of the
+list and works upwards when applying attributes.

-Normally ``enable-artifacts-buildcache`` is not recommended in production as it
-results in large binary artifacts getting transferred back and forth between
-gitlab and the runners. But in this example on gitlab.com where there is no
-shared, persistent file system, and where no secrets are stored for giving
-permission to write to an S3 bucket, ``enable-artifacts-buildcache`` is the only
-way to propagate binaries from jobs to their dependents.
+But in this simple case, we use only the special key ``any-job`` to
+indicate that spack should apply the specified attributes (``tags``, ``image``,
+and ``before_script``) to any job it generates. This includes jobs for
+building/pushing all packages, a ``rebuild-index`` job at the end of the
+pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
+no rebuilds are required.

-Also, it is usually a good idea to let the pipeline generate a final "rebuild the
-buildcache index" job, so that subsequent pipeline generation can quickly determine
-which specs are up to date and which need to be rebuilt (it's a good idea for other
-reasons as well, but those are out of scope for this discussion). In this case we
-have disabled it (using ``rebuild-index: False``) because the index would only be
-generated in the artifacts mirror anyway, and consequently would not be available
-during subsequent pipeline runs.
+Something to note is that in this simple case, we rely on spack to
+generate a reasonable script for the package build jobs (it just creates
+a script that invokes ``spack ci rebuild``).

-.. note::
-   With the addition of reproducible builds (#22887) a previously working
-   pipeline will require some changes:
-
-   * In the build-jobs, the environment location changed.
-     This will typically show as a ``KeyError`` in the failing job. Be sure to
-     point to ``${SPACK_CONCRETE_ENV_DIR}``.
-
-   * When using ``include`` in your environment, be sure to make the included
-     files available in the build jobs. This means adding those files to the
-     artifact directory. Those files will also be missing in the reproducibility
-     artifact.
-
-   * Because the location of the environment changed, including files with
-     relative path may have to be adapted to work both in the project context
-     (generation job) and in the concrete env dir context (build job).
+Another thing to note is the use of the ``SPACK_USER_CONFIG_PATH`` environment
+variable in any generated jobs. The purpose of this is to make spack
+aware of one final file in the example, the one that contains the mirror
+configuration. This file, ``mirrors.yaml``, looks like this:
+
+.. code-block:: yaml
+
+   mirrors:
+     buildcache-destination:
+       url: oci://registry.gitlab.com/spack/pipeline-quickstart
+       binary: true
+       access_pair:
+         id_variable: CI_REGISTRY_USER
+         secret_variable: CI_REGISTRY_PASSWORD
+
+Note the name of the mirror is ``buildcache-destination``, which is required
+as of Spack 0.23 (see below for more information). The mirror url simply
+points to the container registry associated with the project, while
+``id_variable`` and ``secret_variable`` refer to environment variables
+containing the access credentials for the mirror.
+
+When spack builds packages for this example project, they will be pushed to
+the project container registry, where they will be available for subsequent
+jobs to install as dependencies, or for other pipelines to use to build runnable
+container images.

-----------------------------------
Spack commands supporting pipelines
@@ -417,15 +435,6 @@ configuration with a ``script`` attribute. Specifying a signing job without a sc
does not create a signing job and the job configuration attributes will be ignored.
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.

-^^^^^^^^^^^^^^^^^
-Cleanup (cleanup)
-^^^^^^^^^^^^^^^^^
-
-When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
-created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
-script, but do expect that the spack command is in the path and require a
-``before_script`` to be specified that sources the ``setup-env.sh`` script.
-
.. _noop_jobs:

^^^^^^^^^^^^
||||
^^^^^^^^^^^^
|
||||
@@ -592,6 +601,77 @@ the attributes will be merged starting from the bottom match going up to the top
|
||||
|
||||
In the case that no match is found in a submapping section, no additional attributes will be applied.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Dynamic Mapping Sections
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For large scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
|
||||
mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
|
||||
behavior, but it does follow the rest of the merge rules for configurations.
|
||||
|
||||
The dynamic mapping service needs to implement a single REST API interface for getting
|
||||
requests ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.
|
||||
|
||||
example request.
|
||||
|
||||
.. code-block::
|
||||
|
||||
https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0
|
||||
|
||||
|
||||
With an example response the updates kubernetes request variables, overrides the max retries for gitlab,
|
||||
and prepends a note about the modifications made by the my-dyn-mapping.spack.io service.
|
||||
|
||||
.. code-block::
|
||||
|
||||
200 OK
|
||||
|
||||
{
|
||||
"variables":
|
||||
{
|
||||
"KUBERNETES_CPU_REQUEST": "500m",
|
||||
"KUBERNETES_MEMORY_REQUEST": "2G",
|
||||
},
|
||||
"retry": { "max:": "1"}
|
||||
"script+:":
|
||||
[
|
||||
"echo \"Job modified by my-dyn-mapping.spack.io\""
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
The ci.yaml configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.
|
||||
|
||||
It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``
|
||||
respectively. It is also possible to configure required attributes under ``required`` section.
|
||||
|
||||
Options to configure the client timeout and SSL verification using the ``timeout`` and ``verify_ssl`` options.
|
||||
By default, the ``timeout`` is set to the option in ``config:timeout`` and ``veryify_ssl`` is set the the option in ``config::verify_ssl``.
|
||||
|
||||
Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
|
||||
header may be environment variables that are expanded at runtime, such as a private token configured on the runner.
|
||||
|
||||
Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``.
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
ci:
|
||||
- dynamic-mapping:
|
||||
endpoint: my-dyn-mapping.spack.io/allocation
|
||||
timeout: 10
|
||||
verify_ssl: True
|
||||
header:
|
||||
PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
|
||||
MY_CONFIG: "fuzz_allocation:false"
|
||||
allow:
|
||||
- variables
|
||||
ignore:
|
||||
- script
|
||||
require: []
|
||||
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
Bootstrapping
|
||||
^^^^^^^^^^^^^
|
||||
@@ -670,15 +750,6 @@ environment/stack file, and in that case no bootstrapping will be done (only the
specs will be staged for building) and the runners will be expected to already
have all needed compilers installed and configured for spack to use.

-^^^^^^^^^^^^^^^^^^^
-Pipeline Buildcache
-^^^^^^^^^^^^^^^^^^^
-
-The ``enable-artifacts-buildcache`` key
-takes a boolean and determines whether the pipeline uses artifacts to store and
-pass along the buildcaches from one stage to the next (the default if you don't
-provide this option is ``False``).
-
^^^^^^^^^^^^^^^^
Broken Specs URL
^^^^^^^^^^^^^^^^

@@ -1,13 +1,13 @@
-sphinx==7.4.7
+sphinx==8.1.3
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
-sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.1
-docutils==0.20.1
+sphinx-rtd-theme==3.0.1
+python-levenshtein==0.26.1
+docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
isort==5.13.2
-black==24.8.0
+black==24.10.0
flake8==7.1.1
mypy==1.11.1
238 lib/spack/env/cc (vendored)
@@ -101,10 +101,9 @@ setsep() {
    esac
}

-# prepend LISTNAME ELEMENT [SEP]
+# prepend LISTNAME ELEMENT
#
-# Prepend ELEMENT to the list stored in the variable LISTNAME,
-# assuming the list is separated by SEP.
+# Prepend ELEMENT to the list stored in the variable LISTNAME.
# Handles empty lists and single-element lists.
prepend() {
    varname="$1"
@@ -238,6 +237,36 @@ esac
}
"

+# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
+# which are used to prioritize paths when assembling the final command line.
+
+# init_path_lists LISTNAME
+# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
+init_path_lists() {
+    eval "spack_store_$1=\"\""
+    eval "$1=\"\""
+    eval "system_$1=\"\""
+}
+
+# assign_path_lists LISTNAME1 LISTNAME2
+# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
+assign_path_lists() {
+    eval "spack_store_$1=\"\${spack_store_$2}\""
+    eval "$1=\"\${$2}\""
+    eval "system_$1=\"\${system_$2}\""
+}
+
+# append_path_lists LISTNAME ELT
+# Append the provided ELT to the appropriate list, based on the result of path_order().
+append_path_lists() {
+    path_order "$2"
+    case $? in
+        0) eval "append spack_store_$1 \"\$2\"" ;;
+        1) eval "append $1 \"\$2\"" ;;
+        2) eval "append system_$1 \"\$2\"" ;;
+    esac
+}
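+# Hypothetical usage sketch of the helpers above (assumptions, not part of the
+# generated command line): path_order() classifies a path as spack-store (0),
+# ordinary (1), or system (2), and append_path_lists files it accordingly.
+#
+#   init_path_lists   my_dirs_list                # my_dirs_list and its spack_store_/system_ variants = ""
+#   append_path_lists my_dirs_list "/usr/lib"     # a system path lands in system_my_dirs_list
+#   assign_path_lists final_list   my_dirs_list   # copies all three buckets into final_list's buckets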

# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -470,12 +499,7 @@ input_command="$*"
parse_Wl() {
    while [ $# -ne 0 ]; do
        if [ "$wl_expect_rpath" = yes ]; then
-            path_order "$1"
-            case $? in
-                0) append return_spack_store_rpath_dirs_list "$1" ;;
-                1) append return_rpath_dirs_list "$1" ;;
-                2) append return_system_rpath_dirs_list "$1" ;;
-            esac
+            append_path_lists return_rpath_dirs_list "$1"
            wl_expect_rpath=no
        else
            case "$1" in
@@ -484,24 +508,14 @@ parse_Wl() {
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
-                    path_order "$arg"
-                    case $? in
-                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
-                        1) append return_rpath_dirs_list "$arg" ;;
-                        2) append return_system_rpath_dirs_list "$arg" ;;
-                    esac
+                    append_path_lists return_rpath_dirs_list "$arg"
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
-                    path_order "$arg"
-                    case $? in
-                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
-                        1) append return_rpath_dirs_list "$arg" ;;
-                        2) append return_system_rpath_dirs_list "$arg" ;;
-                    esac
+                    append_path_lists return_rpath_dirs_list "$arg"
                    ;;
                -rpath|--rpath)
                    wl_expect_rpath=yes
@@ -509,8 +523,7 @@ parse_Wl() {
                "$dtags_to_strip")
                    ;;
                -Wl)
-                    # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
-                    # it.
+                    # Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
                    return 1
                    ;;
                *)
@@ -529,21 +542,10 @@ categorize_arguments() {
    return_other_args_list=""
    return_isystem_was_used=""

-    return_isystem_spack_store_include_dirs_list=""
-    return_isystem_system_include_dirs_list=""
-    return_isystem_include_dirs_list=""
-
-    return_spack_store_include_dirs_list=""
-    return_system_include_dirs_list=""
-    return_include_dirs_list=""
-
-    return_spack_store_lib_dirs_list=""
-    return_system_lib_dirs_list=""
-    return_lib_dirs_list=""
-
-    return_spack_store_rpath_dirs_list=""
-    return_system_rpath_dirs_list=""
-    return_rpath_dirs_list=""
+    init_path_lists return_isystem_include_dirs_list
+    init_path_lists return_include_dirs_list
+    init_path_lists return_lib_dirs_list
+    init_path_lists return_rpath_dirs_list

    # Global state for keeping track of -Wl,-rpath -Wl,/path
    wl_expect_rpath=no
@@ -609,32 +611,17 @@ categorize_arguments() {
            arg="${1#-isystem}"
            return_isystem_was_used=true
            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            path_order "$arg"
-            case $? in
-                0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
-                1) append return_isystem_include_dirs_list "$arg" ;;
-                2) append return_isystem_system_include_dirs_list "$arg" ;;
-            esac
+            append_path_lists return_isystem_include_dirs_list "$arg"
            ;;
        -I*)
            arg="${1#-I}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            path_order "$arg"
-            case $? in
-                0) append return_spack_store_include_dirs_list "$arg" ;;
-                1) append return_include_dirs_list "$arg" ;;
-                2) append return_system_include_dirs_list "$arg" ;;
-            esac
+            append_path_lists return_include_dirs_list "$arg"
            ;;
        -L*)
            arg="${1#-L}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            path_order "$arg"
-            case $? in
-                0) append return_spack_store_lib_dirs_list "$arg" ;;
-                1) append return_lib_dirs_list "$arg" ;;
-                2) append return_system_lib_dirs_list "$arg" ;;
-            esac
+            append_path_lists return_lib_dirs_list "$arg"
            ;;
        -l*)
            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -667,32 +654,17 @@ categorize_arguments() {
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
@@ -709,7 +681,36 @@ categorize_arguments() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list "$1"
|
||||
# if mode is not ld, we can just add to other args
|
||||
if [ "$mode" != "ld" ]; then
|
||||
append return_other_args_list "$1"
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
# if we're in linker mode, we need to parse raw RPATH args
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
if [ $# -eq 1 ]; then
|
||||
# -rpath without value: let the linker raise an error.
|
||||
append return_other_args_list "$1"
|
||||
break
|
||||
fi
|
||||
shift
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
@@ -731,21 +732,10 @@ categorize_arguments() {
|
||||
|
||||
categorize_arguments "$@"
|
||||
|
||||
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists include_dirs_list return_include_dirs_list
|
||||
assign_path_lists lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists rpath_dirs_list return_rpath_dirs_list
|
||||
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
other_args_list="$return_other_args_list"
|
||||
@@ -821,21 +811,10 @@ IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
|
||||
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
|
||||
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
|
||||
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
@@ -894,7 +873,7 @@ esac
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
|
||||
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
|
||||
else
|
||||
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
@@ -910,64 +889,63 @@ args_list="$flags_list"
|
||||
|
||||
# Include search paths partitioned by (in store, non-sytem, system)
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_flags_spack_store_include_dirs_list -I
|
||||
extend args_list spack_store_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_store_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_include_dirs_list -I
|
||||
extend args_list include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_spack_flags_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths partitioned by (in store, non-sytem, system)
|
||||
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
|
||||
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_store_lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
# RPATHs arguments
|
||||
rpath_prefix=""
|
||||
case "$mode" in
|
||||
ccld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
rpath_prefix="$rpath"
|
||||
;;
|
||||
ld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
rpath_prefix="-rpath${lsep}"
|
||||
;;
|
||||
esac
|
||||
|
||||
# if mode is ccld or ld, extend RPATH lists with the prefix determined above
|
||||
if [ -n "$rpath_prefix" ]; then
|
||||
extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list system_rpath_dirs_list "$rpath_prefix"
|
||||
fi
|
||||
|
||||
# Other arguments from the input command
|
||||
extend args_list other_args_list
|
||||
extend args_list spack_flags_other_args_list
|
||||
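The refactor above folds each repeated path_order + case block into a single append_path_lists call. The helper's body lies outside these hunks; the following is a minimal Python sketch of the three-way partition it performs (store / other / system), with illustrative paths and names, not the actual shell implementation:

    import os

    SPACK_STORE = "/opt/spack/store"                      # assumed store root
    SYSTEM_DIRS = {"/usr/lib", "/usr/lib64", "/lib", "/lib64"}

    def append_path_lists(lists, name, path):
        """Append `path` to the store, system, or plain variant of list `name`."""
        real = os.path.normpath(path)
        if real.startswith(SPACK_STORE):
            lists["spack_store_" + name].append(path)     # highest priority
        elif real in SYSTEM_DIRS:
            lists["system_" + name].append(path)          # lowest priority
        else:
            lists[name].append(path)

    lists = {"rpath_dirs_list": [], "spack_store_rpath_dirs_list": [], "system_rpath_dirs_list": []}
    for p in ["/opt/spack/store/gcc/lib", "/usr/lib", "/home/me/lib"]:
        append_path_lists(lists, "rpath_dirs_list", p)
    print(lists)  # each path lands in exactly one of the three lists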
|
2
lib/spack/external/__init__.py
vendored
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.5-dev (commit bceb39528ac49dd0c876b2e9bf3e7482e9c2be4a)
|
||||
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
@@ -81,8 +81,13 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
         self.generation = generation
         # Only relevant for AArch64
         self.cpu_part = cpu_part
-        # Cache the ancestor computation
+
+        # Cache the "ancestor" computation
         self._ancestors = None
+        # Cache the "generic" computation
+        self._generic = None
+        # Cache the "family" computation
+        self._family = None

     @property
     def ancestors(self):
@@ -174,18 +179,22 @@ def __contains__(self, feature):
     @property
     def family(self):
         """Returns the architecture family a given target belongs to"""
-        roots = [x for x in [self] + self.ancestors if not x.ancestors]
-        msg = "a target is expected to belong to just one architecture family"
-        msg += f"[found {', '.join(str(x) for x in roots)}]"
-        assert len(roots) == 1, msg
+        if self._family is None:
+            roots = [x for x in [self] + self.ancestors if not x.ancestors]
+            msg = "a target is expected to belong to just one architecture family"
+            msg += f"[found {', '.join(str(x) for x in roots)}]"
+            assert len(roots) == 1, msg
+            self._family = roots.pop()

-        return roots.pop()
+        return self._family

     @property
     def generic(self):
         """Returns the best generic architecture that is compatible with self"""
-        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
-        return max(generics, key=lambda x: len(x.ancestors))
+        if self._generic is None:
+            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
+            self._generic = max(generics, key=lambda x: len(x.ancestors))
+        return self._generic

     def to_dict(self):
         """Returns a dictionary representation of this object."""
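The change above memoizes family and generic on first access instead of recomputing the ancestor scan on every property read. For comparison, a minimal sketch of the same compute-once pattern using the stdlib's functools.cached_property (archspec caches by hand in _family/_generic; the Target class below is a toy stand-in, not archspec's):

    import functools

    class Target:
        def __init__(self, name, ancestors=()):
            self.name = name
            self.ancestors = list(ancestors)

        @functools.cached_property
        def family(self):
            roots = [x for x in [self] + self.ancestors if not x.ancestors]
            assert len(roots) == 1, "expected exactly one architecture family"
            return roots[0]  # computed once, then cached on the instance

    x86_64 = Target("x86_64")
    zen4 = Target("zen4", ancestors=[x86_64])
    assert zen4.family is zen4.family  # second access hits the cache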
@@ -1482,7 +1482,6 @@
         "cldemote",
         "movdir64b",
         "movdiri",
-        "pdcm",
         "serialize",
         "waitpkg"
       ],
@@ -2237,6 +2236,84 @@
       ]
     }
   },
+  "zen5": {
+    "from": ["zen4"],
+    "vendor": "AuthenticAMD",
+    "features": [
+      "abm",
+      "aes",
+      "avx",
+      "avx2",
+      "avx512_bf16",
+      "avx512_bitalg",
+      "avx512bw",
+      "avx512cd",
+      "avx512dq",
+      "avx512f",
+      "avx512ifma",
+      "avx512vbmi",
+      "avx512_vbmi2",
+      "avx512vl",
+      "avx512_vnni",
+      "avx512_vp2intersect",
+      "avx512_vpopcntdq",
+      "avx_vnni",
+      "bmi1",
+      "bmi2",
+      "clflushopt",
+      "clwb",
+      "clzero",
+      "cppc",
+      "cx16",
+      "f16c",
+      "flush_l1d",
+      "fma",
+      "fsgsbase",
+      "gfni",
+      "ibrs_enhanced",
+      "mmx",
+      "movbe",
+      "movdir64b",
+      "movdiri",
+      "pclmulqdq",
+      "popcnt",
+      "rdseed",
+      "sse",
+      "sse2",
+      "sse4_1",
+      "sse4_2",
+      "sse4a",
+      "ssse3",
+      "tsc_adjust",
+      "vaes",
+      "vpclmulqdq",
+      "xsavec",
+      "xsaveopt"
+    ],
+    "compilers": {
+      "gcc": [
+        {
+          "versions": "14.1:",
+          "name": "znver5",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ],
+      "aocc": [
+        {
+          "versions": "5.0:",
+          "name": "znver5",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ],
+      "clang": [
+        {
+          "versions": "19.1:",
+          "name": "znver5",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ]
+    }
+  },
   "ppc64": {
     "from": [],
     "vendor": "generic",
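The "flags" values in the new zen5 entry are templates over the entry's "name" field; a quick sketch of how such a template expands (plain str.format, matching the {name} placeholder syntax used in the JSON):

    entry = {"name": "znver5", "flags": "-march={name} -mtune={name}"}
    flags = entry["flags"].format(name=entry["name"])
    assert flags == "-march=znver5 -mtune=znver5"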
@@ -41,6 +41,20 @@ def comma_and(sequence: List[str]) -> str:
     return comma_list(sequence, "and")


+def ordinal(number: int) -> str:
+    """Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.
+
+    Args:
+        number: int to convert to ordinal number
+
+    Returns: number's corresponding ordinal
+    """
+    idx = (number % 10) << 1
+    tens = number % 100 // 10
+    suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
+    return f"{number}{suffix}"
+
+
 def quote(sequence: List[str], q: str = "'") -> List[str]:
     """Quotes each item in the input list with the quote character passed as second argument."""
     return [f"{q}{e}{q}" for e in sequence]
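A few spot checks of the suffix arithmetic in ordinal() above: idx picks a two-character slice out of "thstndrd" ("th", "st", "nd", "rd"), and the tens == 1 guard handles 11th/12th/13th. Standalone copy of the function, for illustration only:

    def ordinal(number: int) -> str:
        idx = (number % 10) << 1
        tens = number % 100 // 10
        suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
        return f"{number}{suffix}"

    assert [ordinal(n) for n in (1, 2, 3, 4, 11, 12, 13, 21, 102, 111)] == [
        "1st", "2nd", "3rd", "4th", "11th", "12th", "13th", "21st", "102nd", "111th"
    ]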
@@ -20,11 +20,23 @@
 import tempfile
 from contextlib import contextmanager
 from itertools import accumulate
-from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
+from typing import (
+    Callable,
+    Deque,
+    Dict,
+    Iterable,
+    List,
+    Match,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Union,
+)

 import llnl.util.symlink
 from llnl.util import tty
-from llnl.util.lang import dedupe, memoized
+from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
 from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

 from ..path import path_to_os_path, system_path_filter
@@ -47,6 +59,7 @@
     "copy_mode",
     "filter_file",
     "find",
+    "find_first",
     "find_headers",
     "find_all_headers",
     "find_libraries",
@@ -84,6 +97,8 @@
     "visit_directory_tree",
 ]

+Path = Union[str, pathlib.Path]
+
 if sys.version_info < (3, 7, 4):
     # monkeypatch shutil.copystat to fix PermissionError when copying read-only
     # files on Lustre when using Python < 3.7.4
@@ -1672,105 +1687,203 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
     return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()


-def find(root, files, recursive=True):
-    """Search for ``files`` starting from the ``root`` directory.
-
-    Like GNU/BSD find but written entirely in Python.
-
-    Examples:
-
-    .. code-block:: console
-
-       $ find /usr -name python
-
-    is equivalent to:
-
-    >>> find('/usr', 'python')
-
-    .. code-block:: console
-
-       $ find /usr/local/bin -maxdepth 1 -name python
-
-    is equivalent to:
-
-    >>> find('/usr/local/bin', 'python', recursive=False)
+def find(
+    root: Union[Path, Sequence[Path]],
+    files: Union[str, Sequence[str]],
+    recursive: bool = True,
+    max_depth: Optional[int] = None,
+) -> List[str]:
+    """Finds all files matching the patterns from ``files`` starting from ``root``. This function
+    returns a deterministic result for the same input and directory structure when run multiple
+    times. Symlinked directories are followed, and unique directories are searched only once. Each
+    matching file is returned only once at lowest depth in case multiple paths exist due to
+    symlinked directories.

     Accepts any glob characters accepted by fnmatch:

     ==========  ====================================
     Pattern     Meaning
     ==========  ====================================
-    ``*``       matches everything
+    ``*``       matches one or more characters
     ``?``       matches any single character
     ``[seq]``   matches any character in ``seq``
     ``[!seq]``  matches any character not in ``seq``
     ==========  ====================================

-    Parameters:
-        root (str): The root directory to start searching from
-        files (str or collections.abc.Sequence): Library name(s) to search for
-        recursive (bool): if False search only root folder,
-            if True descends top-down from the root. Defaults to True.
+    Examples:

-    Returns:
-        list: The files that have been found
+    >>> find("/usr", "*.txt", recursive=True, max_depth=2)
+
+    finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
+    depth 2.
+
+    >>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)
+
+    finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
+    ``/var`` at any depth.
+
+    >>> find("/usr", "GL/*.h", recursive=True)
+
+    finds all header files in a directory GL at any depth in the directory ``/usr``.
+
+    Parameters:
+        root: One or more root directories to start searching from
+        files: One or more filename patterns to search for
+        recursive: if False search only root, if True descends from roots. Defaults to True.
+        max_depth: if set, don't search below this depth. Cannot be set if recursive is False
+
+    Returns a list of absolute, matching file paths.
     """
+    if isinstance(root, (str, pathlib.Path)):
+        root = [root]
+    elif not isinstance(root, collections.abc.Sequence):
+        raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")
+
     if isinstance(files, str):
         files = [files]
+    elif not isinstance(files, collections.abc.Sequence):
+        raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")

-    if recursive:
-        tty.debug(f"Find (recursive): {root} {str(files)}")
-        result = _find_recursive(root, files)
-    else:
-        tty.debug(f"Find (not recursive): {root} {str(files)}")
-        result = _find_non_recursive(root, files)
+    # If recursive is false, max_depth can only be None or 0
+    if max_depth and not recursive:
+        raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")

-    tty.debug(f"Find complete: {root} {str(files)}")
+    tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
+    if not recursive:
+        max_depth = 0
+    elif max_depth is None:
+        max_depth = sys.maxsize
+    result = _find_max_depth(root, files, max_depth)
+    tty.debug(f"Find complete: {root} {files}")
     return result


-@system_path_filter
-def _find_recursive(root, search_files):
-    # The variable here is **on purpose** a defaultdict. The idea is that
-    # we want to poke the filesystem as little as possible, but still maintain
-    # stability in the order of the answer. Thus we are recording each library
-    # found in a key, and reconstructing the stable order later.
-    found_files = collections.defaultdict(list)
-
-    # Make the path absolute to have os.walk also return an absolute path
-    root = os.path.abspath(root)
-    for path, _, list_files in os.walk(root):
-        for search_file in search_files:
-            matches = glob.glob(os.path.join(path, search_file))
-            matches = [os.path.join(path, x) for x in matches]
-            found_files[search_file].extend(matches)
-
-    answer = []
-    for search_file in search_files:
-        answer.extend(found_files[search_file])
-
-    return answer
+def _log_file_access_issue(e: OSError, path: str) -> None:
+    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
+    tty.debug(f"find must skip {path}: {errno_name} {e}")


-@system_path_filter
-def _find_non_recursive(root, search_files):
-    # The variable here is **on purpose** a defaultdict as os.list_dir
-    # can return files in any order (does not preserve stability)
-    found_files = collections.defaultdict(list)
+def _file_id(s: os.stat_result) -> Tuple[int, int]:
+    # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
+    # https://github.com/python/cpython/blob/3.9/Python/fileutils.c
+    return (s.st_ino, s.st_dev)

-    # Make the path absolute to have absolute path returned
-    root = os.path.abspath(root)
-
-    for search_file in search_files:
-        matches = glob.glob(os.path.join(root, search_file))
-        matches = [os.path.join(root, x) for x in matches]
-        found_files[search_file].extend(matches)
+def _dedupe_files(paths: List[str]) -> List[str]:
+    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
+    unique_files: List[str] = []
+    # tuple of (inode, device) for each file without following symlinks
+    visited: Set[Tuple[int, int]] = set()
+    for path in paths:
+        try:
+            stat_info = os.lstat(path)
+        except OSError as e:
+            _log_file_access_issue(e, path)
+            continue
+        file_id = _file_id(stat_info)
+        if file_id not in visited:
+            unique_files.append(path)
+            visited.add(file_id)
+    return unique_files

-    answer = []
-    for search_file in search_files:
-        answer.extend(found_files[search_file])
-
-    return answer
+def _find_max_depth(
+    roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
+) -> List[str]:
+    """See ``find`` for the public API."""
+    # We optimize for the common case of simple filename only patterns: a single, combined regex
+    # is used. For complex patterns that include path components, we use a slower glob call from
+    # every directory we visit within max_depth.
+    filename_only_patterns = {
+        f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
+    }
+    complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
+    regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
+    # Ordered dictionary that keeps track of what pattern found which files
+    matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}
+    # Ensure returned paths are always absolute
+    roots = [os.path.abspath(r) for r in roots]
+    # Breadth-first search queue. Each element is a tuple of (depth, dir)
+    dir_queue: Deque[Tuple[int, str]] = collections.deque()
+    # Set of visited directories. Each element is a tuple of (inode, device)
+    visited_dirs: Set[Tuple[int, int]] = set()
+
+    for root in roots:
+        try:
+            stat_root = os.stat(root)
+        except OSError as e:
+            _log_file_access_issue(e, root)
+            continue
+        dir_id = _file_id(stat_root)
+        if dir_id not in visited_dirs:
+            dir_queue.appendleft((0, root))
+            visited_dirs.add(dir_id)
+
+    while dir_queue:
+        depth, curr_dir = dir_queue.pop()
+        try:
+            dir_iter = os.scandir(curr_dir)
+        except OSError as e:
+            _log_file_access_issue(e, curr_dir)
+            continue
+
+        # Use glob.glob for complex patterns.
+        for pattern_name, pattern in complex_patterns.items():
+            matched_paths[pattern_name].extend(
+                path for path in glob.glob(os.path.join(curr_dir, pattern))
+            )
+
+        # List of subdirectories by path and (inode, device) tuple
+        subdirs: List[Tuple[str, Tuple[int, int]]] = []
+
+        with dir_iter:
+            for dir_entry in dir_iter:
+
+                # Match filename only patterns
+                if filename_only_patterns:
+                    m = regex.match(os.path.normcase(dir_entry.name))
+                    if m:
+                        for pattern_name in filename_only_patterns:
+                            if m.group(pattern_name):
+                                matched_paths[pattern_name].append(dir_entry.path)
+                                break
+
+                # Collect subdirectories
+                if depth >= max_depth:
+                    continue
+
+                try:
+                    if not dir_entry.is_dir(follow_symlinks=True):
+                        continue
+                    if sys.platform == "win32":
+                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
+                        # to call os.stat
+                        stat_info = os.stat(dir_entry.path, follow_symlinks=True)
+                    else:
+                        stat_info = dir_entry.stat(follow_symlinks=True)
+                except OSError as e:
+                    # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
+                    _log_file_access_issue(e, dir_entry.path)
+                    continue
+
+                subdirs.append((dir_entry.path, _file_id(stat_info)))
+
+        # Enqueue subdirectories in a deterministic order
+        if subdirs:
+            subdirs.sort(key=lambda s: os.path.basename(s[0]))
+            for subdir, subdir_id in subdirs:
+                if subdir_id not in visited_dirs:
+                    dir_queue.appendleft((depth + 1, subdir))
+                    visited_dirs.add(subdir_id)
+
+    # Sort the matched paths for deterministic output
+    for paths in matched_paths.values():
+        paths.sort()
+    all_matching_paths = [path for paths in matched_paths.values() for path in paths]
+
+    # We only dedupe files if we have any complex patterns, since only they can match the same file
+    # multiple times
+    return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths


 # Utilities for libraries and headers
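The core optimization in _find_max_depth above is fusing all filename-only patterns into one alternation via fnmatch_translate_multiple, then using the named group to attribute each hit to its pattern. The mechanism in isolation (pattern names mirror the pattern_{i} convention above):

    import fnmatch
    import re

    named = {"pattern_0": "*.so", "pattern_1": "lib*.a"}
    regex = re.compile("|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named.items()))

    m = regex.match("libz.a")
    assert m and m.group("pattern_1")                 # matched the second pattern
    assert regex.match("libz.so").group("pattern_0")  # matched the first pattern
    assert regex.match("readme.txt") is None          # matched neither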
@@ -2209,7 +2322,9 @@ def find_system_libraries(libraries, shared=True):
     return libraries_found


-def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
+def find_libraries(
+    libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
+):
     """Returns an iterable of full paths to libraries found in a root dir.

     Accepts any glob characters accepted by fnmatch:
@@ -2230,6 +2345,8 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
             otherwise for static. Defaults to True.
         recursive (bool): if False search only root folder,
             if True descends top-down from the root. Defaults to False.
+        max_depth (int): if set, don't search below this depth. Cannot be set
+            if recursive is False
         runtime (bool): Windows only option, no-op elsewhere. If true,
             search for runtime shared libs (.DLL), otherwise, search
             for .Lib files. If shared is false, this has no meaning.
@@ -2238,6 +2355,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
     Returns:
         LibraryList: The libraries that have been found
     """
+
     if isinstance(libraries, str):
         libraries = [libraries]
     elif not isinstance(libraries, collections.abc.Sequence):
@@ -2270,8 +2388,10 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
     libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]

     if not recursive:
+        if max_depth:
+            raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
         # If not recursive, look for the libraries directly in root
-        return LibraryList(find(root, libraries, False))
+        return LibraryList(find(root, libraries, recursive=False))

     # To speedup the search for external packages configured e.g. in /usr,
     # perform first non-recursive search in root/lib then in root/lib64 and
@@ -2289,7 +2409,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
         if found_libs:
             break
     else:
-        found_libs = find(root, libraries, True)
+        found_libs = find(root, libraries, recursive=True, max_depth=max_depth)

     return LibraryList(found_libs)
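A usage sketch of the new max_depth parameter, under the semantics defined above (paths are illustrative):

    from llnl.util.filesystem import find_libraries

    # bounded recursive walk: look at /usr and at most two directory levels below it
    libs = find_libraries(["libcrypto", "libssl"], root="/usr", recursive=True, max_depth=2)

    # recursive=False together with max_depth is rejected:
    # find_libraries("libfoo", "/usr", recursive=False, max_depth=1)  -> ValueError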
@@ -5,14 +5,17 @@

 import collections.abc
 import contextlib
+import fnmatch
 import functools
 import itertools
 import os
+import re
 import sys
 import traceback
+import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Iterable, List, Tuple, TypeVar
+from typing import Callable, Dict, Iterable, List, Tuple, TypeVar

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -858,6 +861,19 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
     return line_list


+if sys.version_info >= (3, 9):
+    PatternStr = re.Pattern[str]
+else:
+    PatternStr = typing.Pattern[str]
+
+
+def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
+    """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
+    names, and values are filename patterns. The output is a regex that matches any of the
+    patterns in order, and named capture groups are used to identify which pattern matched."""
+    return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
+
+
 @contextlib.contextmanager
 def nullcontext(*args, **kwargs):
     """Empty context manager.
@@ -870,15 +886,6 @@ class UnhashableArguments(TypeError):
     """Raise when an @memoized function receives unhashable arg or kwarg values."""


-def enum(**kwargs):
-    """Return an enum-like class.
-
-    Args:
-        **kwargs: explicit dictionary of enums
-    """
-    return type("Enum", (object,), kwargs)
-
-
 T = TypeVar("T")


@@ -914,6 +921,21 @@ def ensure_last(lst, *elements):
     lst.append(lst.pop(lst.index(elt)))


+class Const:
+    """Class level constant, raises when trying to set the attribute"""
+
+    __slots__ = ["value"]
+
+    def __init__(self, value):
+        self.value = value
+
+    def __get__(self, instance, owner):
+        return self.value
+
+    def __set__(self, instance, value):
+        raise TypeError(f"Const value does not support assignment [value={self.value}]")
+
+
 class TypedMutableSequence(collections.abc.MutableSequence):
     """Base class that behaves like a list, just with a different type.

@@ -1018,3 +1040,42 @@ def __init__(self, callback):

     def __get__(self, instance, owner):
         return self.callback(owner)
+
+
+class DeprecatedProperty:
+    """Data descriptor to error or warn when a deprecated property is accessed.
+
+    Derived classes must define a factory method to return an adaptor for the deprecated
+    property, if the descriptor is not set to error.
+    """
+
+    __slots__ = ["name"]
+
+    #: 0 - Nothing
+    #: 1 - Warning
+    #: 2 - Error
+    error_lvl = 0
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            return self
+
+        if self.error_lvl == 1:
+            warnings.warn(
+                f"accessing the '{self.name}' property of '{instance}', which is deprecated"
+            )
+        elif self.error_lvl == 2:
+            raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")
+
+        return self.factory(instance, owner)
+
+    def __set__(self, instance, value):
+        raise TypeError(
+            f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
+        )
+
+    def factory(self, instance, owner):
+        raise NotImplementedError("must be implemented by derived classes")
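A minimal sketch of how a subclass of the new DeprecatedProperty descriptor might look, assuming the class above is in scope; the factory hook returns whatever adaptor should stand in for the deprecated attribute (DeprecatedList and _data below are hypothetical, not Spack code):

    import warnings

    class DeprecatedList(DeprecatedProperty):
        def factory(self, instance, owner):
            return list(instance._data)  # hypothetical adaptor

    class Holder:
        data = DeprecatedList("data")

        def __init__(self):
            self._data = (1, 2, 3)

    DeprecatedList.error_lvl = 1  # warn on access instead of raising
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        assert Holder().data == [1, 2, 3]
        assert "deprecated" in str(w[0].message)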
@@ -263,7 +263,9 @@ def match_to_ansi(match):
                 f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
             )

-        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
+        color_number = colors.get(color_code, "")
+        semi = ";" if color_number else ""
+        ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
         if text:
             return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
         else:
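What the replacement lines above fix: when colors.get(color_code, "") is empty, the old f-string left a trailing semicolon in the SGR escape sequence. A condensed illustration:

    def sgr(style: int, color_number: str) -> str:
        semi = ";" if color_number else ""
        return f"\x1b[{style}{semi}{color_number}m"

    assert sgr(1, "34") == "\x1b[1;34m"  # bold blue
    assert sgr(1, "") == "\x1b[1m"       # bold only -- no stray ';'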
@@ -10,7 +10,6 @@
 import errno
 import io
-import multiprocessing
 import multiprocessing.connection
 import os
 import re
 import select
@@ -19,9 +18,10 @@
 import threading
 import traceback
 from contextlib import contextmanager
+from multiprocessing.connection import Connection
 from threading import Thread
 from types import ModuleType
-from typing import Optional
+from typing import Callable, Optional, Union

 import llnl.util.tty as tty

@@ -345,49 +345,6 @@ def close(self):
         self.file.close()


-class MultiProcessFd:
-    """Return an object which stores a file descriptor and can be passed as an
-    argument to a function run with ``multiprocessing.Process``, such that
-    the file descriptor is available in the subprocess."""
-
-    def __init__(self, fd):
-        self._connection = None
-        self._fd = None
-        if sys.version_info >= (3, 8):
-            self._connection = multiprocessing.connection.Connection(fd)
-        else:
-            self._fd = fd
-
-    @property
-    def fd(self):
-        if self._connection:
-            return self._connection._handle
-        else:
-            return self._fd
-
-    def close(self):
-        if self._connection:
-            self._connection.close()
-        else:
-            os.close(self._fd)
-
-
-def close_connection_and_file(multiprocess_fd, file):
-    # MultiprocessFd is intended to transmit a FD
-    # to a child process, this FD is then opened to a Python File object
-    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
-    # multiprocessing.connection.Connection; Connection closes the FD
-    # when it is deleted, and prints a warning about duplicate closure if
-    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
-    # simple FD; closing the FD here appears to conflict with
-    # closure of the File object (in < 3.8 that is). Therefore this needs
-    # to choose whether to close the File or the Connection.
-    if sys.version_info >= (3, 8):
-        multiprocess_fd.close()
-    else:
-        file.close()
-
-
 @contextmanager
 def replace_environment(env):
     """Replace the current environment (`os.environ`) with `env`.
@@ -545,22 +502,20 @@ def __enter__(self):
         # forcing debug output.
         self._saved_debug = tty._debug

-        # OS-level pipe for redirecting output to logger
-        read_fd, write_fd = os.pipe()
-
-        read_multiprocess_fd = MultiProcessFd(read_fd)
+        # Pipe for redirecting output to logger
+        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)

-        # Multiprocessing pipe for communication back from the daemon
+        # Pipe for communication back from the daemon
         # Currently only used to save echo value between uses
-        self.parent_pipe, child_pipe = multiprocessing.Pipe()
+        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)

         # Sets a daemon that writes to file what it reads from a pipe
         try:
             # need to pass this b/c multiprocessing closes stdin in child.
-            input_multiprocess_fd = None
+            input_fd = None
             try:
                 if sys.stdin.isatty():
-                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
+                    input_fd = Connection(os.dup(sys.stdin.fileno()))
             except BaseException:
                 # just don't forward input if this fails
                 pass
@@ -569,9 +524,9 @@ def __enter__(self):
             self.process = multiprocessing.Process(
                 target=_writer_daemon,
                 args=(
-                    input_multiprocess_fd,
-                    read_multiprocess_fd,
-                    write_fd,
+                    input_fd,
+                    read_fd,
+                    self.write_fd,
                     self.echo,
                     self.log_file,
                     child_pipe,
@@ -582,9 +537,9 @@ def __enter__(self):
             self.process.start()

         finally:
-            if input_multiprocess_fd:
-                input_multiprocess_fd.close()
-            read_multiprocess_fd.close()
+            if input_fd:
+                input_fd.close()
+            read_fd.close()

         # Flush immediately before redirecting so that anything buffered
         # goes to the original stream
@@ -602,9 +557,9 @@ def __enter__(self):
             self._saved_stderr = os.dup(sys.stderr.fileno())

             # redirect to the pipe we created above
-            os.dup2(write_fd, sys.stdout.fileno())
-            os.dup2(write_fd, sys.stderr.fileno())
-            os.close(write_fd)
+            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
+            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
+            self.write_fd.close()

         else:
             # Handle I/O the Python way. This won't redirect lower-level
@@ -617,7 +572,7 @@ def __enter__(self):
             self._saved_stderr = sys.stderr

             # create a file object for the pipe; redirect to it.
-            pipe_fd_out = os.fdopen(write_fd, "w")
+            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
             sys.stdout = pipe_fd_out
             sys.stderr = pipe_fd_out

@@ -653,6 +608,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         else:
             sys.stdout = self._saved_stdout
             sys.stderr = self._saved_stderr
+            self.write_fd.close()

         # print log contents in parent if needed.
         if self.log_file.write_in_parent:
@@ -866,14 +822,14 @@ def force_echo(self):


 def _writer_daemon(
-    stdin_multiprocess_fd,
-    read_multiprocess_fd,
-    write_fd,
-    echo,
-    log_file_wrapper,
-    control_pipe,
-    filter_fn,
-):
+    stdin_fd: Optional[Connection],
+    read_fd: Connection,
+    write_fd: Connection,
+    echo: bool,
+    log_file_wrapper: FileWrapper,
+    control_fd: Connection,
+    filter_fn: Optional[Callable[[str], str]],
+) -> None:
     """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

     The daemon receives output from the parent process and writes it both
@@ -910,43 +866,37 @@ def _writer_daemon(
     ``StringIO`` in the parent. This is mainly for testing.

     Arguments:
-        stdin_multiprocess_fd (int): input from the terminal
-        read_multiprocess_fd (int): pipe for reading from parent's redirected
-            stdout
-        echo (bool): initial echo setting -- controlled by user and
-            preserved across multiple writer daemons
-        log_file_wrapper (FileWrapper): file to log all output
-        control_pipe (Pipe): multiprocessing pipe on which to send control
-            information to the parent
-        filter_fn (callable, optional): function to filter each line of output
+        stdin_fd: optional input from the terminal
+        read_fd: pipe for reading from parent's redirected stdout
+        echo: initial echo setting -- controlled by user and preserved across multiple writer
+            daemons
+        log_file_wrapper: file to log all output
+        control_pipe: multiprocessing pipe on which to send control information to the parent
+        filter_fn: optional function to filter each line of output

     """
-    # If this process was forked, then it will inherit file descriptors from
-    # the parent process. This process depends on closing all instances of
-    # write_fd to terminate the reading loop, so we close the file descriptor
-    # here. Forking is the process spawning method everywhere except Mac OS
-    # for Python >= 3.8 and on Windows
-    if sys.version_info < (3, 8) or sys.platform != "darwin":
-        os.close(write_fd)
+    # This process depends on closing all instances of write_pipe to terminate the reading loop
+    write_fd.close()

     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
     #    that prevents unbuffered text I/O.
     # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
-    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
+    # 3. closefd=False because Connection has "ownership"
+    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)

-    if stdin_multiprocess_fd:
-        stdin = os.fdopen(stdin_multiprocess_fd.fd)
+    if stdin_fd:
+        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
     else:
-        stdin = None
+        stdin_file = None

     # list of streams to select from
-    istreams = [in_pipe, stdin] if stdin else [in_pipe]
+    istreams = [read_file, stdin_file] if stdin_file else [read_file]
     force_echo = False  # parent can force echo for certain output

     log_file = log_file_wrapper.unwrap()

     try:
-        with keyboard_input(stdin) as kb:
+        with keyboard_input(stdin_file) as kb:
             while True:
                 # fix the terminal settings if we recently came to
                 # the foreground
@@ -959,12 +909,12 @@ def _writer_daemon(
                 # Allow user to toggle echo with 'v' key.
                 # Currently ignores other chars.
                 # only read stdin if we're in the foreground
-                if stdin in rlist and not _is_background_tty(stdin):
+                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                     # it's possible to be backgrounded between the above
                     # check and the read, so we ignore SIGTTIN here.
                     with ignore_signal(signal.SIGTTIN):
                         try:
-                            if stdin.read(1) == "v":
+                            if stdin_file.read(1) == "v":
                                 echo = not echo
                         except IOError as e:
                             # If SIGTTIN is ignored, the system gives EIO
@@ -973,13 +923,13 @@ def _writer_daemon(
                             if e.errno != errno.EIO:
                                 raise

-                if in_pipe in rlist:
+                if read_file in rlist:
                     line_count = 0
                     try:
                         while line_count < 100:
                             # Handle output from the calling process.
                             try:
-                                line = _retry(in_pipe.readline)()
+                                line = _retry(read_file.readline)()
                             except UnicodeDecodeError:
                                 # installs like --test=root gpgme produce non-UTF8 logs
                                 line = "<line lost: output was not encoded as UTF-8>\n"
@@ -1008,7 +958,7 @@ def _writer_daemon(
                             if xoff in controls:
                                 force_echo = False

-                            if not _input_available(in_pipe):
+                            if not _input_available(read_file):
                                 break
                     finally:
                         if line_count > 0:
@@ -1023,14 +973,14 @@ def _writer_daemon(
     finally:
         # send written data back to parent if we used a StringIO
         if isinstance(log_file, io.StringIO):
-            control_pipe.send(log_file.getvalue())
+            control_fd.send(log_file.getvalue())
         log_file_wrapper.close()
-        close_connection_and_file(read_multiprocess_fd, in_pipe)
-        if stdin_multiprocess_fd:
-            close_connection_and_file(stdin_multiprocess_fd, stdin)
+        read_fd.close()
+        if stdin_fd:
+            stdin_fd.close()

         # send echo value back to the parent so it can be preserved.
-        control_pipe.send(echo)
+        control_fd.send(echo)


 def _retry(function):
@@ -1072,3 +1022,22 @@ def wrapped(*args, **kwargs):

 def _input_available(f):
     return f in select.select([f], [], [], 0)[0]


+LogType = Union[nixlog, winlog]
+
+
+def print_message(logger: LogType, msg: str, verbose: bool = False):
+    """Print the message to the log, optionally echoing.
+
+    Args:
+        logger: instance of the output logger (e.g. nixlog or winlog)
+        msg: message being output
+        verbose: ``True`` displays verbose output, ``False`` suppresses
+            it (``False`` is default)
+    """
+    if verbose:
+        with logger.force_echo():
+            tty.info(msg, format="g")
+    else:
+        tty.info(msg, format="g")
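The shape of the refactor above: multiprocessing.Pipe(duplex=False) returns two Connection objects that can be handed to a child process directly, replacing the removed MultiProcessFd wrapper, and os.fdopen(..., closefd=False) layers buffered text I/O on top while the Connection keeps ownership of the descriptor. A minimal POSIX-only sketch of that pattern (not Spack code):

    import multiprocessing
    import os

    def child(read_conn, write_conn):
        write_conn.close()  # close the inherited copy of the write end
        with os.fdopen(read_conn.fileno(), "r", closefd=False) as f:
            print("child read:", f.readline().rstrip())

    if __name__ == "__main__":
        read_conn, write_conn = multiprocessing.Pipe(duplex=False)
        p = multiprocessing.Process(target=child, args=(read_conn, write_conn))
        p.start()
        with os.fdopen(write_conn.fileno(), "w", closefd=False) as f:
            f.write("hello\n")
        write_conn.close()  # lets the child's reads eventually see EOF
        p.join()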
@@ -11,7 +11,7 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.23.0.dev0"
+__version__ = "0.24.0.dev0"
 spack_version = __version__


@@ -69,4 +69,15 @@ def get_version() -> str:
     return spack_version


-__all__ = ["spack_version_info", "spack_version", "get_version", "get_spack_commit"]
+def get_short_version() -> str:
+    """Short Spack version."""
+    return f"{spack_version_info[0]}.{spack_version_info[1]}"
+
+
+__all__ = [
+    "spack_version_info",
+    "spack_version",
+    "get_version",
+    "get_spack_commit",
+    "get_short_version",
+]
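With the bump above, the new helper reports just the release series; a usage sketch (assuming the module is imported as spack, per the hunk's location in spack/__init__.py):

    import spack

    spack.get_version()        # full version string, possibly with a commit suffix
    spack.get_short_version()  # "0.24" while __version__ is "0.24.0.dev0"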
@@ -714,17 +714,16 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

-        # values are either Value objects (for conditional values) or the values themselves
+        # values are either ConditionalValue objects or the values themselves
         build_system_names = set(
-            v.value if isinstance(v, spack.variant.Value) else v
+            v.value if isinstance(v, spack.variant.ConditionalValue) else v
             for _, variant in pkg_cls.variant_definitions("build_system")
             for v in variant.values
         )
         builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]

-        module = pkg_cls.module
         has_builders_in_package_py = any(
-            getattr(module, name, False) for name in builder_cls_names
+            spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
         )
         if not has_builders_in_package_py:
             continue
@@ -806,7 +805,7 @@ def _uses_deprecated_globals(pkgs, error_cls):

     file = spack.repo.PATH.filename_for_package_name(pkg_name)
     tree = ast.parse(open(file).read())
-    visitor = DeprecatedMagicGlobals(("std_cmake_args",))
+    visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
     visitor.visit(tree)
     if visitor.references_to_globals:
         errors.append(
@@ -35,6 +35,7 @@
 import spack.caches
 import spack.config as config
 import spack.database as spack_db
+import spack.deptypes as dt
 import spack.error
 import spack.hash_types as ht
 import spack.hooks
@@ -86,6 +87,8 @@
 from spack.stage import Stage
 from spack.util.executable import which

+from .enums import InstallRecordStatus
+
 BUILD_CACHE_RELATIVE_PATH = "build_cache"
 BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"

@@ -251,7 +254,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

         spec_list = [
             s
-            for s in db.query_local(installed=any, in_buildcache=any)
+            for s in db.query_local(installed=InstallRecordStatus.ANY)
             if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
         ]

@@ -712,15 +715,32 @@ def get_buildfile_manifest(spec):
     return data


-def hashes_to_prefixes(spec):
-    """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
-    return {
-        s.dag_hash(): str(s.prefix)
+def deps_to_relocate(spec):
+    """Return the transitive link and direct run dependencies of the spec.
+
+    This is a special traversal for dependencies we need to consider when relocating a package.
+
+    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
+    we need to rewrite those locations when dependencies are in a different place at install time
+    than they were at build time.
+
+    This traversal covers transitive link dependencies and direct run dependencies because:
+
+    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
+       dependency libraries.
+    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
+       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
+
+    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
+    """
+    deps = [
         s
         for s in itertools.chain(
             spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
         if not s.external
-    }
+    ]
+    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())


 def get_buildinfo_dict(spec):
@@ -736,7 +756,7 @@ def get_buildinfo_dict(spec):
         "relocate_binaries": manifest["binary_to_relocate"],
         "relocate_links": manifest["link_to_relocate"],
         "hardlinks_deduped": manifest["hardlinks_deduped"],
-        "hash_to_prefix": hashes_to_prefixes(spec),
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
     }

@@ -1164,6 +1184,9 @@ def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool)
         self.tmpdir: str
         self.executor: concurrent.futures.Executor

+        # Verify if the mirror meets the requirements to push
+        self.mirror.ensure_mirror_usable("push")
+
     def __enter__(self):
         self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
         self._executor = spack.util.parallel.make_concurrent_executor()
@@ -1631,7 +1654,6 @@ def _oci_push(
     Dict[str, spack.oci.oci.Blob],
     List[Tuple[Spec, BaseException]],
 ]:
-
     # Spec dag hash -> blob
     checksums: Dict[str, spack.oci.oci.Blob] = {}

@@ -2201,11 +2223,36 @@ def relocate_package(spec):
     # First match specific prefix paths. Possibly the *local* install prefix
     # of some dependency is in an upstream, so we cannot assume the original
     # spack store root can be mapped uniformly to the new spack store root.
-    for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
-        if dag_hash in hash_to_old_prefix:
-            old_dep_prefix = hash_to_old_prefix[dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
-            prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix
+    #
+    # If the spec is spliced, we need to handle the simultaneous mapping
+    # from the old install_tree to the new install_tree and from the build_spec
+    # to the spliced spec.
+    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
+    # by checking for spliced-in nodes by checking for nodes not in the build_spec
+    # without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
+    # in the context of the relevant root spec. This ensures that the analog for a spec s
+    # is the spec that s replaced when we spliced.
+    relocation_specs = deps_to_relocate(spec)
+    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
+    for s in relocation_specs:
+        analog = s
+        if id(s) not in build_spec_ids:
+            analogs = [
+                d
+                for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
+                if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
+            ]
+            if analogs:
+                # Prefer same-name analogs and prefer higher versions
+                # This matches the preferences in Spec.splice, so we will find same node
+                analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
+
+        lookup_dag_hash = analog.dag_hash()
+        if lookup_dag_hash in hash_to_old_prefix:
+            old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
+            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)

     # Only then add the generic fallback of install prefix -> install prefix.
     prefix_to_prefix_text[old_prefix] = new_prefix
@@ -2520,7 +2567,13 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
     return pkg_prefix


-def install_root_node(spec, unsigned=False, force=False, sha256=None):
+def install_root_node(
+    spec: spack.spec.Spec,
+    unsigned=False,
+    force: bool = False,
+    sha256: Optional[str] = None,
+    allow_missing: bool = False,
+) -> None:
     """Install the root node of a concrete spec from a buildcache.

     Checking the sha256 sum of a node before installation is usually needed only
@@ -2529,11 +2582,10 @@ def install_root_node(

     Args:
         spec: spec to be installed (note that only the root node will be installed)
-        unsigned (bool): if True allows installing unsigned binaries
-        force (bool): force installation if the spec is already present in the
-            local store
-        sha256 (str): optional sha256 of the binary package, to be checked
-            before installation
+        unsigned: if True allows installing unsigned binaries
+        force: force installation if the spec is already present in the local store
+        sha256: optional sha256 of the binary package, to be checked before installation
+        allow_missing: when true, allows installing a node with missing dependencies
     """
     # Early termination
     if spec.external or spec.virtual:
@@ -2543,10 +2595,10 @@ def install_root_node(
         warnings.warn("Package for spec {0} already installed.".format(spec.format()))
         return

-    download_result = download_tarball(spec, unsigned)
+    download_result = download_tarball(spec.build_spec, unsigned)
     if not download_result:
         msg = 'download of binary cache file for spec "{0}" failed'
-        raise RuntimeError(msg.format(spec.format()))
+        raise RuntimeError(msg.format(spec.build_spec.format()))

     if sha256:
         checker = spack.util.crypto.Checker(sha256)
@@ -2565,8 +2617,13 @@ def install_root_node(
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
-        spec.package.windows_establish_runtime_linkage()
+        if spec.spliced:  # overwrite old metadata with new
+            spack.store.STORE.layout.write_spec(
+                spec, spack.store.STORE.layout.spec_file_path(spec)
+            )
         spack.hooks.post_install(spec, False)
-        spack.store.STORE.db.add(spec)
+        spack.store.STORE.db.add(spec, allow_missing=allow_missing)


 def install_single_spec(spec, unsigned=False, force=False):
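deps_to_relocate() above leans on llnl.util.lang.dedupe with a key function (per the hunk itself) to drop the overlap between transitive link dependencies and direct run dependencies while preserving traversal order. The behavior it depends on, in isolation (FakeSpec is a toy stand-in, not a real Spec):

    from llnl.util.lang import dedupe

    class FakeSpec:
        def __init__(self, name, h):
            self.name, self._hash = name, h

        def dag_hash(self):
            return self._hash

    link_deps = [FakeSpec("zlib", "aaa"), FakeSpec("openssl", "bbb")]
    run_deps = [FakeSpec("zlib", "aaa"), FakeSpec("python", "ccc")]  # zlib repeats

    unique = list(dedupe(link_deps + run_deps, key=lambda s: s.dag_hash()))
    assert [s.name for s in unique] == ["zlib", "openssl", "python"]  # first occurrence wins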
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import glob
import importlib
import os.path
import re

@@ -60,10 +61,19 @@ def _try_import_from_store(
        python, *_ = candidate_spec.dependencies("python-venv")
    else:
        python, *_ = candidate_spec.dependencies("python")
    module_paths = [
        os.path.join(candidate_spec.prefix, python.package.purelib),
        os.path.join(candidate_spec.prefix, python.package.platlib),
    ]

    # if python is installed, ask it for the layout
    if python.installed:
        module_paths = [
            os.path.join(candidate_spec.prefix, python.package.purelib),
            os.path.join(candidate_spec.prefix, python.package.platlib),
        ]
    # otherwise search for the site-packages directory
    # (clingo from binaries with truncated python-venv runtime)
    else:
        module_paths = glob.glob(
            os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
        )
    path_before = list(sys.path)

    # NOTE: try module_paths first and last, last allows an existing version in path
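The new else-branch recovers a usable module search path by globbing when the Python dependency is not installed locally. The same idea as a standalone sketch (the prefix path is illustrative, not from the diff):

    import glob
    import os.path
    import sys

    prefix = "/opt/spack/bootstrap/clingo"  # illustrative install prefix
    module_paths = glob.glob(os.path.join(prefix, "lib", "python*", "site-packages"))

    # Try the discovered directories first; the surrounding code also retries
    # them last, so an already-importable copy on sys.path can win.
    sys.path = module_paths + sys.path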
@@ -37,6 +37,7 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.mirror
import spack.platforms
import spack.spec
import spack.store

@@ -44,7 +45,6 @@
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller


@@ -91,12 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

        # Promote (relative) paths to file urls
        url = conf["info"]["url"]
        if spack.util.url.is_path_instead_of_url(url):
            if not os.path.isabs(url):
                url = os.path.join(self.metadata_dir, url)
            url = spack.util.url.path_to_file_url(url)
        self.url = url
        self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:

@@ -175,7 +170,15 @@ def _install_by_hash(
    query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
    for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
        spack.binary_distribution.install_root_node(
            match, unsigned=True, force=True, sha256=pkg_sha256
            # allow_missing is true since when bootstrapping clingo we truncate runtime
            # deps such as gcc-runtime, since we link libstdc++ statically, and the other
            # further runtime deps are loaded by the Python interpreter. This just silences
            # warnings about missing dependencies.
            match,
            unsigned=True,
            force=True,
            sha256=pkg_sha256,
            allow_missing=True,
        )


def _install_and_test(

@@ -599,7 +602,10 @@ def bootstrapping_sources(scope: Optional[str] = None):
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
        try:
            with open(metadata_yaml, encoding="utf-8") as stream:
                current.update(spack.util.spack_yaml.load(stream))
            list_of_sources.append(current)
        except OSError:
            pass
    return list_of_sources
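The last hunk above makes bootstrapping_sources skip sources whose metadata file cannot be read, rather than failing outright. A minimal standalone sketch of that pattern; the function name and entries list are illustrative, not part of the diff:

    import copy
    import os.path

    import spack.util.spack_yaml

    def tolerant_sources(entries, metadata_filename="metadata.yaml"):
        # Collect bootstrap sources, silently skipping unreadable metadata files.
        sources = []
        for entry in entries:
            current = copy.copy(entry)
            metadata_yaml = os.path.join(entry["metadata"], metadata_filename)
            try:
                with open(metadata_yaml, encoding="utf-8") as stream:
                    current.update(spack.util.spack_yaml.load(stream))
                sources.append(current)
            except OSError:
                pass  # missing or unreadable metadata: skip this source
        return sources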
@@ -44,6 +44,7 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from multiprocessing.connection import Connection
from typing import Callable, Dict, List, Optional, Set, Tuple

import archspec.cpu

@@ -54,9 +55,7 @@
from llnl.util.lang import dedupe, stable_partition
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.build_systems._checks
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python

@@ -91,7 +90,7 @@
)
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, path_from_modules
from spack.util.module_cmd import load_module

#
# This can be set by the user to globally disable parallel builds.

@@ -617,13 +616,11 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    """
    module = ModuleChangePropagator(pkg)

    if context == Context.BUILD:
        module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
        module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
        module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
    module.make_jobs = jobs
    if context == Context.BUILD:
        module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
        module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    # TODO: make these build deps that can be installed if not found.
    module.make = MakeExecutable("make", jobs)

@@ -792,21 +789,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


def get_rpaths(pkg):
    """Get a list of all the rpaths for a package."""
    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
    deps = get_rpath_deps(pkg)
    rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
    rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
        if mod_rpath:
            rpaths.append(mod_rpath)
    return list(dedupe(filter_system_paths(rpaths)))


def load_external_modules(pkg):
    """Traverse a package's spec DAG and load any external modules.

@@ -1063,6 +1045,12 @@ def set_all_package_py_globals(self):
            # This includes runtime dependencies, also runtime deps of direct build deps.
            set_package_py_globals(pkg, context=Context.RUN)

        # Looping over the set of packages a second time
        # ensures all globals are loaded into the module space prior to
        # any package setup. This guarantees package setup methods have
        # access to expected module level definitions such as "spack_cc"
        for dspec, flag in chain(self.external, self.nonexternal):
            pkg = dspec.package
            for spec in dspec.dependents():
                # Note: some specs have dependents that are unreachable from the root, so avoid
                # setting globals for those.

@@ -1072,6 +1060,15 @@ def set_all_package_py_globals(self):
                pkg.setup_dependent_package(dependent_module, spec)
                dependent_module.propagate_changes_to_mro()

        if self.context == Context.BUILD:
            pkg = self.specs[0].package
            module = ModuleChangePropagator(pkg)
            # std_cmake_args is not sufficiently static to be defined
            # in set_package_py_globals and is deprecated so its handled
            # here as a special case
            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
            module.propagate_changes_to_mro()

    def get_env_modifications(self) -> EnvironmentModifications:
        """Returns the environment variable modifications for the given input specs and context.
        Environment modifications include:

@@ -1141,40 +1138,14 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
        env.prepend_path("PATH", bin_dir)


def get_cmake_prefix_path(pkg):
    # Note that unlike modifications_from_dependencies, this does not include
    # any edits to CMAKE_PREFIX_PATH defined in custom
    # setup_dependent_build_environment implementations of dependency packages
    build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
    build_link_deps = build_deps | link_deps
    spack_built = []
    externals = []
    # modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
    # prepending all externals and then all non-externals
    for dspec in pkg.spec.traverse(root=False, order="post"):
        if dspec in build_link_deps:
            if dspec.external:
                externals.insert(0, dspec)
            else:
                spack_built.insert(0, dspec)

    ordered_build_link_deps = spack_built + externals
    cmake_prefix_path_entries = []
    for spec in ordered_build_link_deps:
        cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)

    return filter_system_paths(cmake_prefix_path_entries)


def _setup_pkg_and_run(
    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
    function: Callable,
    kwargs: Dict,
    write_pipe: multiprocessing.connection.Connection,
    input_multiprocess_fd: Optional[MultiProcessFd],
    jsfd1: Optional[MultiProcessFd],
    jsfd2: Optional[MultiProcessFd],
    write_pipe: Connection,
    input_pipe: Optional[Connection],
    jsfd1: Optional[Connection],
    jsfd2: Optional[Connection],
):
    """Main entry point in the child process for Spack builds.

@@ -1216,13 +1187,12 @@ def _setup_pkg_and_run(
    context: str = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to
        # open(os.devnull) to prevent our process and its parent from
        # simultaneously reading from the original stdin. But, we assume
        # that the parent process is not going to read from it till we
        # are done with the child, so we undo Python's precaution.
        if input_multiprocess_fd is not None:
            sys.stdin = os.fdopen(input_multiprocess_fd.fd)
        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
        # process and its parent from simultaneously reading from the original stdin. But, we
        # assume that the parent process is not going to read from it till we are done with the
        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
        if input_pipe is not None:
            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)

        pkg = serialized_pkg.restore()

@@ -1245,7 +1215,7 @@ def _setup_pkg_and_run(
        # objects can't be sent to the parent.
        exc_type = type(e)
        tb = e.__traceback__
        tb_string = traceback.format_exception(exc_type, e, tb)
        tb_string = "".join(traceback.format_exception(exc_type, e, tb))

        # build up some context from the offending package so we can
        # show that, too.

@@ -1291,8 +1261,8 @@ def _setup_pkg_and_run(

    finally:
        write_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_pipe is not None:
            input_pipe.close()


def start_build_process(pkg, function, kwargs):

@@ -1319,23 +1289,9 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

    This uses `multiprocessing.Process` to create the child process. The
    mechanism used to create the process differs on different operating
    systems and for different versions of Python. In some cases "fork"
    is used (i.e. the "fork" system call) and some cases it starts an
    entirely new Python interpreter process (in the docs this is referred
    to as the "spawn" start method). Breaking it down by OS:

    - Linux always uses fork.
    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
    - Windows always uses the "spawn" start method.

    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    input_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None

@@ -1344,14 +1300,13 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
            input_fd = os.dup(sys.stdin.fileno())
            input_multiprocess_fd = MultiProcessFd(input_fd)
            input_fd = Connection(os.dup(sys.stdin.fileno()))
        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
                jobserver_fd2 = MultiProcessFd(int(m.group(2)))
                jobserver_fd1 = Connection(int(m.group(1)))
                jobserver_fd2 = Connection(int(m.group(2)))

    p = multiprocessing.Process(
        target=_setup_pkg_and_run,

@@ -1360,7 +1315,7 @@ def child_fun():
            function,
            kwargs,
            write_pipe,
            input_multiprocess_fd,
            input_fd,
            jobserver_fd1,
            jobserver_fd2,
        ),

@@ -1380,8 +1335,8 @@ def child_fun():

    finally:
        # Close the input stream in the parent process
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_fd is not None:
            input_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"

@@ -1419,7 +1374,7 @@ def exitcode_msg(p):
    return child_result


CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)


def get_package_context(traceback, context=3):
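The hunks above retire llnl.util.tty.log.MultiProcessFd in favor of multiprocessing.connection.Connection for both stdin forwarding and the GNU make jobserver hand-off. A standalone sketch of the two patterns under the same assumptions (the MAKEFLAGS value is illustrative):

    import os
    import re
    import sys
    from multiprocessing.connection import Connection

    # Forward a duplicate of the parent's stdin to the child as a Connection.
    input_fd = None
    if sys.platform != "win32" and sys.stdin.isatty():
        input_fd = Connection(os.dup(sys.stdin.fileno()))

    # In the child, wrap the descriptor without taking ownership: closefd=False
    # because the Connection will close the underlying fd itself.
    if input_fd is not None:
        stdin_stream = os.fdopen(input_fd.fileno(), closefd=False)

    # Parse the jobserver pipe descriptors that make advertises via MAKEFLAGS.
    mflags = "-j8 --jobserver-auth=3,4"  # illustrative value
    m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
    if m:
        jobserver_read, jobserver_write = int(m.group(1)), int(m.group(2))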
@@ -9,6 +9,7 @@

import spack.builder
import spack.error
import spack.phase_callbacks
import spack.relocate
import spack.spec
import spack.store

@@ -63,7 +64,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):


def ensure_build_dependencies_or_raise(
    spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
    spec: spack.spec.Spec, dependencies: List[str], error_msg: str
):
    """Ensure that some build dependencies are present in the concrete spec.

@@ -71,7 +72,7 @@ def ensure_build_dependencies_or_raise(

    Args:
        spec: concrete spec to be checked.
        dependencies: list of abstract specs to be satisfied
        dependencies: list of package names of required build dependencies
        error_msg: brief error message to be prepended to a longer description

    Raises:

@@ -111,7 +112,7 @@ def execute_build_time_tests(builder: spack.builder.Builder):
    if not builder.pkg.run_tests or not builder.build_time_test_callbacks:
        return

    builder.pkg.tester.phase_tests(builder, "build", builder.build_time_test_callbacks)
    builder.phase_tests("build", builder.build_time_test_callbacks)


def execute_install_time_tests(builder: spack.builder.Builder):

@@ -124,11 +125,11 @@ def execute_install_time_tests(builder: spack.builder.Builder):
    if not builder.pkg.run_tests or not builder.install_time_test_callbacks:
        return

    builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)
    builder.phase_tests("install", builder.install_time_test_callbacks)


class BaseBuilder(spack.builder.Builder):
    """Base class for builders to register common checks"""
class BuilderWithDefaults(spack.builder.Builder):
    """Base class for all specific builders with common callbacks registered."""

    # Check that self.prefix is there after installation
    spack.builder.run_after("install")(sanity_check_prefix)
    spack.phase_callbacks.run_after("install")(sanity_check_prefix)
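With callback registration moved from spack.builder to the new spack.phase_callbacks module, a builder registers common checks the same way BuilderWithDefaults does above. A minimal sketch, with verify_prefix as a hypothetical check function:

    import spack.builder
    import spack.phase_callbacks

    def verify_prefix(builder):
        # Hypothetical check: the install prefix must exist after "install".
        assert builder.prefix is not None

    class MyBuilder(spack.builder.Builder):
        # Run verify_prefix after the "install" phase of every package
        # that uses this builder.
        spack.phase_callbacks.run_after("install")(verify_prefix)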
@@ -6,7 +6,7 @@
import os.path
import stat
import subprocess
from typing import List
from typing import Callable, List, Optional, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty

@@ -15,6 +15,9 @@
import spack.builder
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version

@@ -22,7 +25,7 @@
from spack.version import Version

from ._checks import (
    BaseBuilder,
    BuilderWithDefaults,
    apply_macos_rpath_fixups,
    ensure_build_dependencies_or_raise,
    execute_build_time_tests,

@@ -69,14 +72,14 @@ def flags_to_build_system_args(self, flags):
    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def enable_or_disable(self, *args, **kwargs):
        return self.builder.enable_or_disable(*args, **kwargs)
        return spack.builder.create(self).enable_or_disable(*args, **kwargs)

    def with_or_without(self, *args, **kwargs):
        return self.builder.with_or_without(*args, **kwargs)
        return spack.builder.create(self).with_or_without(*args, **kwargs)


@spack.builder.builder("autotools")
class AutotoolsBuilder(BaseBuilder):
class AutotoolsBuilder(BuilderWithDefaults):
    """The autotools builder encodes the default way of installing software built
    with autotools. It has four phases that can be overridden, if need be:

@@ -157,7 +160,7 @@ class AutotoolsBuilder(BaseBuilder):
    install_libtool_archives = False

    @property
    def patch_config_files(self):
    def patch_config_files(self) -> bool:
        """Whether to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball.

@@ -177,7 +180,7 @@ def patch_config_files(self):
        )

    @property
    def _removed_la_files_log(self):
    def _removed_la_files_log(self) -> str:
        """File containing the list of removed libtool archives"""
        build_dir = self.build_directory
        if not os.path.isabs(self.build_directory):

@@ -185,15 +188,15 @@ def _removed_la_files_log(self):
        return os.path.join(build_dir, "removed_la_files.txt")

    @property
    def archive_files(self):
    def archive_files(self) -> List[str]:
        """Files to archive for packages based on autotools"""
        files = [os.path.join(self.build_directory, "config.log")]
        if not self.install_libtool_archives:
            files.append(self._removed_la_files_log)
        return files

    @spack.builder.run_after("autoreconf")
    def _do_patch_config_files(self):
    @spack.phase_callbacks.run_after("autoreconf")
    def _do_patch_config_files(self) -> None:
        """Some packages ship with older config.guess/config.sub files and need to
        have these updated when installed on a newer architecture.

@@ -294,7 +297,7 @@ def runs_ok(script_abs_path):
        and set the prefix to the directory containing the `config.guess` and
        `config.sub` files.
        """
        raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))
        raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:

@@ -304,8 +307,8 @@ def runs_ok(script_abs_path):
            fs.copy(substitutes[name], abs_path)
            os.chmod(abs_path, mode)

    @spack.builder.run_before("configure")
    def _patch_usr_bin_file(self):
    @spack.phase_callbacks.run_before("configure")
    def _patch_usr_bin_file(self) -> None:
        """On NixOS file is not available in /usr/bin/file. Patch configure
        scripts to use file from path."""

@@ -316,8 +319,8 @@ def _patch_usr_bin_file(self):
            with fs.keep_modification_time(*x.filenames):
                x.filter(regex="/usr/bin/file", repl="file", string=True)

    @spack.builder.run_before("configure")
    def _set_autotools_environment_variables(self):
    @spack.phase_callbacks.run_before("configure")
    def _set_autotools_environment_variables(self) -> None:
        """Many autotools builds use a version of mknod.m4 that fails when
        running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.

@@ -330,8 +333,8 @@ def _set_autotools_environment_variables(self):
        """
        os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

    @spack.builder.run_before("configure")
    def _do_patch_libtool_configure(self):
    @spack.phase_callbacks.run_before("configure")
    def _do_patch_libtool_configure(self) -> None:
        """Patch bugs that propagate from libtool macros into "configure" and
        further into "libtool". Note that patches that can be fixed by patching
        "libtool" directly should be implemented in the _do_patch_libtool method

@@ -358,8 +361,8 @@ def _do_patch_libtool_configure(self):
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

    @spack.builder.run_after("configure")
    def _do_patch_libtool(self):
    @spack.phase_callbacks.run_after("configure")
    def _do_patch_libtool(self) -> None:
        """If configure generates a "libtool" script that does not correctly
        detect the compiler (and patch_libtool is set), patch in the correct
        values for libtool variables.

@@ -507,27 +510,64 @@ def _do_patch_libtool(self):
        )

    @property
    def configure_directory(self):
    def configure_directory(self) -> str:
        """Return the directory where 'configure' resides."""
        return self.pkg.stage.source_path

    @property
    def configure_abs_path(self):
    def configure_abs_path(self) -> str:
        # Absolute path to configure
        configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
        return configure_abs_path

    @property
    def build_directory(self):
    def build_directory(self) -> str:
        """Override to provide another place to build the package"""
        return self.configure_directory

    @spack.builder.run_before("autoreconf")
    def delete_configure_to_force_update(self):
    @spack.phase_callbacks.run_before("autoreconf")
    def delete_configure_to_force_update(self) -> None:
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

    def autoreconf(self, pkg, spec, prefix):
    @property
    def autoreconf_search_path_args(self) -> List[str]:
        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
        of build deps, skips the default path of automake, move external include
        flags to the back, since they might pull in unrelated m4 files shadowing
        spack dependencies."""
        return _autoreconf_search_path_args(self.spec)

    @spack.phase_callbacks.run_after("autoreconf")
    def set_configure_or_die(self) -> None:
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

        Raises:
            RuntimeError: if the "configure" script is not found
        """
        # Check if the "configure" script is there. If not raise a RuntimeError.
        if not os.path.exists(self.configure_abs_path):
            msg = "configure script not found in {0}"
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
        globals_for_pkg.configure = Executable(self.configure_abs_path)
        globals_for_pkg.propagate_changes_to_mro()

    def configure_args(self) -> List[str]:
        """Return the list of all the arguments that must be passed to configure,
        except ``--prefix`` which will be pre-pended to the list.
        """
        return []

    def autoreconf(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Not needed usually, configure should be already there"""

        # If configure exists nothing needs to be done

@@ -554,39 +594,12 @@ def autoreconf(self, pkg, spec, prefix):
        autoreconf_args += self.autoreconf_extra_args
        self.pkg.module.autoreconf(*autoreconf_args)

    @property
    def autoreconf_search_path_args(self):
        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
        of build deps, skips the default path of automake, move external include
        flags to the back, since they might pull in unrelated m4 files shadowing
        spack dependencies."""
        return _autoreconf_search_path_args(self.spec)

    @spack.builder.run_after("autoreconf")
    def set_configure_or_die(self):
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

        Raises:
            RuntimeError: if the "configure" script is not found
        """
        # Check if the "configure" script is there. If not raise a RuntimeError.
        if not os.path.exists(self.configure_abs_path):
            msg = "configure script not found in {0}"
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
        globals_for_pkg.configure = Executable(self.configure_abs_path)
        globals_for_pkg.propagate_changes_to_mro()

    def configure_args(self):
        """Return the list of all the arguments that must be passed to configure,
        except ``--prefix`` which will be pre-pended to the list.
        """
        return []

    def configure(self, pkg, spec, prefix):
    def configure(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
        """

@@ -597,7 +610,12 @@ def configure(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.configure(*options)

    def build(self, pkg, spec, prefix):
    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
        params = ["V=1"]

@@ -605,41 +623,49 @@ def build(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            pkg.module.make(*params)

    def install(self, pkg, spec, prefix):
    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

    def check(self):
    def check(self) -> None:
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")

    def _activate_or_not(
        self, name, activation_word, deactivation_word, activation_value=None, variant=None
    ):
        self,
        name: str,
        activation_word: str,
        deactivation_word: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant=None,
    ) -> List[str]:
        """This function contain the current implementation details of
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

        Args:
            name (str): name of the option that is being activated or not
            activation_word (str): the default activation word ('with' in the
                case of ``with_or_without``)
            deactivation_word (str): the default deactivation word ('without'
                in the case of ``with_or_without``)
            activation_value (typing.Callable): callable that accepts a single
                value. This value is either one of the allowed values for a
                multi-valued variant or the name of a bool-valued variant.
            name: name of the option that is being activated or not
            activation_word: the default activation word ('with' in the case of
                ``with_or_without``)
            deactivation_word: the default deactivation word ('without' in the case of
                ``with_or_without``)
            activation_value: callable that accepts a single value. This value is either one of the
                allowed values for a multi-valued variant or the name of a bool-valued variant.
                Returns the parameter to be used when the value is activated.

                The special value 'prefix' can also be assigned and will return
                The special value "prefix" can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.
            variant (str): name of the variant that is being processed
                (if different from option name)
            variant: name of the variant that is being processed (if different from option name)

        Examples:

@@ -647,19 +673,19 @@ def _activate_or_not(

            .. code-block:: python

                variant('foo', values=('x', 'y'), description='')
                variant('bar', default=True, description='')
                variant('ba_z', default=True, description='')
                variant("foo", values=("x", "y"), description="")
                variant("bar", default=True, description="")
                variant("ba_z", default=True, description="")

            calling this function like:

            .. code-block:: python

                _activate_or_not(
                    'foo', 'with', 'without', activation_value='prefix'
                    "foo", "with", "without", activation_value="prefix"
                )
                _activate_or_not('bar', 'with', 'without')
                _activate_or_not('ba-z', 'with', 'without', variant='ba_z')
                _activate_or_not("bar", "with", "without")
                _activate_or_not("ba-z", "with", "without", variant="ba_z")

            will generate the following configuration options:

@@ -679,8 +705,8 @@ def _activate_or_not(
        Raises:
            KeyError: if name is not among known variants
        """
        spec = self.pkg.spec
        args = []
        spec: spack.spec.Spec = self.pkg.spec
        args: List[str] = []

        if activation_value == "prefix":
            activation_value = lambda x: spec[x].prefix

@@ -698,7 +724,7 @@ def _activate_or_not(
            # Create a list of pairs. Each pair includes a configuration
            # option and whether or not that option is activated
            vdef = self.pkg.get_variant(variant)
            if set(vdef.values) == set((True, False)):
            if set(vdef.values) == set((True, False)):  # type: ignore
                # BoolValuedVariant carry information about a single option.
                # Nonetheless, for uniformity of treatment we'll package them
                # in an iterable of one element.

@@ -709,14 +735,12 @@ def _activate_or_not(
            # package's build system. It excludes values which have special
            # meanings and do not correspond to features (e.g. "none")
            feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
            options = [(value, f"{variant}={value}" in spec) for value in feature_values]
            options = [(v, f"{variant}={v}" in spec) for v in feature_values]  # type: ignore

        # For each allowed value in the list of values
        for option_value, activated in options:
            # Search for an override in the package for this value
            override_name = "{0}_or_{1}_{2}".format(
                activation_word, deactivation_word, option_value
            )
            override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
            line_generator = getattr(self, override_name, None) or getattr(
                self.pkg, override_name, None
            )

@@ -725,19 +749,24 @@ def _activate_or_not(

                def _default_generator(is_activated):
                    if is_activated:
                        line = "--{0}-{1}".format(activation_word, option_value)
                        line = f"--{activation_word}-{option_value}"
                        if activation_value is not None and activation_value(
                            option_value
                        ):  # NOQA=ignore=E501
                            line += "={0}".format(activation_value(option_value))
                            line = f"{line}={activation_value(option_value)}"
                        return line
                    return "--{0}-{1}".format(deactivation_word, option_value)
                    return f"--{deactivation_word}-{option_value}"

                line_generator = _default_generator
            args.append(line_generator(activated))
        return args

    def with_or_without(self, name, activation_value=None, variant=None):
    def with_or_without(
        self,
        name: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant: Optional[str] = None,
    ) -> List[str]:
        """Inspects a variant and returns the arguments that activate
        or deactivate the selected feature(s) for the configure options.

@@ -752,12 +781,11 @@ def with_or_without(self, name, activation_value=None, variant=None):
        ``variant=value`` is in the spec.

        Args:
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): callable that accepts a single
                value and returns the parameter to be used leading to an entry
                of the type ``--with-{name}={parameter}``.
            name: name of a valid multi-valued variant
            activation_value: callable that accepts a single value and returns the parameter to be
                used leading to an entry of the type ``--with-{name}={parameter}``.

                The special value 'prefix' can also be assigned and will return
                The special value "prefix" can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:

@@ -765,18 +793,22 @@ def with_or_without(self, name, activation_value=None, variant=None):
        """
        return self._activate_or_not(name, "with", "without", activation_value, variant)

    def enable_or_disable(self, name, activation_value=None, variant=None):
    def enable_or_disable(
        self,
        name: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant: Optional[str] = None,
    ) -> List[str]:
        """Same as
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

        Args:
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): if present accepts a single value
                and returns the parameter to be used leading to an entry of the
                type ``--enable-{name}={parameter}``
            name: name of a valid multi-valued variant
            activation_value: if present accepts a single value and returns the parameter to be
                used leading to an entry of the type ``--enable-{name}={parameter}``

                The special value 'prefix' can also be assigned and will return
                The special value "prefix" can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:

@@ -784,15 +816,15 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
        """
        return self._activate_or_not(name, "enable", "disable", activation_value, variant)

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

    def installcheck(self):
    def installcheck(self) -> None:
        """Run "make" on the ``installcheck`` target, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("installcheck")

    @spack.builder.run_after("install")
    def remove_libtool_archives(self):
    @spack.phase_callbacks.run_after("install")
    def remove_libtool_archives(self) -> None:
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """

@@ -814,12 +846,13 @@ def setup_build_environment(self, env):
        env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


def _autoreconf_search_path_args(spec):
    dirs_seen = set()
    flags_spack, flags_external = [], []
def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
    dirs_seen: Set[Tuple[int, int]] = set()
    flags_spack: List[str] = []
    flags_external: List[str] = []

    # We don't want to add an include flag for automake's default search path.
    for automake in spec.dependencies(name="automake", deptype="build"):
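As a usage sketch of the now annotated helpers, a hypothetical package's configure_args could combine them as documented above (the package and its variants are illustrative, not taken from the diff):

    def configure_args(self):
        # Hypothetical AutotoolsBuilder method; "mpi" and "shared" are
        # bool-valued variants on the illustrative package.
        args = self.with_or_without("mpi")  # --with-mpi or --without-mpi
        args += self.enable_or_disable("shared")  # --enable-shared or --disable-shared
        # "prefix" activation: --with-zlib=<zlib install prefix> when active
        args += self.with_or_without("zlib", activation_value="prefix")
        return args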
@@ -10,8 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_environment
import spack.builder
import spack.phase_callbacks

from .cmake import CMakeBuilder, CMakePackage


@@ -297,18 +296,6 @@ def initconfig_hardware_entries(self):
    def std_initconfig_entries(self):
        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
        cmake_rpaths_path = ";".join(cmake_rpaths_env)
        complete_rpath_list = cmake_rpaths_path
        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)

        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)

        return [
            "#------------------{0}".format("-" * 60),

@@ -318,8 +305,6 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
            cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
            cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
        ]

@@ -347,7 +332,7 @@ def std_cmake_args(self):
        args.extend(["-C", self.cache_path])
        return args

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def install_cmake_cache(self):
        fs.mkdirp(self.pkg.spec.prefix.share.cmake)
        fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
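For orientation, the surviving entries above end up as plain set() lines in the cache file passed to cmake via -C. A sketch of the mechanism, with cmake_cache_string passed in as a parameter rather than imported, since this is illustrative:

    import os

    def sketch_initconfig_entries(cmake_cache_string):
        # CMAKE_PREFIX_PATH is rewritten from the os.pathsep-separated form of
        # the build environment to the semicolon-separated form CMake expects.
        prefix_path = os.environ.get("CMAKE_PREFIX_PATH", "").replace(os.pathsep, ";")
        return [
            cmake_cache_string("CMAKE_PREFIX_PATH", prefix_path),
            cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
        ]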
@@ -7,10 +7,11 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from spack.multimethod import when

from ._checks import BaseBuilder, execute_install_time_tests
from ._checks import BuilderWithDefaults, execute_install_time_tests


class CargoPackage(spack.package_base.PackageBase):

@@ -27,7 +28,7 @@ class CargoPackage(spack.package_base.PackageBase):


@spack.builder.builder("cargo")
class CargoBuilder(BaseBuilder):
class CargoBuilder(BuilderWithDefaults):
    """The Cargo builder encodes the most common way of building software with
    a rust Cargo.toml file. It has two phases that can be overridden, if need be:

@@ -77,7 +78,7 @@ def install(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

    def check(self):
        """Run "cargo test"."""
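For context, a package opting into this build system derives from CargoPackage; everything below (class name, URL, checksum) is a placeholder, not part of the diff:

    from spack.package import *  # conventional wildcard import in package.py files

    class Hello(CargoPackage):
        """Hypothetical Rust package built from its Cargo.toml."""

        homepage = "https://example.com/hello"
        url = "https://example.com/hello-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum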
@@ -8,19 +8,24 @@
import platform
import re
import sys
from typing import List, Optional, Tuple
from itertools import chain
from typing import Any, List, Optional, Set, Tuple

import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition

import spack.build_environment
import spack.builder
import spack.deptypes as dt
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths

from ._checks import BaseBuilder, execute_build_time_tests
from ._checks import BuilderWithDefaults, execute_build_time_tests

# Regex to extract the primary generator from the CMake generator
# string.

@@ -46,9 +51,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
    python_executable = pkg.spec["python"].command.path
    args.extend(
        [
            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
            CMakeBuilder.define("Python_EXECUTABLE", python_executable),
            CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
            define("PYTHON_EXECUTABLE", python_executable),
            define("Python_EXECUTABLE", python_executable),
            define("Python3_EXECUTABLE", python_executable),
        ]
    )

@@ -83,7 +88,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
        ipo = False

    if cmake.satisfies("@3.9:"):
        args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
        args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

    # Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
    # find_package may search there. This is not what we want.

@@ -91,30 +96,36 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
    # Do not populate CMake User Package Registry
    if cmake.satisfies("@3.15:"):
        # see https://cmake.org/cmake/help/latest/policy/CMP0090.html
        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
        args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
    elif cmake.satisfies("@3.1:"):
        # see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
        args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
        args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

    # Do not use CMake User/System Package Registry
    # https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
    if cmake.satisfies("@3.16:"):
        args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
        args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
    elif cmake.satisfies("@3.1:3.15"):
        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
        args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
        args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

    # Export a compilation database if supported.
    if _supports_compilation_databases(pkg):
        args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
        args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

    # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
    # https://cmake.org/cmake/help/latest/policy/CMP0042.html
    if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
        args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

    # Disable find package's config mode for versions of Boost that
    # didn't provide it. See https://github.com/spack/spack/issues/20169
    # and https://cmake.org/cmake/help/latest/module/FindBoost.html
    if pkg.spec.satisfies("^boost@:1.69.0"):
        args.append(define("Boost_NO_BOOST_CMAKE", True))


def generator(*names: str, default: Optional[str] = None):
def generator(*names: str, default: Optional[str] = None) -> None:
    """The build system generator to use.

    See ``cmake --help`` for a list of valid generators.

@@ -152,6 +163,24 @@ def _values(x):
        conflicts(f"generator={x}")


def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
    attribute of direct build/test and transitive link dependencies."""
    # Add direct build/test deps
    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
    # Add transitive link deps
    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
    # Separate out externals so they do not shadow Spack prefixes
    externals, spack_built = stable_partition(
        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
        lambda x: x.external,
    )

    return filter_system_paths(
        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
    )


class CMakePackage(spack.package_base.PackageBase):
    """Specialized class for packages built using CMake

@@ -243,15 +272,15 @@ def flags_to_build_system_args(self, flags):

    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def define(self, *args, **kwargs):
        return self.builder.define(*args, **kwargs)
    def define(self, cmake_var: str, value: Any) -> str:
        return define(cmake_var, value)

    def define_from_variant(self, *args, **kwargs):
        return self.builder.define_from_variant(*args, **kwargs)
    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
        return define_from_variant(self, cmake_var, variant)


@spack.builder.builder("cmake")
class CMakeBuilder(BaseBuilder):
class CMakeBuilder(BuilderWithDefaults):
    """The cmake builder encodes the default way of building software with CMake. IT
    has three phases that can be overridden:

@@ -301,15 +330,15 @@ class CMakeBuilder(BaseBuilder):
    build_time_test_callbacks = ["check"]

    @property
    def archive_files(self):
    def archive_files(self) -> List[str]:
        """Files to archive for packages based on CMake"""
        files = [os.path.join(self.build_directory, "CMakeCache.txt")]
        if _supports_compilation_databases(self):
        if _supports_compilation_databases(self.pkg):
            files.append(os.path.join(self.build_directory, "compile_commands.json"))
        return files

    @property
    def root_cmakelists_dir(self):
    def root_cmakelists_dir(self) -> str:
        """The relative path to the directory containing CMakeLists.txt

        This path is relative to the root of the extracted tarball,

@@ -318,16 +347,17 @@ def root_cmakelists_dir(self):
        return self.pkg.stage.source_path

    @property
    def generator(self):
    def generator(self) -> str:
        if self.spec.satisfies("generator=make"):
            return "Unix Makefiles"
        if self.spec.satisfies("generator=ninja"):
            return "Ninja"
        msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
        raise ValueError(msg)
        raise ValueError(
            f'{self.spec.format()} has an unsupported value for the "generator" variant'
        )

    @property
    def std_cmake_args(self):
    def std_cmake_args(self) -> List[str]:
        """Standard cmake arguments provided as a property for
        convenience of package writers
        """

@@ -336,7 +366,9 @@ def std_cmake_args(self):
        return args

    @staticmethod
    def std_args(pkg, generator=None):
    def std_args(
        pkg: spack.package_base.PackageBase, generator: Optional[str] = None
    ) -> List[str]:
        """Computes the standard cmake arguments for a generic package"""
        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
        generator = generator or default_generator

@@ -353,11 +385,20 @@ def std_args(pkg, generator=None):
        except KeyError:
            build_type = "RelWithDebInfo"

        define = CMakeBuilder.define
        args = [
            "-G",
            generator,
            define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
            define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
            # only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
            define(
                "CMAKE_INSTALL_RPATH",
                [
                    pathlib.Path(pkg.prefix, "lib").as_posix(),
                    pathlib.Path(pkg.prefix, "lib64").as_posix(),
                ],
            ),
            define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
            define("CMAKE_BUILD_TYPE", build_type),
        ]

@@ -372,164 +413,34 @@ def std_args(pkg, generator=None):
        _conditional_cmake_defaults(pkg, args)
        _maybe_set_python_hints(pkg, args)

        # Set up CMake rpath
        args.extend(
            [
                define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
                define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
                define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
            ]
        )

        return args

    @staticmethod
    def define_cuda_architectures(pkg):
        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.

        ``cuda_arch`` is variant composed of a list of target CUDA architectures and
        it is declared in the cuda package.

        This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.

        """
        cmake_flag = str()
        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
            cmake_flag = CMakeBuilder.define(
                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
            )

        return cmake_flag
    def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
        return define_cuda_architectures(pkg)

    @staticmethod
    def define_hip_architectures(pkg):
        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.

        ``amdgpu_target`` is variant composed of a list of the target HIP
        architectures and it is declared in the rocm package.

        This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
        not set.

        """
        cmake_flag = str()
        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
            cmake_flag = CMakeBuilder.define(
                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
            )

        return cmake_flag
    def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
        return define_hip_architectures(pkg)

    @staticmethod
    def define(cmake_var, value):
        """Return a CMake command line argument that defines a variable.
    def define(cmake_var: str, value: Any) -> str:
        return define(cmake_var, value)

        The resulting argument will convert boolean values to OFF/ON
        and lists/tuples to CMake semicolon-separated string lists. All other
        values will be interpreted as strings.

        Examples:

            .. code-block:: python

                [define('BUILD_SHARED_LIBS', True),
                 define('CMAKE_CXX_STANDARD', 14),
                 define('swr', ['avx', 'avx2'])]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2]

        """
        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        if isinstance(value, bool):
            kind = "BOOL"
            value = "ON" if value else "OFF"
        else:
            kind = "STRING"
            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
                value = ";".join(str(v) for v in value)
            else:
                value = str(value)

        return "".join(["-D", cmake_var, ":", kind, "=", value])

    def define_from_variant(self, cmake_var, variant=None):
        """Return a CMake command line argument from the given variant's value.

        The optional ``variant`` argument defaults to the lower-case transform
        of ``cmake_var``.

        This utility function is similar to
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.

        Examples:

            Given a package with:

            .. code-block:: python

                variant('cxxstd', default='11', values=('11', '14'),
                        multi=False, description='')
                variant('shared', default=True, description='')
                variant('swr', values=any_combination_of('avx', 'avx2'),
                        description='')

            calling this function like:

            .. code-block:: python

                [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
                 self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
                 self.define_from_variant('SWR')]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2]

            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``

        Note: if the provided variant is conditional, and the condition is not met,
        this function returns an empty string. CMake discards empty strings
        provided on the command line.
        """

        if variant is None:
            variant = cmake_var.lower()

        if not self.pkg.has_variant(variant):
            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

        if variant not in self.pkg.spec.variants:
            return ""

        value = self.pkg.spec.variants[variant].value
        if isinstance(value, (tuple, list)):
            # Sort multi-valued variants for reproducibility
            value = sorted(value)

        return self.define(cmake_var, value)
    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
        return define_from_variant(self.pkg, cmake_var, variant)

    @property
    def build_dirname(self):
    def build_dirname(self) -> str:
        """Directory name to use when building the package."""
        return "spack-build-%s" % self.pkg.spec.dag_hash(7)
        return f"spack-build-{self.pkg.spec.dag_hash(7)}"

    @property
    def build_directory(self):
    def build_directory(self) -> str:
        """Full-path to the directory to use when building the package."""
        return os.path.join(self.pkg.stage.path, self.build_dirname)

    def cmake_args(self):
    def cmake_args(self) -> List[str]:
        """List of all the arguments that must be passed to cmake, except:

        * CMAKE_INSTALL_PREFIX

@@ -539,15 +450,32 @@ def cmake_args(self):
        """
        return []

    def cmake(self, pkg, spec, prefix):
    def cmake(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Runs ``cmake`` in the build directory"""

        # skip cmake phase if it is an incremental develop build
        if spec.is_develop and os.path.isfile(
            os.path.join(self.build_directory, "CMakeCache.txt")
        ):
            return

        options = self.std_cmake_args
        options += self.cmake_args()
        options.append(os.path.abspath(self.root_cmakelists_dir))
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.cmake(*options)

    def build(self, pkg, spec, prefix):
    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":

@@ -556,7 +484,12 @@ def build(self, pkg, spec, prefix):
                self.build_targets.append("-v")
                pkg.module.ninja(*self.build_targets)

    def install(self, pkg, spec, prefix):
    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":

@@ -564,9 +497,9 @@ def install(self, pkg, spec, prefix):
            elif self.generator == "Ninja":
                pkg.module.ninja(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

    def check(self):
    def check(self) -> None:
|
||||
"""Search the CMake-generated files for the targets ``test`` and ``check``,
|
||||
and runs them if found.
|
||||
"""
|
||||
@@ -577,3 +510,133 @@ def check(self):
|
||||
elif self.generator == "Ninja":
|
||||
self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
|
||||
self.pkg._if_ninja_target_execute("check")
|
||||
|
||||
|
||||
def define(cmake_var: str, value: Any) -> str:
|
||||
"""Return a CMake command line argument that defines a variable.
|
||||
|
||||
The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
|
||||
semicolon-separated string lists. All other values will be interpreted as strings.
|
||||
|
||||
Examples:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[define("BUILD_SHARED_LIBS", True),
|
||||
define("CMAKE_CXX_STANDARD", 14),
|
||||
define("swr", ["avx", "avx2"])]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
["-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2]
|
||||
|
||||
"""
|
||||
# Create a list of pairs. Each pair includes a configuration
|
||||
# option and whether or not that option is activated
|
||||
if isinstance(value, bool):
|
||||
kind = "BOOL"
|
||||
value = "ON" if value else "OFF"
|
||||
else:
|
||||
kind = "STRING"
|
||||
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||
value = ";".join(str(v) for v in value)
|
||||
else:
|
||||
value = str(value)
|
||||
|
||||
return "".join(["-D", cmake_var, ":", kind, "=", value])
|
||||
|
||||
|
||||
def define_from_variant(
|
||||
pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
|
||||
) -> str:
|
||||
"""Return a CMake command line argument from the given variant's value.
|
||||
|
||||
The optional ``variant`` argument defaults to the lower-case transform
|
||||
of ``cmake_var``.
|
||||
|
||||
Examples:
|
||||
|
||||
Given a package with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant("cxxstd", default="11", values=("11", "14"),
|
||||
multi=False, description="")
|
||||
variant("shared", default=True, description="")
|
||||
variant("swr", values=any_combination_of("avx", "avx2"),
|
||||
description="")
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[
|
||||
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
|
||||
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
|
||||
self.define_from_variant("SWR"),
|
||||
]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[
|
||||
"-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2",
|
||||
]
|
||||
|
||||
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
|
||||
|
||||
Note: if the provided variant is conditional, and the condition is not met, this function
|
||||
returns an empty string. CMake discards empty strings provided on the command line.
|
||||
"""
|
||||
if variant is None:
|
||||
variant = cmake_var.lower()
|
||||
|
||||
if not pkg.has_variant(variant):
|
||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
|
||||
|
||||
if variant not in pkg.spec.variants:
|
||||
return ""
|
||||
|
||||
value = pkg.spec.variants[variant].value
|
||||
if isinstance(value, (tuple, list)):
|
||||
# Sort multi-valued variants for reproducibility
|
||||
value = sorted(value)
|
||||
|
||||
return define(cmake_var, value)
|
||||
|
||||
|
||||
def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
|
||||
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
|
||||
|
||||
``amdgpu_target`` is variant composed of a list of the target HIP
|
||||
architectures and it is declared in the rocm package.
|
||||
|
||||
This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
|
||||
not set.
|
||||
|
||||
"""
|
||||
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
|
||||
return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
|
||||
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
|
||||
|
||||
``cuda_arch`` is variant composed of a list of target CUDA architectures and
|
||||
it is declared in the cuda package.
|
||||
|
||||
This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
|
||||
|
||||
"""
|
||||
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
|
||||
return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
|
||||
return ""
|
||||
|
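For orientation, here is a hedged sketch of how a package recipe might consume the helpers above from its ``cmake_args`` (the package and variant names are hypothetical; ``define`` and ``define_from_variant`` are the helpers defined in this file):

.. code-block:: python

    from spack.package import *


    class Mylib(CMakePackage):
        """Hypothetical CMake-based package."""

        variant("shared", default=True, description="Build shared libraries")

        def cmake_args(self):
            return [
                # bool value -> -DBUILD_SHARED_LIBS:BOOL=ON/OFF
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
                # plain value -> -DCMAKE_CXX_STANDARD:STRING=17
                self.define("CMAKE_CXX_STANDARD", 17),
            ]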
@@ -110,8 +110,8 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:

    depends_on("cuda@5.0:10.2", when="cuda_arch=30")
    depends_on("cuda@5.0:10.2", when="cuda_arch=32")
    depends_on("cuda@5.0:", when="cuda_arch=35")
    depends_on("cuda@6.5:", when="cuda_arch=37")
    depends_on("cuda@5.0:11.8", when="cuda_arch=35")
    depends_on("cuda@6.5:11.8", when="cuda_arch=37")

    depends_on("cuda@6.0:", when="cuda_arch=50")
    depends_on("cuda@6.5:", when="cuda_arch=52")
@@ -131,6 +131,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    depends_on("cuda@11.8:", when="cuda_arch=89")

    depends_on("cuda@12.0:", when="cuda_arch=90")
    depends_on("cuda@12.0:", when="cuda_arch=90a")

    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -149,7 +150,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    # minimum supported versions
    conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
    conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
    conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
    conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

    # maximum supported version
@@ -180,13 +180,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
    conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
    conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
    conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
    conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
    conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
    conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
    conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
    conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
    conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
    conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
    conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
@@ -212,9 +205,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
    conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
    conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
    conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
    conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
    conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
    conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
    conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")
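The directives above belong to the CudaPackage mixin; here is a hedged sketch of a consumer package (hypothetical name) that inherits the ``+cuda`` and ``cuda_arch`` variants together with these version and compiler constraints:

.. code-block:: python

    from spack.package import *


    class MySolver(CMakePackage, CudaPackage):
        """Hypothetical GPU-enabled package."""

        def cmake_args(self):
            args = []
            if self.spec.satisfies("+cuda"):
                # cuda_arch holds the requested compute capabilities, e.g. ("80", "90")
                archs = self.spec.variants["cuda_arch"].value
                args.append(self.define("CMAKE_CUDA_ARCHITECTURES", archs))
            return args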
@@ -7,8 +7,9 @@
import spack.builder
import spack.directives
import spack.package_base
import spack.phase_callbacks

from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests
from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests


class Package(spack.package_base.PackageBase):
@@ -26,7 +27,7 @@ class Package(spack.package_base.PackageBase):


@spack.builder.builder("generic")
class GenericBuilder(BaseBuilder):
class GenericBuilder(BuilderWithDefaults):
    """A builder for a generic build system that requires packagers
    to implement an "install" phase.
    """
@@ -44,7 +45,7 @@ class GenericBuilder(BaseBuilder):
    install_time_test_callbacks = []

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

    # unconditionally perform any post-install phase tests
    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
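A hedged sketch of a package using this "generic" build system; GenericBuilder defines no build phase, so the package must supply ``install`` (name and URL are hypothetical):

.. code-block:: python

    from spack.package import *


    class MyTool(Package):
        """Hypothetical prebuilt tool installed by copying files."""

        url = "https://example.com/mytool-1.0.tar.gz"  # hypothetical

        def install(self, spec, prefix):
            # the one phase GenericBuilder requires packagers to implement
            mkdirp(prefix.bin)
            install("mytool", prefix.bin)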
@@ -7,10 +7,11 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
from spack.multimethod import when

from ._checks import BaseBuilder, execute_install_time_tests
from ._checks import BuilderWithDefaults, execute_install_time_tests


class GoPackage(spack.package_base.PackageBase):
@@ -32,7 +33,7 @@ class GoPackage(spack.package_base.PackageBase):


@spack.builder.builder("go")
class GoBuilder(BaseBuilder):
class GoBuilder(BuilderWithDefaults):
    """The Go builder encodes the most common way of building software with
    a golang go.mod file. It has two phases that can be overridden, if need be:

@@ -99,7 +100,7 @@ def install(self, pkg, spec, prefix):
        fs.mkdirp(prefix.bin)
        fs.install(pkg.name, prefix.bin)

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

    def check(self):
        """Run ``go test .`` in the source directory"""
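A hedged sketch of a recipe using this builder; the default phases run ``go build`` and then copy the resulting binary into ``prefix.bin`` as shown above (name, URL, and checksum are placeholders):

.. code-block:: python

    from spack.package import *


    class MyCli(GoPackage):
        """Hypothetical Go module built from a go.mod project."""

        homepage = "https://example.com"  # hypothetical
        url = "https://example.com/mycli-1.0.tar.gz"  # hypothetical

        # placeholder checksum for illustration only
        version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")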
@@ -22,8 +22,8 @@
    install,
)

import spack.builder
import spack.error
import spack.phase_callbacks
from spack.build_environment import dso_suffix
from spack.error import InstallError
from spack.util.environment import EnvironmentModifications
@@ -1163,7 +1163,7 @@ def _determine_license_type(self):
        debug_print(license_type)
        return license_type

    @spack.builder.run_before("install")
    @spack.phase_callbacks.run_before("install")
    def configure(self):
        """Generates the silent.cfg file to pass to installer.sh.

@@ -1250,7 +1250,7 @@ def install(self, spec, prefix):
        for f in glob.glob("%s/intel*log" % tmpdir):
            install(f, dst)

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def validate_install(self):
        # Sometimes the installer exits with an error but doesn't pass a
        # non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1258,7 +1258,7 @@ def validate_install(self):
        if not os.path.exists(self.prefix.bin):
            raise InstallError("The installer has failed to install anything.")

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def configure_rpath(self):
        if "+rpath" not in self.spec:
            return
@@ -1276,7 +1276,7 @@ def configure_rpath(self):
        with open(compiler_cfg, "w") as fh:
            fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def configure_auto_dispatch(self):
        if self._has_compilers:
            if "auto_dispatch=none" in self.spec:
@@ -1300,7 +1300,7 @@ def configure_auto_dispatch(self):
        with open(compiler_cfg, "a") as fh:
            fh.write("-ax{0}\n".format(",".join(ad)))

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def filter_compiler_wrappers(self):
        if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
            bin_dir = self.component_bin_dir("mpi")
@@ -1308,7 +1308,7 @@ def filter_compiler_wrappers(self):
            f = os.path.join(bin_dir, f)
            filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def uninstall_ism(self):
        # The "Intel(R) Software Improvement Program" [ahem] gets installed,
        # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1340,7 +1340,7 @@ def base_lib_dir(self):
        debug_print(d)
        return d

    @spack.builder.run_after("install")
    @spack.phase_callbacks.run_after("install")
    def modify_LLVMgold_rpath(self):
        """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
@@ -8,11 +8,14 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when

from ._checks import (
    BaseBuilder,
    BuilderWithDefaults,
    apply_macos_rpath_fixups,
    execute_build_time_tests,
    execute_install_time_tests,
@@ -36,7 +39,7 @@ class MakefilePackage(spack.package_base.PackageBase):


@spack.builder.builder("makefile")
class MakefileBuilder(BaseBuilder):
class MakefileBuilder(BuilderWithDefaults):
    """The Makefile builder encodes the most common way of building software with
    Makefiles. It has three phases that can be overridden, if need be:

@@ -91,35 +94,50 @@ class MakefileBuilder(BaseBuilder):
    install_time_test_callbacks = ["installcheck"]

    @property
    def build_directory(self):
    def build_directory(self) -> str:
        """Return the directory containing the main Makefile."""
        return self.pkg.stage.source_path

    def edit(self, pkg, spec, prefix):
    def edit(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Edit the Makefile before calling make. The default is a no-op."""
        pass

    def build(self, pkg, spec, prefix):
    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.build_targets)

    def install(self, pkg, spec, prefix):
    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

    def check(self):
    def check(self) -> None:
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

    def installcheck(self):
    def installcheck(self) -> None:
        """Searches the Makefile for an ``installcheck`` target
        and runs it if found.
        """
@@ -127,4 +145,4 @@ def installcheck(self):
        self.pkg._if_make_target_execute("installcheck")

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
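A hedged sketch of a MakefilePackage recipe; the ``edit`` phase is a no-op by default (see above) and is typically overridden to point the Makefile at Spack's compilers and prefix (package name and Makefile variables are hypothetical):

.. code-block:: python

    from spack.package import *


    class MyLib(MakefilePackage):
        """Hypothetical Makefile-based library."""

        def edit(self, spec, prefix):
            makefile = FileFilter("Makefile")
            # spack_cc is the C compiler wrapper exposed in the build environment
            makefile.filter(r"^CC\s*=.*", f"CC = {spack_cc}")
            makefile.filter(r"^PREFIX\s*=.*", f"PREFIX = {prefix}")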
@@ -10,7 +10,7 @@
from spack.multimethod import when
from spack.util.executable import which

from ._checks import BaseBuilder
from ._checks import BuilderWithDefaults


class MavenPackage(spack.package_base.PackageBase):
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):


@spack.builder.builder("maven")
class MavenBuilder(BaseBuilder):
class MavenBuilder(BuilderWithDefaults):
    """The Maven builder encodes the default way to build software with Maven.
    It has two phases that can be overridden, if need be:
@@ -9,10 +9,13 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

from ._checks import BaseBuilder, execute_build_time_tests
from ._checks import BuilderWithDefaults, execute_build_time_tests


class MesonPackage(spack.package_base.PackageBase):
@@ -62,7 +65,7 @@ def flags_to_build_system_args(self, flags):


@spack.builder.builder("meson")
class MesonBuilder(BaseBuilder):
class MesonBuilder(BuilderWithDefaults):
    """The Meson builder encodes the default way to build software with Meson.
    The builder has three phases that can be overridden, if need be:

@@ -112,7 +115,7 @@ def archive_files(self):
        return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]

    @property
    def root_mesonlists_dir(self):
    def root_mesonlists_dir(self) -> str:
        """Relative path to the directory containing meson.build

        This path is relative to the root of the extracted tarball,
@@ -121,7 +124,7 @@ def root_mesonlists_dir(self):
        return self.pkg.stage.source_path

    @property
    def std_meson_args(self):
    def std_meson_args(self) -> List[str]:
        """Standard meson arguments provided as a property for convenience
        of package writers.
        """
@@ -132,7 +135,7 @@ def std_meson_args(self):
        return std_meson_args

    @staticmethod
    def std_args(pkg):
    def std_args(pkg) -> List[str]:
        """Standard meson arguments for a generic package."""
        try:
            build_type = pkg.spec.variants["buildtype"].value
@@ -172,7 +175,7 @@ def build_directory(self):
        """Directory to use when building the package."""
        return os.path.join(self.pkg.stage.path, self.build_dirname)

    def meson_args(self):
    def meson_args(self) -> List[str]:
        """List of arguments that must be passed to meson, except:

        * ``--prefix``
@@ -185,7 +188,12 @@ def meson_args(self):
        """
        return []

    def meson(self, pkg, spec, prefix):
    def meson(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run ``meson`` in the build directory"""
        options = []
        if self.spec["meson"].satisfies("@0.64:"):
@@ -196,21 +204,31 @@ def meson(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.meson(*options)

    def build(self, pkg, spec, prefix):
    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Make the build targets"""
        options = ["-v"]
        options += self.build_targets
        with fs.working_dir(self.build_directory):
            pkg.module.ninja(*options)

    def install(self, pkg, spec, prefix):
    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            pkg.module.ninja(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

    def check(self):
    def check(self) -> None:
        """Search Meson-generated files for the target ``test`` and run it if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_ninja_target_execute("test")
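A hedged sketch of a MesonPackage recipe; ``meson_args`` returns extra options appended after the standard arguments (``std_meson_args``) shown above (package and option names are hypothetical):

.. code-block:: python

    from spack.package import *


    class MyApp(MesonPackage):
        """Hypothetical Meson-based application."""

        variant("docs", default=False, description="Build documentation")

        def meson_args(self):
            # -D options are forwarded to "meson setup"
            enabled = "true" if self.spec.satisfies("+docs") else "false"
            return [f"-Ddocs={enabled}"]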
@@ -10,7 +10,7 @@
import spack.package_base
from spack.directives import build_system, conflicts

from ._checks import BaseBuilder
from ._checks import BuilderWithDefaults


class MSBuildPackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):


@spack.builder.builder("msbuild")
class MSBuildBuilder(BaseBuilder):
class MSBuildBuilder(BuilderWithDefaults):
    """The MSBuild builder encodes the most common way of building software with
    Microsoft's MSBuild tool. It has two phases that can be overridden, if need be:
@@ -10,7 +10,7 @@
import spack.package_base
from spack.directives import build_system, conflicts

from ._checks import BaseBuilder
from ._checks import BuilderWithDefaults


class NMakePackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):


@spack.builder.builder("nmake")
class NMakeBuilder(BaseBuilder):
class NMakeBuilder(BuilderWithDefaults):
    """The NMake builder encodes the most common way of building software with
    Microsoft's NMake tool. It has two phases that can be overridden, if need be:
@@ -7,7 +7,7 @@
from spack.directives import build_system, extends
from spack.multimethod import when

from ._checks import BaseBuilder
from ._checks import BuilderWithDefaults


class OctavePackage(spack.package_base.PackageBase):
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):


@spack.builder.builder("octave")
class OctaveBuilder(BaseBuilder):
class OctaveBuilder(BuilderWithDefaults):
    """The octave builder provides the following phases that can be overridden:

    1. :py:meth:`~.OctaveBuilder.install`
@@ -10,11 +10,12 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
from spack.install_test import SkipTest, test_part
from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_build_time_tests
from ._checks import BuilderWithDefaults, execute_build_time_tests


class PerlPackage(spack.package_base.PackageBase):
@@ -84,7 +85,7 @@ def test_use(self):


@spack.builder.builder("perl")
class PerlBuilder(BaseBuilder):
class PerlBuilder(BuilderWithDefaults):
    """The perl builder provides four phases that can be overridden, if required:

    1. :py:meth:`~.PerlBuilder.configure`
@@ -163,7 +164,7 @@ def configure(self, pkg, spec, prefix):
    # Build.PL may be too long causing the build to fail. Patching the shebang
    # does not happen until after install so set '/usr/bin/env perl' here in
    # the Build script.
    @spack.builder.run_after("configure")
    @spack.phase_callbacks.run_after("configure")
    def fix_shebang(self):
        if self.build_method == "Build.PL":
            pattern = "#!{0}".format(self.spec["perl"].command.path)
@@ -175,7 +176,7 @@ def build(self, pkg, spec, prefix):
        self.build_executable()

    # Ensure that tests run after build (if requested):
    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

    def check(self):
        """Runs built-in tests of a Perl package."""
@@ -24,6 +24,7 @@
import spack.detection
import spack.multimethod
import spack.package_base
import spack.phase_callbacks
import spack.platforms
import spack.repo
import spack.spec
@@ -34,7 +35,7 @@
from spack.spec import Spec
from spack.util.prefix import Prefix

from ._checks import BaseBuilder, execute_install_time_tests
from ._checks import BuilderWithDefaults, execute_install_time_tests


def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
@@ -374,7 +375,7 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
        return None

    @property
    def python_spec(self):
    def python_spec(self) -> Spec:
        """Get python-venv if it exists or python otherwise."""
        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
        return python
@@ -425,7 +426,7 @@ def libs(self) -> LibraryList:


@spack.builder.builder("python_pip")
class PythonPipBuilder(BaseBuilder):
class PythonPipBuilder(BuilderWithDefaults):
    phases = ("install",)

    #: Names associated with package methods in the old build-system format
@@ -543,4 +544,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        with fs.working_dir(self.build_directory):
            pip(*args)

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
@@ -6,9 +6,10 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on

from ._checks import BaseBuilder, execute_build_time_tests
from ._checks import BuilderWithDefaults, execute_build_time_tests


class QMakePackage(spack.package_base.PackageBase):
@@ -30,7 +31,7 @@ class QMakePackage(spack.package_base.PackageBase):


@spack.builder.builder("qmake")
class QMakeBuilder(BaseBuilder):
class QMakeBuilder(BuilderWithDefaults):
    """The qmake builder provides three phases that can be overridden:

    1. :py:meth:`~.QMakeBuilder.qmake`
@@ -81,4 +82,4 @@ def check(self):
        with working_dir(self.build_directory):
            self.pkg._if_make_target_execute("check")

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
@@ -8,7 +8,7 @@
import spack.package_base
from spack.directives import build_system, extends, maintainers

from ._checks import BaseBuilder
from ._checks import BuilderWithDefaults


class RubyPackage(spack.package_base.PackageBase):
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):


@spack.builder.builder("ruby")
class RubyBuilder(BaseBuilder):
class RubyBuilder(BuilderWithDefaults):
    """The Ruby builder provides two phases that can be overridden if required:

    #. :py:meth:`~.RubyBuilder.build`
@@ -4,9 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on

from ._checks import BaseBuilder, execute_build_time_tests
from ._checks import BuilderWithDefaults, execute_build_time_tests


class SConsPackage(spack.package_base.PackageBase):
@@ -28,7 +29,7 @@ class SConsPackage(spack.package_base.PackageBase):


@spack.builder.builder("scons")
class SConsBuilder(BaseBuilder):
class SConsBuilder(BuilderWithDefaults):
    """The SCons builder provides the following phases that can be overridden:

    1. :py:meth:`~.SConsBuilder.build`
@@ -79,4 +80,4 @@ def build_test(self):
        """
        pass

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
@@ -11,11 +11,12 @@
import spack.builder
import spack.install_test
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_install_time_tests
from ._checks import BuilderWithDefaults, execute_install_time_tests


class SIPPackage(spack.package_base.PackageBase):
@@ -103,7 +104,7 @@ def test_imports(self):


@spack.builder.builder("sip")
class SIPBuilder(BaseBuilder):
class SIPBuilder(BuilderWithDefaults):
    """The SIP builder provides the following phases that can be overridden:

    * configure
@@ -170,4 +171,4 @@ def install_args(self):
        """Arguments to pass to install."""
        return []

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
@@ -6,9 +6,10 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on

from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests
from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests


class WafPackage(spack.package_base.PackageBase):
@@ -30,7 +31,7 @@ class WafPackage(spack.package_base.PackageBase):


@spack.builder.builder("waf")
class WafBuilder(BaseBuilder):
class WafBuilder(BuilderWithDefaults):
    """The WAF builder provides the following phases that can be overridden:

    * configure
@@ -136,7 +137,7 @@ def build_test(self):
        """
        pass

    spack.builder.run_after("build")(execute_build_time_tests)
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)

    def install_test(self):
        """Run unit tests after install.
@@ -146,4 +147,4 @@ def install_test(self):
        """
        pass

    spack.builder.run_after("install")(execute_install_time_tests)
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
@@ -6,43 +6,35 @@
import collections.abc
import copy
import functools
from typing import List, Optional, Tuple
from typing import Dict, List, Optional, Tuple, Type

from llnl.util import lang
import llnl.util.tty as tty
import llnl.util.tty.log as log

import spack.config
import spack.error
import spack.install_test
import spack.multimethod
import spack.package_base
import spack.phase_callbacks
import spack.repo
import spack.spec
import spack.util.environment

#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}

#: An object of this kind is a shared global state used to collect callbacks during
#: class definition time, and is flushed when the class object is created at the end
#: of the class definition
#:
#: Args:
#:     attribute_name (str): name of the attribute that will be attached to the builder
#:     callbacks (list): container used to temporarily aggregate the callbacks
CallbackTemporaryStage = collections.namedtuple(
    "CallbackTemporaryStage", ["attribute_name", "callbacks"]
)

#: Shared global state to aggregate "@run_before" callbacks
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
#: Shared global state to aggregate "@run_after" callbacks
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
BUILDER_CLS: Dict[str, Type["Builder"]] = {}

#: Map id(pkg) to a builder, to avoid creating multiple
#: builders for the same package object.
_BUILDERS = {}
_BUILDERS: Dict[int, "Builder"] = {}


def builder(build_system_name):
def builder(build_system_name: str):
    """Class decorator used to register the default builder
    for a given build-system.

    Args:
        build_system_name (str): name of the build-system
        build_system_name: name of the build-system
    """

    def _decorator(cls):
@@ -53,13 +45,9 @@ def _decorator(cls):
    return _decorator


def create(pkg):
    """Given a package object with an associated concrete spec,
    return the builder object that can install it.

    Args:
        pkg (spack.package_base.PackageBase): package for which we want the builder
    """
def create(pkg: spack.package_base.PackageBase) -> "Builder":
    """Given a package object with an associated concrete spec, return the builder object that can
    install it."""
    if id(pkg) not in _BUILDERS:
        _BUILDERS[id(pkg)] = _create(pkg)
    return _BUILDERS[id(pkg)]
@@ -74,7 +62,15 @@ def __call__(self, spec, prefix):
        return self.phase_fn(self.builder.pkg, spec, prefix)


def _create(pkg):
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
    """Return the builder class if a package module defines it."""
    cls = getattr(pkg.module, name, None)
    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
        return cls
    return None


def _create(pkg: spack.package_base.PackageBase) -> "Builder":
    """Return a new builder object for the package object being passed as argument.

    The function inspects the build-system used by the package object and tries to:
@@ -94,14 +90,15 @@ class hierarchy (look at AspellDictPackage for an example of that)
    to look for build-related methods in the ``*Package``.

    Args:
        pkg (spack.package_base.PackageBase): package object for which we need a builder
        pkg: package object for which we need a builder
    """
    package_buildsystem = buildsystem_name(pkg)
    default_builder_cls = BUILDER_CLS[package_buildsystem]
    builder_cls_name = default_builder_cls.__name__
    builder_cls = getattr(pkg.module, builder_cls_name, None)
    if builder_cls:
        return builder_cls(pkg)
    builder_class = get_builder_class(pkg, builder_cls_name)

    if builder_class:
        return builder_class(pkg)

    # Specialized versions of a given build system can subclass some
    # base classes and specialize certain phases or methods or attributes.
@@ -130,11 +127,16 @@ def __init__(self, wrapped_pkg_object, root_builder):
            new_cls_name,
            bases,
            {
                # boolean to indicate whether install-time tests are run
                "run_tests": property(lambda x: x.wrapped_package_object.run_tests),
                # boolean to indicate whether the package's stand-alone tests
                # require a compiler
                "test_requires_compiler": property(
                    lambda x: x.wrapped_package_object.test_requires_compiler
                ),
                # TestSuite instance the spec is a part of
                "test_suite": property(lambda x: x.wrapped_package_object.test_suite),
                # PackageTest instance to manage the spec's testing
                "tester": property(lambda x: x.wrapped_package_object.tester),
            },
        )
@@ -158,8 +160,8 @@ def __forward(self, *args, **kwargs):
    # with the same name is defined in the Package, it will override this definition
    # (when _ForwardToBaseBuilder is initialized)
    for method_name in (
        base_cls.phases
        + base_cls.legacy_methods
        base_cls.phases  # type: ignore
        + base_cls.legacy_methods  # type: ignore
        + getattr(base_cls, "legacy_long_methods", tuple())
        + ("setup_build_environment", "setup_dependent_build_environment")
    ):
@@ -171,14 +173,14 @@ def __forward(self):

        return __forward

    for attribute_name in base_cls.legacy_attributes:
    for attribute_name in base_cls.legacy_attributes:  # type: ignore
        setattr(
            _ForwardToBaseBuilder,
            attribute_name,
            property(forward_property_to_getattr(attribute_name)),
        )

    class Adapter(base_cls, metaclass=_PackageAdapterMeta):
    class Adapter(base_cls, metaclass=_PackageAdapterMeta):  # type: ignore
        def __init__(self, pkg):
            # Deal with custom phases in packages here
            if hasattr(pkg, "phases"):
@@ -203,99 +205,18 @@ def setup_dependent_build_environment(self, env, dependent_spec):
    return Adapter(pkg)


def buildsystem_name(pkg):
def buildsystem_name(pkg: spack.package_base.PackageBase) -> str:
    """Given a package object with an associated concrete spec,
    return the name of its build system.

    Args:
        pkg (spack.package_base.PackageBase): package for which we want
            the build system name
    """
    return the name of its build system."""
    try:
        return pkg.spec.variants["build_system"].value
    except KeyError:
        # We are reading an old spec without the build_system variant
        return pkg.legacy_buildsystem


class PhaseCallbacksMeta(type):
    """Permits registering arbitrary functions during class definition, to run them
    later, before or after a given install phase.

    Each method decorated with ``run_before`` or ``run_after`` gets temporarily
    stored in a global shared state when a class being defined is parsed by the Python
    interpreter. At class definition time that temporary storage gets flushed and a list
    of callbacks is attached to the class being defined.
    """

    def __new__(mcs, name, bases, attr_dict):
        for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
            staged_callbacks = temporary_stage.callbacks

            # Here we have an adapter from an old-style package. This means there is no
            # hierarchy of builders, and every callback that had to be combined between
            # *Package and *Builder has been combined already by _PackageAdapterMeta
            if name == "Adapter":
                continue

            # If we are here we have callbacks. To get a complete list, we accumulate all the
            # callbacks from base classes, we deduplicate them, then prepend what we have
            # registered here.
            #
            # The order should be:
            # 1. Callbacks are registered in order within the same class
            # 2. Callbacks defined in derived classes precede those defined in base
            #    classes
            callbacks_from_base = []
            for base in bases:
                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
                if not current_callbacks:
                    continue
                callbacks_from_base.extend(current_callbacks)
            callbacks_from_base = list(lang.dedupe(callbacks_from_base))
            # Set the callbacks in this class and flush the temporary stage
            attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
            del temporary_stage.callbacks[:]

        return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)

    @staticmethod
    def run_after(phase, when=None):
        """Decorator to register a function for running after a given phase.

        Args:
            phase (str): phase after which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_AFTER.callbacks.append(item)
            return fn

        return _decorator

    @staticmethod
    def run_before(phase, when=None):
        """Decorator to register a function for running before a given phase.

        Args:
            phase (str): phase before which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_BEFORE.callbacks.append(item)
            return fn

        return _decorator
    return pkg.legacy_buildsystem  # type: ignore


class BuilderMeta(
    PhaseCallbacksMeta,
    spack.phase_callbacks.PhaseCallbacksMeta,
    spack.multimethod.MultiMethodMeta,
    type(collections.abc.Sequence),  # type: ignore
):
@@ -390,8 +311,12 @@ def __new__(mcs, name, bases, attr_dict):
        )

        combine_callbacks = _PackageAdapterMeta.combine_callbacks
        attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
        attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
        attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks(
            spack.phase_callbacks._RUN_BEFORE.attribute_name
        )
        attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
            spack.phase_callbacks._RUN_AFTER.attribute_name
        )

        return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)

@@ -411,8 +336,8 @@ def __init__(self, name, builder):
        self.name = name
        self.builder = builder
        self.phase_fn = self._select_phase_fn()
        self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
        self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
        self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name)
        self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name)

    def _make_callbacks(self, callbacks_attribute):
        result = []
@@ -473,15 +398,103 @@ def copy(self):
        return copy.deepcopy(self)


class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
    """A builder is a class that, given a package object (i.e. associated with
    a concrete spec), knows how to install it.

    The builder behaves like a sequence, and when iterated over returns the
    "phases" of the installation in the correct order.

    Args:
        pkg (spack.package_base.PackageBase): package object to be built
    """
class BaseBuilder(metaclass=BuilderMeta):
    """An interface for builders, without any phases defined. This class is exposed in the package
    API, so that packagers can create a single class to define ``setup_build_environment`` and
    ``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.

    Example:

    .. code-block:: python

        class AnyBuilder(BaseBuilder):
            @run_after("install")
            def fixup_install(self):
                # do something after the package is installed
                pass

            def setup_build_environment(self, env):
                env.set("MY_ENV_VAR", "my_value")

        class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
            pass

        class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
            pass
    """

    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
        self.pkg = pkg

    @property
    def spec(self) -> spack.spec.Spec:
        return self.pkg.spec

    @property
    def stage(self):
        return self.pkg.stage

    @property
    def prefix(self):
        return self.pkg.prefix

    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        """Sets up the build environment for a package.

        This method will be called before the current package prefix exists in
        Spack's store.

        Args:
            env: environment modifications to be applied when the package is built. Package
                authors can call methods on it to alter the build environment.
        """
        if not hasattr(super(), "setup_build_environment"):
            return
        super().setup_build_environment(env)  # type: ignore

    def setup_dependent_build_environment(
        self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
    ) -> None:
        """Sets up the build environment of a package that depends on this one.

        This is similar to ``setup_build_environment``, but it is used to modify the build
        environment of a package that *depends* on this one.

        This gives packages the ability to set environment variables for the build of the
        dependent, which can be useful to provide search hints for headers or libraries if they
        are not in standard locations.

        This method will be called before the dependent package prefix exists in Spack's store.

        Args:
            env: environment modifications to be applied when the dependent package is built.
                Package authors can call methods on it to alter the build environment.

            dependent_spec: the spec of the dependent package about to be built. This allows the
                extendee (self) to query the dependent's state. Note that *this* package's spec is
                available as ``self.spec``
        """
        if not hasattr(super(), "setup_dependent_build_environment"):
            return
        super().setup_dependent_build_environment(env, dependent_spec)  # type: ignore

    def __repr__(self):
        fmt = "{name}{/hash:7}"
        return f"{self.__class__.__name__}({self.spec.format(fmt)})"

    def __str__(self):
        fmt = "{name}{/hash:7}"
        return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'


class Builder(BaseBuilder, collections.abc.Sequence):
    """A builder is a class that, given a package object (i.e. associated with a concrete spec),
    knows how to install it and perform install-time checks.

    The builder behaves like a sequence, and when iterated over returns the "phases" of the
    installation in the correct order.
    """

    #: Sequence of phases. Must be defined in derived classes
@@ -496,79 +509,19 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
|
||||
build_time_test_callbacks: List[str]
|
||||
install_time_test_callbacks: List[str]
|
||||
|
||||
#: List of glob expressions. Each expression must either be
|
||||
#: absolute or relative to the package source path.
|
||||
#: Matching artifacts found at the end of the build process will be
|
||||
#: copied in the same directory tree as _spack_build_logfile and
|
||||
#: _spack_build_envfile.
|
||||
archive_files: List[str] = []
|
||||
#: List of glob expressions. Each expression must either be absolute or relative to the package
|
||||
#: source path. Matching artifacts found at the end of the build process will be copied in the
|
||||
#: same directory tree as _spack_build_logfile and _spack_build_envfile.
|
||||
@property
|
||||
def archive_files(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def __init__(self, pkg):
|
||||
self.pkg = pkg
|
||||
def __init__(self, pkg: spack.package_base.PackageBase) -> None:
|
||||
super().__init__(pkg)
|
||||
self.callbacks = {}
|
||||
for phase in self.phases:
|
||||
self.callbacks[phase] = InstallationPhase(phase, self)
|
||||
|
||||
@property
|
||||
def spec(self):
|
||||
return self.pkg.spec
|
||||
|
||||
@property
|
||||
def stage(self):
|
||||
return self.pkg.stage
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
return self.pkg.prefix
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""Sets up the build environment for a package.
|
||||
|
||||
This method will be called before the current package prefix exists in
|
||||
Spack's store.
|
||||
|
||||
Args:
|
||||
env (spack.util.environment.EnvironmentModifications): environment
|
||||
modifications to be applied when the package is built. Package authors
|
||||
can call methods on it to alter the build environment.
|
||||
"""
|
||||
if not hasattr(super(), "setup_build_environment"):
|
||||
return
|
||||
super().setup_build_environment(env)
|
||||
|
||||
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||
"""Sets up the build environment of packages that depend on this one.
|
||||
|
||||
This is similar to ``setup_build_environment``, but it is used to
|
||||
modify the build environments of packages that *depend* on this one.
|
||||
|
||||
This gives packages like Python and others that follow the extension
|
||||
model a way to implement common environment or compile-time settings
|
||||
for dependencies.
|
||||
|
||||
This method will be called before the dependent package prefix exists
|
||||
in Spack's store.
|
||||
|
||||
Examples:
|
||||
1. Installing python modules generally requires ``PYTHONPATH``
|
||||
to point to the ``lib/pythonX.Y/site-packages`` directory in the
|
||||
module's install prefix. This method could be used to set that
|
||||
variable.
|
||||
|
||||
Args:
|
||||
env (spack.util.environment.EnvironmentModifications): environment
|
||||
modifications to be applied when the dependent package is built.
|
||||
Package authors can call methods on it to alter the build environment.
|
||||
|
||||
dependent_spec (spack.spec.Spec): the spec of the dependent package
|
||||
about to be built. This allows the extendee (self) to query
|
||||
the dependent's state. Note that *this* package's spec is
|
||||
available as ``self.spec``
|
||||
"""
|
||||
if not hasattr(super(), "setup_dependent_build_environment"):
|
||||
return
|
||||
super().setup_dependent_build_environment(env, dependent_spec)
|
||||
|
||||
def __getitem__(self, idx):
    key = self.phases[idx]
    return self.callbacks[key]

@@ -576,15 +529,51 @@ def __getitem__(self, idx):

def __len__(self):
    return len(self.phases)

def __repr__(self):
    msg = "{0}({1})"
    return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))

def __str__(self):
    msg = '"{0}" builder for "{1}"'
    return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))

def phase_tests(self, phase_name: str, method_names: List[str]):
    """Execute the package's phase-time tests.

    This process uses the same test setup and logging used for
    stand-alone tests for consistency.

    Args:
        phase_name: the name of the build-time phase (e.g., ``build``, ``install``)
        method_names: phase-specific callback method names
    """
    verbose = tty.is_verbose()
    fail_fast = spack.config.get("config:fail_fast", False)

    tester = self.pkg.tester
    with tester.test_logger(verbose=verbose, externals=False) as logger:
        # Report running each of the methods in the build log
        log.print_message(logger, f"Running {phase_name}-time tests", verbose)
        tester.set_current_specs(self.pkg.spec, self.pkg.spec)

        have_tests = any(name.startswith("test_") for name in method_names)
        if have_tests:
            spack.install_test.copy_test_files(self.pkg, self.pkg.spec)

        for name in method_names:
            try:
                # Prefer the method in the package over the builder's.
                # We need this primarily to pick up arbitrarily named test
                # methods but also some build-time checks.
                fn = getattr(self.pkg, name, getattr(self, name))

                msg = f"RUN-TESTS: {phase_name}-time tests [{name}]"
                log.print_message(logger, msg, verbose)

                fn()

            except AttributeError as e:
                msg = f"RUN-TESTS: method not implemented [{name}]"
                log.print_message(logger, msg, verbose)

                tester.add_failure(e, msg)
                if fail_fast:
                    break

        if have_tests:
            log.print_message(logger, "Completed testing", verbose)

        # Raise exception if any failures encountered
        tester.handle_failures()

# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before
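As a sketch of how ``phase_tests`` is fed: the loop above prefers an attribute found on the package, so an arbitrarily named test method defined there is picked up when its name appears in ``method_names``, and the exported ``run_after``/``run_before`` remain available to packages for registering build-time callbacks. The package and method below are hypothetical:

    # Hypothetical package: phase_tests() resolves "test_smoke" on the
    # package first via getattr(self.pkg, name, ...), so this method wins
    # over any builder method of the same name.
    import os
    from spack.package import *

    class Hello(Package):
        def test_smoke(self):
            # Runs under the same test logger as stand-alone tests.
            assert os.path.isdir(self.prefix.bin)
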
@@ -5,7 +5,6 @@

"""Caches used by Spack to store data"""
import os
from typing import Union

import llnl.util.lang
from llnl.util.filesystem import mkdirp

@@ -32,12 +31,8 @@ def _misc_cache():
    return spack.util.file_cache.FileCache(path)


FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]

#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_misc_cache)
)
MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache)  # type: ignore


def fetch_cache_location():

@@ -74,6 +69,4 @@ def store(self, fetcher, relative_dest):


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_fetch_cache)
)
FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache)  # type: ignore
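Both cache rewrites above follow the same typing pattern: annotate the lazy ``Singleton`` proxy as the type it eventually resolves to, and silence the checker once at the assignment. A minimal sketch of the idea, with an invented cache type:

    import llnl.util.lang

    def _make_cache() -> "MyCache":  # "MyCache" is an invented stand-in type
        return MyCache()  # constructed on first use, not at import time

    # The proxy is not really a MyCache, hence the single `# type: ignore`;
    # every call site still type-checks against the MyCache API.
    CACHE: "MyCache" = llnl.util.lang.Singleton(_make_cache)  # type: ignore
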
@@ -10,6 +10,7 @@
import os
import re
import shutil
import ssl
import stat
import subprocess
import sys
@@ -19,21 +20,22 @@
from collections import defaultdict, namedtuple
from typing import Dict, List, Optional, Set, Tuple
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
from urllib.parse import quote, urlencode, urlparse
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener

import ruamel.yaml

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.lang import Singleton, memoized
from llnl.util.tty.color import cescape, colorize

import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.concretize
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.main
import spack.mirror
import spack.paths
@@ -50,6 +52,31 @@
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp


def _urlopen():
    error_handler = web_util.SpackHTTPDefaultErrorHandler()

    # One opener with HTTPS ssl enabled
    with_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
    )

    # One opener with HTTPS ssl disabled
    without_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
    )

    # And dynamically dispatch based on the config:verify_ssl.
    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
        opener = with_ssl if verify_ssl else without_ssl
        timeout = timeout or spack.config.get("config:connect_timeout", 1)
        return opener.open(fullurl, data, timeout)

    return dispatch_open


_dyn_mapping_urlopener = Singleton(_urlopen)

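A hedged sketch of how this lazily constructed opener would be invoked (URL and timeout are hypothetical):

    # Both openers are built once, on first use of the Singleton; each call
    # then dispatches to the SSL-verifying or non-verifying opener per request.
    response = _dyn_mapping_urlopener(
        "https://mapping.example.invalid/v1/jobs", verify_ssl=True, timeout=5
    )
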
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
    # "always",
@@ -69,8 +96,6 @@

TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
)
@@ -175,11 +200,11 @@ def _remove_satisfied_deps(deps, satisfied_list):
    return nodes, edges, stages


def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
def _print_staging_summary(spec_labels, stages, rebuild_decisions):
    if not stages:
        return

    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
    mirrors = spack.mirror.MirrorCollection(binary=True)
    tty.msg("Checked the following mirrors for binaries:")
    for m in mirrors.values():
        tty.msg(f"  {m.fetch_url}")
@@ -226,21 +251,14 @@ def _spec_matches(spec, match_string):
    return spec.intersects(match_string)


def _format_job_needs(
    dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
):
def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions):
    needs_list = []
    for dep_job in dep_jobs:
        dep_spec_key = _spec_ci_label(dep_job)
        rebuild = rebuild_decisions[dep_spec_key].rebuild

        if not prune_dag or rebuild:
            needs_list.append(
                {
                    "job": get_job_name(dep_job, build_group),
                    "artifacts": enable_artifacts_buildcache,
                }
            )
            needs_list.append({"job": get_job_name(dep_job, build_group), "artifacts": False})
    return needs_list
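After this change every entry appended by ``_format_job_needs`` has a fixed shape; a representative element (job name hypothetical):

    # {"job": "zlib@1.3 /abcdef1 %gcc@12.3.0 arch=linux-ubuntu22.04-x86_64",
    #  "artifacts": False}
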
@@ -384,12 +402,6 @@ def __init__(self, ci_config, spec_labels, stages):

        self.ir = {
            "jobs": {},
            "temporary-storage-url-prefix": self.ci_config.get(
                "temporary-storage-url-prefix", None
            ),
            "enable-artifacts-buildcache": self.ci_config.get(
                "enable-artifacts-buildcache", False
            ),
            "rebuild-index": self.ci_config.get("rebuild-index", True),
            "broken-specs-url": self.ci_config.get("broken-specs-url", None),
            "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -405,9 +417,20 @@ def __init__(self, ci_config, spec_labels, stages):
            if name not in ["any", "build"]:
                jobs[name] = self.__init_job("")

    def __init_job(self, spec):
    def __init_job(self, release_spec):
        """Initialize job object"""
        return {"spec": spec, "attributes": {}}
        job_object = {"spec": release_spec, "attributes": {}}
        if release_spec:
            job_vars = job_object["attributes"].setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

        return job_object
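For a non-empty spec, the initializer above now seeds the job with its variables up front; a sketch of the resulting structure (values hypothetical):

    # __init_job(release_spec) would return roughly:
    # {
    #     "spec": release_spec,
    #     "attributes": {
    #         "variables": {
    #             "SPACK_JOB_SPEC_DAG_HASH": "abcdef1...",
    #             "SPACK_JOB_SPEC_PKG_NAME": "zlib",
    #             "SPACK_JOB_SPEC_PKG_VERSION": "1.3",
    #             ...
    #         }
    #     },
    # }
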
    def __is_named(self, section):
        """Check if a pipeline-gen configuration section is for a named job,
@@ -500,6 +523,7 @@ def generate_ir(self):
        for section in reversed(pipeline_gen):
            name = self.__is_named(section)
            has_submapping = "submapping" in section
            has_dynmapping = "dynamic-mapping" in section
            section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)

            if name:
@@ -542,6 +566,108 @@ def _apply_section(dest, src):
                    job["attributes"] = self.__apply_submapping(
                        job["attributes"], job["spec"], section
                    )
            elif has_dynmapping:
                mapping = section["dynamic-mapping"]

                dynmap_name = mapping.get("name")

                # Check if this section should be skipped
                dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
                if dynmap_name and dynmap_skip:
                    if re.match(dynmap_skip, dynmap_name):
                        continue

                # Get the endpoint
                endpoint = mapping["endpoint"]
                endpoint_url = urlparse(endpoint)

                # Configure the request header
                header = {"User-Agent": web_util.SPACK_USER_AGENT}
                header.update(mapping.get("header", {}))

                # Expand header environment variables, i.e. if tokens are passed
                for key, value in header.items():
                    header[key] = os.path.expandvars(value)

                verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
                timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))

                required = mapping.get("require", [])
                allowed = mapping.get("allow", [])
                ignored = mapping.get("ignore", [])

                # required keys are implicitly allowed
                allowed = sorted(set(allowed + required))
                ignored = sorted(set(ignored))
                required = sorted(set(required))

                # Make sure required things are not also ignored
                assert not any(ikey in required for ikey in ignored)

                def job_query(job):
                    job_vars = job["attributes"]["variables"]
                    query = (
                        "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
                        # The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
                        " {SPACK_JOB_SPEC_VARIANTS}"
                        " arch={SPACK_JOB_SPEC_ARCH}"
                        "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
                    ).format_map(job_vars)
                    return f"spec={quote(query)}"

                for job in jobs.values():
                    if not job["spec"]:
                        continue

                    # Create request for this job
                    query = job_query(job)
                    request = Request(
                        endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
                    )
                    try:
                        response = _dyn_mapping_urlopener(
                            request, verify_ssl=verify_ssl, timeout=timeout
                        )
                    except Exception as e:
                        # For now just ignore any errors from dynamic mapping and continue.
                        # This is still experimental, and failures should not stop CI
                        # from running normally.
                        tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
                        tty.warn(f"{e}")
                        continue

                    config = json.load(codecs.getreader("utf-8")(response))

                    # Strip ignored keys
                    if ignored:
                        for key in ignored:
                            if key in config:
                                config.pop(key)

                    # Only keep allowed keys
                    clean_config = {}
                    if allowed:
                        for key in allowed:
                            if key in config:
                                clean_config[key] = config[key]
                    else:
                        clean_config = config

                    # Verify all of the required keys are present
                    if required:
                        missing_keys = []
                        for key in required:
                            if key not in clean_config.keys():
                                missing_keys.append(key)

                        if missing_keys:
                            tty.warn(f"Response missing required keys: {missing_keys}")

                    if clean_config:
                        job["attributes"] = spack.config.merge_yaml(
                            job.get("attributes", {}), clean_config
                        )

        for _, job in jobs.items():
            if job["spec"]:
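The ``elif has_dynmapping`` branch above consumes a ``dynamic-mapping`` entry from the ``ci`` configuration's ``pipeline-gen`` list. A hedged sketch of the shape it expects, written as the parsed Python structure (the endpoint, token variable, and key names are hypothetical):

    section = {
        "dynamic-mapping": {
            "name": "gantry",  # matched against SPACK_CI_SKIP_DYNAMIC_MAPPING
            "endpoint": "https://gantry.example.invalid/v1/allocation",
            "header": {"Authorization": "Bearer ${GANTRY_TOKEN}"},  # expanded via expandvars()
            "verify_ssl": True,
            "timeout": 2,
            "require": ["variables"],        # keys the response must contain
            "allow": ["variables", "tags"],  # keys kept from the response
            "ignore": ["id"],                # keys stripped from the response
        }
    }
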
@@ -558,14 +684,13 @@ def generate_gitlab_ci_yaml(
    prune_dag=False,
    check_index_only=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
    """Generate a gitlab yaml file to run a dynamic child pipeline from
    the spec matrix in the active environment.

    Arguments:
        env (spack.environment.Environment): Activated environment object
            which must contain a gitlab-ci section describing how to map
            which must contain a ci section describing how to map
            specs to runners
        print_summary (bool): Should we print a summary of all the jobs in
            the stages in which they were placed.
@@ -580,39 +705,21 @@ def generate_gitlab_ci_yaml(
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
        remote_mirror_override (str): Typically only needed when one spack.yaml
            is used to populate several mirrors with binaries, based on some
            criteria. Spack protected pipelines populate different mirrors based
            on branch name, facilitated by this option. DEPRECATED
    """
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
            env.concretize()
            env.write()

    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]

    # Get the joined "ci" config with all of the current scopes resolved
    ci_config = cfg.get("ci")

    config_deprecated = False
    if not ci_config:
        tty.warn("Environment does not have `ci` a configuration")
        gitlabci_config = yaml_root.get("gitlab-ci")
        if not gitlabci_config:
            tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")

        tty.warn(
            "The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
            "To update run \n\t$ spack env update /path/to/ci/spack.yaml",
        )
        translate_deprecated_config(gitlabci_config)
        ci_config = gitlabci_config
        config_deprecated = True
        raise SpackCIError("Environment does not have a `ci` configuration")

    # Default target is gitlab...and only target is gitlab
    if not ci_config.get("target", "gitlab") == "gitlab":
        tty.die('Spack CI module only generates target "gitlab"')
        raise SpackCIError('Spack CI module only generates target "gitlab"')

    cdash_config = cfg.get("cdash")
    cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
@@ -673,12 +780,6 @@ def generate_gitlab_ci_yaml(
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

    copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
    if copy_only_pipeline and config_deprecated:
        tty.warn(
            "SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
            "deprecated ci configuration, a no-op pipeline will be generated\n",
            "instead.",
        )

    def ensure_expected_target_path(path):
        """Returns passed paths with all Windows path separators exchanged
@@ -697,38 +798,16 @@ def ensure_expected_target_path(path):
        return path

    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
    if "buildcache-destination" in pipeline_mirrors:
        if remote_mirror_override:
            tty.die(
                "Using the deprecated --buildcache-destination cli option and "
                "having a mirror named 'buildcache-destination' at the same time "
                "is not allowed"
            )
        buildcache_destination = pipeline_mirrors["buildcache-destination"]
    else:
        deprecated_mirror_config = True
        # TODO: This will be an error in Spack 0.23
    if "buildcache-destination" not in pipeline_mirrors:
        raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")

    # TODO: Remove this block in spack 0.23
    remote_mirror_url = None
    if deprecated_mirror_config:
        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
            tty.die("spack ci generate requires an env containing a mirror")

        ci_mirrors = yaml_root["mirrors"]
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]
    buildcache_destination = pipeline_mirrors["buildcache-destination"]

    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copies = {}
        buildcache_copy_src_prefix = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        buildcache_copy_src_prefix = buildcache_destination.fetch_url
        buildcache_copy_dest_prefix = spack_buildcache_copy

    # Check for a list of "known broken" specs that we should not bother
@@ -738,55 +817,10 @@ def ensure_expected_target_path(path):
    if "broken-specs-url" in ci_config:
        broken_specs_url = ci_config["broken-specs-url"]

    enable_artifacts_buildcache = False
    if "enable-artifacts-buildcache" in ci_config:
        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

    rebuild_index_enabled = True
    if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in ci_config:
        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
    # TODO: Remove this block in Spack 0.23
    mirrors_to_check = None
    if deprecated_mirror_config and remote_mirror_override:
        if spack_pipeline_type == "spack_protected_branch":
            # Overriding the main mirror in this case might result
            # in skipping jobs on a release pipeline because specs are
            # up to date in develop. Eventually we want to notice and take
            # advantage of this by scheduling a job to copy the spec from
            # develop to the release, but until we have that, this makes
            # sure we schedule a rebuild job if the spec isn't already in
            # override mirror.
            mirrors_to_check = {"override": remote_mirror_override}

        # If we have a remote override and we want to generate the pipeline using
        # --check-index-only, then the override mirror needs to be added to
        # the configured mirrors when bindist.update() is run, or else we
        # won't fetch its index and include it in our local cache.
        spack.mirror.add(
            spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
            cfg.default_modify_scope(),
        )

    # TODO: Remove this block in Spack 0.23
    shared_pr_mirror = None
    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
        stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
        shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
        spack.mirror.add(
            spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
            cfg.default_modify_scope(),
        )

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
        proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
@@ -795,9 +829,8 @@ def ensure_expected_target_path(path):
    pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
    concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")

    # Now that we've added the mirrors we know about, they should be properly
    # reflected in the environment manifest file, so copy that into the
    # concrete environment directory, along with the spack.lock file.
    # Copy the environment manifest file into the concrete environment directory,
    # along with the spack.lock file.
    if not os.path.exists(concrete_env_dir):
        os.makedirs(concrete_env_dir)
    shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
@@ -822,18 +855,12 @@ def ensure_expected_target_path(path):
    env_includes.extend(include_scopes)
    env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

    if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
        env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
        translate_deprecated_config(env_yaml_root["spack"]["ci"])

    with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
        fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))

    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    # TODO: Remove this line in Spack 0.23
    local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

    # We communicate relative paths to the downstream jobs to avoid issues in
@@ -847,8 +874,6 @@ def ensure_expected_target_path(path):
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    # TODO: Remove this line in Spack 0.23
    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
@@ -910,7 +935,7 @@ def ensure_expected_target_path(path):
            continue

        up_to_date_mirrors = bindist.get_mirrors_for_spec(
            spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
            spec=release_spec, index_only=check_index_only
        )

        spec_record.rebuild = not up_to_date_mirrors
@@ -952,36 +977,16 @@ def main_script_replacements(cmd):

            job_name = get_job_name(release_spec, build_group)

            job_vars = job_object.setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

            job_object["needs"] = []
            if spec_label in dependencies:
                if enable_artifacts_buildcache:
                    # Get dependencies transitively, so they're all
                    # available in the artifacts buildcache.
                    dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
                else:
                    # In this case, "needs" is only used for scheduling
                    # purposes, so we only get the direct dependencies.
                    dep_jobs = []
                    for dep_label in dependencies[spec_label]:
                        dep_jobs.append(spec_labels[dep_label])
                # In this case, "needs" is only used for scheduling
                # purposes, so we only get the direct dependencies.
                dep_jobs = []
                for dep_label in dependencies[spec_label]:
                    dep_jobs.append(spec_labels[dep_label])

                job_object["needs"].extend(
                    _format_job_needs(
                        dep_jobs,
                        build_group,
                        prune_dag,
                        rebuild_decisions,
                        enable_artifacts_buildcache,
                    )
                    _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions)
                )

            rebuild_spec = spec_record.rebuild
@@ -1038,6 +1043,7 @@ def main_script_replacements(cmd):

            # Let downstream jobs know whether the spec needed rebuilding, regardless
            # of whether DAG pruning was enabled or not.
            job_vars = job_object["variables"]
            job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)

            if cdash_handler:
@@ -1062,19 +1068,6 @@ def main_script_replacements(cmd):
                },
            )

            # TODO: Remove this block in Spack 0.23
            if enable_artifacts_buildcache:
                bc_root = os.path.join(local_mirror_dir, "build_cache")
                job_object["artifacts"]["paths"].extend(
                    [
                        os.path.join(bc_root, p)
                        for p in [
                            bindist.tarball_name(release_spec, ".spec.json"),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ]
                )

            job_object["stage"] = stage_name
            job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
            job_object["interruptible"] = True
@@ -1089,15 +1082,7 @@ def main_script_replacements(cmd):
        job_id += 1

    if print_summary:
        _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

    # Clean up remote mirror override if enabled
    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config:
        if remote_mirror_override:
            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
        if spack_pipeline_type == "spack_pull_request":
            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
        _print_staging_summary(spec_labels, stages, rebuild_decisions)

    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")

@@ -1119,7 +1104,7 @@ def main_script_replacements(cmd):
        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
    }

    if copy_only_pipeline and not config_deprecated:
    if copy_only_pipeline:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
        sync_job["stage"] = "copy"
@@ -1129,17 +1114,12 @@ def main_script_replacements(cmd):
        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = buildcache_destination.fetch_url

        if "buildcache-source" in pipeline_mirrors:
            buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        else:
            # TODO: Remove this condition in Spack 0.23
            buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
        if "buildcache-source" not in pipeline_mirrors:
            raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")

        buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
        sync_job["dependencies"] = []

@@ -1147,27 +1127,6 @@ def main_script_replacements(cmd):
        job_id += 1

    if job_id > 0:
        # TODO: Remove this block in Spack 0.23
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
            # associated with this pipeline.
            stage_names.append("cleanup-temp-storage")
            cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])

            cleanup_job["stage"] = "cleanup-temp-storage"
            cleanup_job["when"] = "always"
            cleanup_job["retry"] = service_job_retries
            cleanup_job["interruptible"] = True

            cleanup_job["script"] = _unpack_script(
                cleanup_job["script"],
                op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
            )

            cleanup_job["dependencies"] = []
            output_object["cleanup"] = cleanup_job

        if (
            "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
            and spack_pipeline_type == "spack_protected_branch"
@@ -1184,11 +1143,9 @@ def main_script_replacements(cmd):
            signing_job["interruptible"] = True
            if "variables" not in signing_job:
                signing_job["variables"] = {}
            signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
                buildcache_destination.push_url  # need the s3 url for aws s3 sync
                if buildcache_destination
                else remote_mirror_override or remote_mirror_url
            )
            signing_job["variables"][
                "SPACK_BUILDCACHE_DESTINATION"
            ] = buildcache_destination.push_url
            signing_job["dependencies"] = []

            output_object["sign-pkgs"] = signing_job
@@ -1199,9 +1156,7 @@ def main_script_replacements(cmd):
            final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

            final_job["stage"] = "stage-rebuild-index"
            target_mirror = remote_mirror_override or remote_mirror_url
            if buildcache_destination:
                target_mirror = buildcache_destination.push_url
            target_mirror = buildcache_destination.push_url
            final_job["script"] = _unpack_script(
                final_job["script"],
                op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
@@ -1227,17 +1182,11 @@ def main_script_replacements(cmd):
        "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
        "SPACK_VERSION": spack_version,
        "SPACK_CHECKOUT_VERSION": version_to_clone,
        # TODO: Remove this line in Spack 0.23
        "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
        "SPACK_JOB_LOG_DIR": rel_job_log_dir,
        "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
        "SPACK_JOB_TEST_DIR": rel_job_test_dir,
        # TODO: Remove this line in Spack 0.23
        "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
        "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
        "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
        # TODO: Remove this line in Spack 0.23
        "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
        "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
        "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
        "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
@@ -1246,10 +1195,6 @@ def main_script_replacements(cmd):
    for item, val in output_vars.items():
        output_vars[item] = ensure_expected_target_path(val)

    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config and remote_mirror_override:
        (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override

    spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
    if spack_stack_name:
        output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
@@ -1276,15 +1221,8 @@ def main_script_replacements(cmd):
    noop_job["retry"] = 0
    noop_job["allow_failure"] = True

    if copy_only_pipeline and config_deprecated:
        tty.debug("Generating no-op job as copy-only is unsupported here.")
        noop_job["script"] = [
            'echo "copy-only pipelines are not supported with deprecated ci configs"'
        ]
        output_object = {"unsupported-copy": noop_job}
    else:
        tty.debug("No specs to rebuild, generating no-op job")
        output_object = {"no-specs-to-rebuild": noop_job}
    tty.debug("No specs to rebuild, generating no-op job")
    output_object = {"no-specs-to-rebuild": noop_job}

    # Ensure the child pipeline always runs
    output_object["workflow"] = {"rules": [{"when": "always"}]}
@@ -1450,7 +1388,11 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->

    stage_dir = job_pkg.stage.path
    tty.debug(f"stage dir: {stage_dir}")
    for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
    for file in [
        job_pkg.log_path,
        job_pkg.env_mods_path,
        *spack.builder.create(job_pkg).archive_files,
    ]:
        copy_files_to_artifacts(file, job_log_dir)

@@ -2322,83 +2264,6 @@ def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optiona
    reporter.test_skipped_report(report_dir, spec, reason)


def translate_deprecated_config(config):
    # Remove all deprecated keys from config
    mappings = config.pop("mappings", [])
    match_behavior = config.pop("match_behavior", "first")

    build_job = {}
    if "image" in config:
        build_job["image"] = config.pop("image")
    if "tags" in config:
        build_job["tags"] = config.pop("tags")
    if "variables" in config:
        build_job["variables"] = config.pop("variables")

    # Scripts always override in old CI
    if "before_script" in config:
        build_job["before_script:"] = config.pop("before_script")
    if "script" in config:
        build_job["script:"] = config.pop("script")
    if "after_script" in config:
        build_job["after_script:"] = config.pop("after_script")

    signing_job = None
    if "signing-job-attributes" in config:
        signing_job = {"signing-job": config.pop("signing-job-attributes")}

    service_job_attributes = None
    if "service-job-attributes" in config:
        service_job_attributes = config.pop("service-job-attributes")

    # If this config already has pipeline-gen, do nothing more
    if "pipeline-gen" in config:
        return True if mappings or build_job or signing_job or service_job_attributes else False

    config["target"] = "gitlab"

    config["pipeline-gen"] = []
    pipeline_gen = config["pipeline-gen"]

    # Build Job
    submapping = []
    for section in mappings:
        submapping_section = {"match": section["match"]}
        if "runner-attributes" in section:
            remapped_attributes = {}
            if match_behavior == "first":
                for key, value in section["runner-attributes"].items():
                    # Scripts always override in old CI
                    if key == "script":
                        remapped_attributes["script:"] = value
                    elif key == "before_script":
                        remapped_attributes["before_script:"] = value
                    elif key == "after_script":
                        remapped_attributes["after_script:"] = value
                    else:
                        remapped_attributes[key] = value
            else:
                # Handle "merge" behavior by allowing scripts to merge in submapping section
                remapped_attributes = section["runner-attributes"]
            submapping_section["build-job"] = remapped_attributes

        if "remove-attributes" in section:
            # Old format only allowed tags in this section, so no extra checks are needed
            submapping_section["build-job-remove"] = section["remove-attributes"]
        submapping.append(submapping_section)
    pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})

    if build_job:
        pipeline_gen.append({"build-job": build_job})

    # Signing Job
    if signing_job:
        pipeline_gen.append(signing_job)

    # Service Jobs
    if service_job_attributes:
        pipeline_gen.append({"reindex-job": service_job_attributes})
        pipeline_gen.append({"noop-job": service_job_attributes})
        pipeline_gen.append({"cleanup-job": service_job_attributes})

    return True


class SpackCIError(spack.error.SpackError):
    def __init__(self, msg):
        super().__init__(msg)
@@ -8,7 +8,8 @@
import os
import re
import sys
from typing import List, Union
from collections import Counter
from typing import List, Optional, Union

import llnl.string
import llnl.util.tty as tty
@@ -17,12 +18,14 @@
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize

import spack.concretize
import spack.config  # breaks a cycle.
import spack.environment as ev
import spack.error
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.traverse as traverse
@@ -30,6 +33,8 @@
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml

from ..enums import InstallRecordStatus

# cmd has a submodule called "list" so preserve the python list module
python_list = list

@@ -173,10 +178,66 @@ def parse_specs(
    arg_string = " ".join([quote_kvp(arg) for arg in args])

    specs = spack.parser.parse(arg_string)
    for spec in specs:
        if concretize:
            spec.concretize(tests=tests)
    return specs
    if not concretize:
        return specs

    to_concretize = [(s, None) for s in specs]
    return _concretize_spec_pairs(to_concretize, tests=tests)

def _concretize_spec_pairs(to_concretize, tests=False):
    """Helper method that concretizes abstract specs from a list of (abstract, concrete) pairs.

    Any spec with a concrete spec associated with it will concretize to that spec. Any spec
    with ``None`` for its concrete spec will be newly concretized. This method respects unification
    rules from config."""
    unify = spack.config.get("concretizer:unify", False)

    # Special case for concretizing a single spec
    if len(to_concretize) == 1:
        abstract, concrete = to_concretize[0]
        return [concrete or abstract.concretized()]

    # Special case if every spec is either concrete or has an abstract hash
    if all(
        concrete or abstract.concrete or abstract.abstract_hash
        for abstract, concrete in to_concretize
    ):
        # Get all the concrete specs
        ret = [
            concrete or (abstract if abstract.concrete else abstract.lookup_hash())
            for abstract, concrete in to_concretize
        ]

        # If unify: true, check that specs don't conflict
        # Since all are concrete, "when_possible" is not relevant
        if unify is True:  # True, "when_possible", False are possible values
            runtimes = spack.repo.PATH.packages_with_tags("runtime")
            specs_per_name = Counter(
                spec.name
                for spec in traverse.traverse_nodes(
                    ret, deptype=("link", "run"), key=traverse.by_dag_hash
                )
                if spec.name not in runtimes  # runtimes are allowed multiple times
            )

            conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
            if conflicts:
                raise spack.error.SpecError(
                    "Specs conflict and `concretizer:unify` is configured true.",
                    f" specs depend on multiple versions of {', '.join(conflicts)}",
                )
        return ret

    # Standard case
    concretize_method = spack.concretize.concretize_separately  # unify: false
    if unify is True:
        concretize_method = spack.concretize.concretize_together
    elif unify == "when_possible":
        concretize_method = spack.concretize.concretize_together_when_possible

    concretized = concretize_method(to_concretize, tests=tests)
    return [concrete for _, concrete in concretized]

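A hedged usage sketch for the helper above (the ``abstract_zlib``/``concrete_zlib`` names are hypothetical): pairs carrying a concrete spec pass through unchanged, while ``None`` entries are concretized under the configured unification rules.

    import spack.spec

    pairs = [
        (abstract_zlib, concrete_zlib),        # reused as-is
        (spack.spec.Spec("hdf5+mpi"), None),   # newly concretized
    ]
    concrete_specs = _concretize_spec_pairs(pairs, tests=False)
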
def matching_spec_from_env(spec):
@@ -192,39 +253,64 @@ def matching_spec_from_env(spec):
    return spec.concretized()


def disambiguate_spec(spec, env, local=False, installed=True, first=False):
def matching_specs_from_env(specs):
    """
    Same as ``matching_spec_from_env`` but respects spec unification rules.

    For each spec, if there is a matching spec in the environment it is used. If no
    matching spec is found, this will return the given spec but concretized in the
    context of the active environment and other given specs, with unification rules applied.
    """
    env = ev.active_environment()
    spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
    additional_concrete_specs = (
        [(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
    )
    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]


def disambiguate_spec(
    spec: spack.spec.Spec,
    env: Optional[ev.Environment],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
    """Given a spec, figure out which installed package it refers to.

    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        env (spack.environment.Environment): a spack environment,
            if one is active, or None if no environment is active
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    Args:
        spec: a spec to disambiguate
        env: a spack environment, if one is active, or None if no environment is active
        local: do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
    """
    hashes = env.all_hashes() if env else None
    return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)


def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
def disambiguate_spec_from_hashes(
    spec: spack.spec.Spec,
    hashes: List[str],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
    """Given a spec and a list of hashes, get concrete spec the spec refers to.

    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    Args:
        spec: a spec to disambiguate
        hashes: a set of hashes of specs among which to disambiguate
        local: if True, do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
    """
    if local:
        matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
    else:
        matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
    if not matching_specs:
        tty.die("Spec '%s' matches no installed packages." % spec)
        tty.die(f"Spec '{spec}' matches no installed packages.")

    elif first:
        return matching_specs[0]
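A hedged usage sketch for the typed ``disambiguate_spec`` above (the spec string is hypothetical):

    import spack.cmd
    import spack.environment as ev
    import spack.spec

    # Resolve "zlib" to a single installed spec, restricted to the active
    # environment's hashes when an environment is active.
    spec = spack.cmd.disambiguate_spec(
        spack.spec.Spec("zlib"), ev.active_environment(), first=True
    )
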
@@ -509,6 +595,18 @@ def __init__(self, name):
        super().__init__("{0} is not a permissible Spack command name.".format(name))


class MultipleSpecsMatch(Exception):
    """Raised when multiple specs match a constraint, in a context where
    this is not allowed.
    """


class NoSpecMatches(Exception):
    """Raised when no spec matches a constraint, in a context where
    this is not allowed.
    """


########################################
# argparse types for argument validation
########################################
@@ -19,12 +19,23 @@


def setup_parser(subparser):
    # DEPRECATED: equivalent to --generic --target
    subparser.add_argument(
        "-g", "--generic-target", action="store_true", help="show the best generic target"
        "-g",
        "--generic-target",
        action="store_true",
        help="show the best generic target (deprecated)",
    )
    subparser.add_argument(
        "--known-targets", action="store_true", help="show a list of all known targets and exit"
    )
    target_type = subparser.add_mutually_exclusive_group()
    target_type.add_argument(
        "--family", action="store_true", help="print generic ISA (x86_64, aarch64, ppc64le, ...)"
    )
    target_type.add_argument(
        "--generic", action="store_true", help="print feature level (x86_64_v3, armv8.4a, ...)"
    )
    parts = subparser.add_mutually_exclusive_group()
    parts2 = subparser.add_mutually_exclusive_group()
    parts.add_argument(
@@ -80,6 +91,7 @@ def display_target_group(header, target_group):

def arch(parser, args):
    if args.generic_target:
        # TODO: add deprecation warning in 0.24
        print(archspec.cpu.host().generic)
        return

@@ -96,6 +108,10 @@ def arch(parser, args):
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system(os_args)
    host_target = host_platform.target(target_args)
    if args.family:
        host_target = host_target.family
    elif args.generic:
        host_target = host_target.generic
    architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))

    if args.platform:
@@ -34,6 +34,8 @@
from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles

from ..enums import InstallRecordStatus

description = "create, download and install binary packages"
section = "packaging"
level = "long"
@@ -308,7 +310,10 @@ def setup_parser(subparser: argparse.ArgumentParser):

def _matching_specs(specs: List[Spec]) -> List[Spec]:
    """Disambiguate specs and return a list of matching specs"""
    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
    return [
        spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
        for s in specs
    ]


def _format_spec(spec: Spec) -> str:
@@ -62,13 +62,6 @@ def setup_parser(subparser):
|
||||
"path to the file where generated jobs file should be written. "
|
||||
"default is .gitlab-ci.yml in the root of the repository",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--copy-to",
|
||||
default=None,
|
||||
help="path to additional directory for job files\n\n"
|
||||
"this option provides an absolute path to a directory where the generated "
|
||||
"jobs yaml file should be copied. default is not to copy",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--optimize",
|
||||
action="store_true",
|
||||
@@ -83,12 +76,6 @@ def setup_parser(subparser):
|
||||
default=False,
|
||||
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--buildcache-destination",
|
||||
default=None,
|
||||
help="override the mirror configured in the environment\n\n"
|
||||
"allows for pushing binaries from the generated pipeline to a different location",
|
||||
)
|
||||
prune_group = generate.add_mutually_exclusive_group()
|
||||
prune_group.add_argument(
|
||||
"--prune-dag",
|
||||
@@ -214,20 +201,10 @@ def ci_generate(args):
|
||||
|
||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||
|
||||
if args.copy_to:
|
||||
tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")
|
||||
|
||||
if args.buildcache_destination:
|
||||
tty.warn(
|
||||
"The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
|
||||
)
|
||||
|
||||
output_file = args.output_file
|
||||
copy_yaml_to = args.copy_to
|
||||
prune_dag = args.prune_dag
|
||||
index_only = args.index_only
|
||||
artifacts_root = args.artifacts_root
|
||||
buildcache_destination = args.buildcache_destination
|
||||
|
||||
if not output_file:
|
||||
output_file = os.path.abspath(".gitlab-ci.yml")
|
||||
@@ -245,15 +222,8 @@ def ci_generate(args):
|
||||
prune_dag=prune_dag,
|
||||
check_index_only=index_only,
|
||||
artifacts_root=artifacts_root,
|
||||
remote_mirror_override=buildcache_destination,
|
||||
)
|
||||
|
||||
if copy_yaml_to:
|
||||
copy_to_dir = os.path.dirname(copy_yaml_to)
|
||||
if not os.path.exists(copy_to_dir):
|
||||
os.makedirs(copy_to_dir)
|
||||
shutil.copyfile(output_file, copy_yaml_to)
|
||||
|
||||
|
||||
def ci_reindex(args):
|
||||
"""rebuild the buildcache index for the remote mirror
|
||||
@@ -298,22 +268,13 @@ def ci_rebuild(args):
|
||||
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
|
||||
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
|
||||
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
|
||||
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
|
||||
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
|
||||
ci_job_name = os.environ.get("CI_JOB_NAME")
|
||||
signing_key = os.environ.get("SPACK_SIGNING_KEY")
|
||||
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
|
||||
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
|
||||
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
|
||||
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
|
||||
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
|
||||
|
||||
@@ -333,12 +294,10 @@ def ci_rebuild(args):
|
||||
job_log_dir = os.path.join(ci_project_dir, job_log_dir)
|
||||
job_test_dir = os.path.join(ci_project_dir, job_test_dir)
|
||||
repro_dir = os.path.join(ci_project_dir, repro_dir)
|
||||
local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
|
||||
concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
|
||||
|
||||
# Debug print some of the key environment variables we should have received
|
||||
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
||||
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
||||
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
||||
|
||||
# Query the environment manifest to find out whether we're reporting to a
|
||||
@@ -370,51 +329,11 @@ def ci_rebuild(args):
|
||||
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
|
||||
|
||||
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
|
||||
deprecated_mirror_config = False
|
||||
buildcache_destination = None
|
||||
if "buildcache-destination" in pipeline_mirrors:
|
||||
buildcache_destination = pipeline_mirrors["buildcache-destination"]
|
||||
else:
|
||||
deprecated_mirror_config = True
|
||||
# TODO: This will be an error in Spack 0.23
|
||||
if "buildcache-destination" not in pipeline_mirrors:
|
||||
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")
|
||||
|
||||
# If no override url exists, then just push binary package to the
|
||||
# normal remote mirror url.
|
||||
# TODO: Remove in Spack 0.23
|
||||
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
|
||||
if buildcache_destination:
|
||||
buildcache_mirror_url = buildcache_destination.push_url
|
||||
|
||||
# Figure out what is our temporary storage mirror: Is it artifacts
|
||||
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
|
||||
# force something or pipelines might not have a way to propagate build
|
||||
# artifacts from upstream to downstream jobs.
|
||||
# TODO: Remove this in Spack 0.23
|
||||
pipeline_mirror_url = None
|
||||
|
||||
# TODO: Remove this in Spack 0.23
|
||||
temp_storage_url_prefix = None
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)

# TODO: Remove this in Spack 0.23
enable_artifacts_mirror = False
if "enable-artifacts-buildcache" in ci_config:
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
if enable_artifacts_mirror or (
spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
):
# If you explicitly enabled the artifacts buildcache feature, or
# if this is a PR pipeline but you did not enable either of the
# per-pipeline temporary storage features, we force the use of
# artifacts buildcache. Otherwise jobs will not have binary
# dependencies from previous stages available since we do not
# allow pushing binaries to the remote mirror during PR pipelines.
enable_artifacts_mirror = True
pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
tty.debug(mirror_msg)
buildcache_destination = pipeline_mirrors["buildcache-destination"]

# Get the concrete spec to be built by this job.
try:
@@ -489,48 +408,7 @@ def ci_rebuild(args):
fd.write(spack_info.encode("utf8"))
fd.write(b"\n")

pipeline_mirrors = []

# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
spack.mirror.add(mirror, cfg.default_modify_scope())
pipeline_mirrors.append(pipeline_mirror_url)

# Check configured mirrors for a built spec with a matching hash
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == "spack_protected_branch":
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {"override": remote_mirror_override}

# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add(
spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
cfg.default_modify_scope(),
)
pipeline_mirrors.append(remote_mirror_override)

# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
if shared_pr_mirror_url != "None":
pipeline_mirrors.append(shared_pr_mirror_url)

matches = (
None
if full_rebuild
else bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False
)
)
matches = None if full_rebuild else bindist.get_mirrors_for_spec(job_spec, index_only=False)

if matches:
# Got a hash match on at least one configured mirror. All
@@ -542,25 +420,10 @@ def ci_rebuild(args):
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches:
tty.msg(" {0}".format(match["mirror_url"]))
# TODO: Remove this block in Spack 0.23
if enable_artifacts_mirror:
matching_mirror = matches[0]["mirror_url"]
build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
tty.debug("Getting {0} buildcache from {1}".format(job_spec_pkg_name, matching_mirror))
tty.debug("Downloading to {0}".format(build_cache_dir))
bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)

# Now we are done and successful
return 0

# Before beginning the install, if this is a "rebuild everything" pipeline, we
# only want to keep the mirror being used by the current pipeline as its binary
# package destination. This ensures that when we rebuild everything, we only
# consume binary dependencies built in this pipeline.
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and full_rebuild:
spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())

# No hash match anywhere means we need to rebuild spec

# Start with spack arguments
@@ -681,17 +544,11 @@ def ci_rebuild(args):
cdash_handler.copy_test_results(reports_dir, job_test_dir)

if install_exit_code == 0:
# If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
# If the install succeeded, push it to the buildcache destination. Failure to push
# will result in a non-zero exit code. Pushing is best-effort.
mirror_urls = [buildcache_mirror_url]

# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror_urls.append(pipeline_mirror_url)

for result in spack_ci.create_buildcache(
input_spec=job_spec,
destination_mirror_urls=mirror_urls,
destination_mirror_urls=[buildcache_destination.push_url],
sign_binaries=spack_ci.can_sign_binaries(),
):
if not result.success:
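
The net effect of these ci_rebuild hunks is that pushes now go to a single configured destination instead of a list of deprecated per-pipeline mirrors. A hedged sketch of the flow they converge on (the `pipeline_mirrors` lookup and `create_buildcache` call come from the hunks above; the error handling is illustrative only):

# the environment is assumed to configure a mirror named "buildcache-destination"
buildcache_destination = pipeline_mirrors["buildcache-destination"]
for result in spack_ci.create_buildcache(
    input_spec=job_spec,
    destination_mirror_urls=[buildcache_destination.push_url],
    sign_binaries=spack_ci.can_sign_binaries(),
):
    if not result.success:
        tty.error(f"Failed to push {job_spec.name}")  # hypothetical handler
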
@@ -105,7 +105,8 @@ def clean(parser, args):
# Then do the cleaning falling through the cases
if args.specs:
specs = spack.cmd.parse_specs(args.specs, concretize=False)
specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
specs = spack.cmd.matching_specs_from_env(specs)

for spec in specs:
msg = "Cleaning build stage [{0}]"
tty.msg(msg.format(spec.short_spec))
@@ -581,23 +581,51 @@ def add_concretizer_args(subparser):


def add_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
def add_argument_string_or_variable(parser, arg: str, *, deprecate_str: bool = True, **kwargs):
group = parser.add_mutually_exclusive_group()
group.add_argument(arg, **kwargs)
# Update help string
if "help" in kwargs:
kwargs["help"] = "environment variable containing " + kwargs["help"]
group.add_argument(arg + "-variable", **kwargs)

s3_connection_parser = subparser.add_argument_group("S3 Connection")

add_argument_string_or_variable(
s3_connection_parser,
"--s3-access-key-id",
help="ID string to use to connect to this S3 mirror",
)
subparser.add_argument(
"--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
add_argument_string_or_variable(
s3_connection_parser,
"--s3-access-key-secret",
help="secret string to use to connect to this S3 mirror",
)
subparser.add_argument(
"--s3-access-token", help="access token to use to connect to this S3 mirror"
add_argument_string_or_variable(
s3_connection_parser,
"--s3-access-token",
help="access token to use to connect to this S3 mirror",
)
subparser.add_argument(
s3_connection_parser.add_argument(
"--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
)
subparser.add_argument(
s3_connection_parser.add_argument(
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
)
subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")

oci_connection_parser = subparser.add_argument_group("OCI Connection")

add_argument_string_or_variable(
oci_connection_parser,
"--oci-username",
deprecate_str=False,
help="username to use to connect to this OCI mirror",
)
add_argument_string_or_variable(
oci_connection_parser,
"--oci-password",
help="password to use to connect to this OCI mirror",
)

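For illustration, a minimal sketch of what the helper wires up (the bare parser is hypothetical; the helper itself is the one defined above):

import argparse
parser = argparse.ArgumentParser()
add_argument_string_or_variable(parser, "--s3-access-key-id", help="ID string")
# produces a mutually exclusive pair:
#   --s3-access-key-id VALUE            takes the literal string
#   --s3-access-key-id-variable VALUE   takes the name of an environment variable;
#                                       its help becomes "environment variable containing ID string"
args = parser.parse_args(["--s3-access-key-id-variable", "MY_KEY_ID"])
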
def use_buildcache(cli_arg_value):
@@ -660,34 +688,32 @@ def mirror_name_or_url(m):
# accidentally to a dir in the current working directory.

# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m:
if "/" in m or "\\" in m or m in (".", ".."):
return spack.mirror.Mirror(m)

# Otherwise, the named mirror is required to exist.
try:
return spack.mirror.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(
str(e) + ". Did you mean {}?".format(os.path.join(".", m))
)
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e


def mirror_url(url):
try:
return spack.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e


def mirror_directory(path):
try:
return spack.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e


def mirror_name(name):
try:
return spack.mirror.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e
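
A hedged illustration of the behavioral change in mirror_name_or_url (the mirror names are hypothetical):

mirror_name_or_url("./cache")     # contains "/": parsed as a path/URL
mirror_name_or_url(".")           # newly accepted as a path rather than a name
mirror_name_or_url("my-mirror")   # must be a configured mirror; on failure the
                                  # ArgumentTypeError now suggests "./my-mirror"
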
@@ -99,5 +99,5 @@ def deconcretize(parser, args):
" Use `spack deconcretize --all` to deconcretize ALL specs.",
)

specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
deconcretize_specs(args, specs)
@@ -23,9 +23,10 @@
import spack.installer
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError

from ..enums import InstallRecordStatus

description = "replace one package with another via symlinks"
section = "admin"
level = "long"
@@ -95,8 +96,12 @@ def deprecate(parser, args):
if len(specs) != 2:
raise SpackError("spack deprecate requires exactly two specs")

install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)
deprecate = spack.cmd.disambiguate_spec(
specs[0],
env,
local=True,
installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
)

if args.install:
deprecator = specs[1].concretized()
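
The list of statuses gives way to a flag enum, so statuses combine with bitwise-or. A minimal sketch of the assumed shape of `..enums` (values illustrative):

from enum import Flag, auto

class InstallRecordStatus(Flag):  # assumed shape, for illustration only
    INSTALLED = auto()
    DEPRECATED = auto()
    MISSING = auto()

query = InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED
assert InstallRecordStatus.DEPRECATED in query  # membership via bitwise ops
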
@@ -85,8 +85,14 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:


def develop(parser, args):
# Note: we could put develop specs in any scope, but I assume
# users would only ever want to do this for either (a) an active
# env or (b) a specified config file (e.g. that is included by
# an environment)
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
# an active env is not required if a scope is specified
env = spack.cmd.require_active_env(cmd_name="develop")
if not args.spec:
env = spack.cmd.require_active_env(cmd_name="develop")
if args.clone is False:
raise SpackError("No spec provided to spack develop command")

@@ -116,16 +122,18 @@ def develop(parser, args):
raise SpackError("spack develop requires at most one named spec")

spec = specs[0]

version = spec.versions.concrete_range_as_version
if not version:
raise SpackError("Packages to develop must have a concrete version")
# look up the maximum version so infinity versions are preferred for develop
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])

# If user does not specify --path, we choose to create a directory in the
# active environment's directory, named after the spec
path = args.path or spec.name
if not os.path.isabs(path):
env = spack.cmd.require_active_env(cmd_name="develop")
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
else:
abspath = path
@@ -149,13 +157,6 @@ def develop(parser, args):

_retrieve_develop_source(spec, abspath)

# Note: we could put develop specs in any scope, but I assume
# users would only ever want to do this for either (a) an active
# env or (b) a specified config file (e.g. that is included by
# an environment)
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
# an active env is not required if a scope is specified
env = spack.cmd.require_active_env(cmd_name="develop")
tty.debug("Updating develop config for {0} transactionally".format(env.name))
with env.write_transaction():
if args.build_directory is not None:
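
A small illustration of the new default-version logic (the package and its versions are hypothetical):

# a hypothetical package with a git branch version; branch versions compare
# as "infinity" and therefore win max():
#   version("develop", branch="develop")
#   version("1.4.0", sha256="...")
version = max(spec.package_class.versions.keys())   # -> develop
spec.versions = spack.version.VersionList([version])
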
@@ -10,11 +10,12 @@
import sys
import tempfile
from pathlib import Path
from typing import List, Optional
from typing import List, Optional, Set

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import islink, symlink
from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize

@@ -50,6 +51,8 @@
"update",
"revert",
"depfile",
"track",
"untrack",
]


@@ -57,35 +60,41 @@
# env create
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
"""create a new environment

create a new environment or, optionally, copy an existing environment

a manifest file results in a new abstract environment while a lock file
creates a new concrete environment
"""
subparser.add_argument(
"env_name", metavar="env", help="name or directory of the new environment"
)
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
help="copy envfile's relative develop paths verbatim",
)
view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument(
"--without-view", action="store_true", help="do not maintain a view for this environment"
)
view_opts.add_argument(
"--with-view",
help="specify that this environment should maintain a view at the"
" specified path (by default the view is maintained in the"
" environment directory)",
"--with-view", help="maintain view at WITH_VIEW (vs. environment's directory)"
)
subparser.add_argument(
"envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
help="manifest or lock file (ends with '.json' or '.lock')",
)
subparser.add_argument(
"--include-concrete", action="append", help="name of old environment to copy specs from"
"--include-concrete",
action="append",
help="copy concrete specs from INCLUDE_CONCRETE's environment",
)


@@ -173,7 +182,7 @@ def _env_create(
# env activate
#
def env_activate_setup_parser(subparser):
"""set the current environment"""
"""set the active environment"""
shells = subparser.add_mutually_exclusive_group()
shells.add_argument(
"--sh",
@@ -213,14 +222,14 @@ def env_activate_setup_parser(subparser):

view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument(
"--with-view",
"-v",
"--with-view",
metavar="name",
help="set runtime environment variables for specific view",
help="set runtime environment variables for the named view",
)
view_options.add_argument(
"--without-view",
"-V",
"--without-view",
action="store_true",
help="do not set runtime environment variables for any view",
)
@@ -230,14 +239,14 @@ def env_activate_setup_parser(subparser):
"--prompt",
action="store_true",
default=False,
help="decorate the command line prompt when activating",
help="add the active environment to the command line prompt",
)

subparser.add_argument(
"--temp",
action="store_true",
default=False,
help="create and activate an environment in a temporary directory",
help="create and activate in a temporary directory",
)
subparser.add_argument(
"--create",
@@ -249,13 +258,12 @@ def env_activate_setup_parser(subparser):
"--envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
help="manifest or lock file (ends with '.json' or '.lock')",
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
help="copy envfile's relative develop paths verbatim when creating",
)
subparser.add_argument(
"-d",
@@ -269,10 +277,7 @@ def env_activate_setup_parser(subparser):
dest="env_name",
nargs="?",
default=None,
help=(
"name of managed environment or directory of the independent env"
" (when using --dir/-d) to activate"
),
help=("name or directory of the environment being activated"),
)


@@ -385,7 +390,7 @@ def env_activate(args):
# env deactivate
#
def env_deactivate_setup_parser(subparser):
"""deactivate any active environment in the shell"""
"""deactivate the active environment"""
shells = subparser.add_mutually_exclusive_group()
shells.add_argument(
"--sh",
@@ -444,104 +449,253 @@ def env_deactivate(args):
sys.stdout.write(cmds)


#
# env track
#
def env_track_setup_parser(subparser):
"""track an environment from a directory in Spack"""
subparser.add_argument("-n", "--name", help="custom environment name")
subparser.add_argument("dir", help="path to environment")
arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_track(args):
src_path = os.path.abspath(args.dir)
if not ev.is_env_dir(src_path):
tty.die("Cannot track environment. Path doesn't contain an environment")

if args.name:
name = args.name
else:
name = os.path.basename(src_path)

try:
dst_path = ev.environment_dir_from_name(name, exists_ok=False)
except ev.SpackEnvironmentError:
tty.die(
f"An environment named {name} already exists. Set a name with:"
"\n\n"
f" spack env track --name NAME {src_path}\n"
)

symlink(src_path, dst_path)

tty.msg(f"Tracking environment in {src_path}")
tty.msg(
"You can now activate this environment with the following command:\n\n"
f" spack env activate {name}\n"
)


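Under the hood a tracked environment is nothing but a symlink inside Spack's managed-environments directory; a minimal sketch of the round trip (paths hypothetical):

import os
from llnl.util.symlink import islink, symlink

src_path = "/home/me/project/spack-env"             # hypothetical directory env
dst_path = ev.environment_dir_from_name("project")  # managed-envs slot
symlink(src_path, dst_path)                         # what `spack env track` does
assert islink(dst_path)                             # how untrack tells tracked from managed
os.unlink(dst_path)                                 # `spack env untrack`; src_path survives
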
#
# env remove & untrack helpers
#
def filter_managed_env_names(env_names: Set[str]) -> Set[str]:
tracked_env_names = {e for e in env_names if islink(ev.environment_dir_from_name(e))}
managed_env_names = env_names - set(tracked_env_names)

num_managed_envs = len(managed_env_names)
managed_envs_str = " ".join(managed_env_names)
if num_managed_envs >= 2:
tty.error(
f"The following are not tracked environments. "
"To remove them completely run,"
"\n\n"
f" spack env rm {managed_envs_str}\n"
)

elif num_managed_envs > 0:
tty.error(
f"'{managed_envs_str}' is not a tracked env. "
"To remove it completely run,"
"\n\n"
f" spack env rm {managed_envs_str}\n"
)

return tracked_env_names


def get_valid_envs(env_names: Set[str]) -> Set[ev.Environment]:
valid_envs = set()
for env_name in env_names:
try:
env = ev.read(env_name)
valid_envs.add(env)

except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
pass

return valid_envs


def _env_untrack_or_remove(
env_names: List[str], remove: bool = False, force: bool = False, yes_to_all: bool = False
):
all_env_names = set(ev.all_environment_names())
known_env_names = set(env_names).intersection(all_env_names)
unknown_env_names = set(env_names) - known_env_names

# print error for unknown environments
for env_name in unknown_env_names:
tty.error(f"Environment '{env_name}' does not exist")

# if only unlinking is allowed, remove all environments
# which do not point internally at symlinks
if not remove:
env_names_to_remove = filter_managed_env_names(known_env_names)
else:
env_names_to_remove = known_env_names

# initialize all environments with valid spack.yaml configs
all_valid_envs = get_valid_envs(all_env_names)

# build a task list of environments and bad env names to remove
envs_to_remove = [e for e in all_valid_envs if e.name in env_names_to_remove]
bad_env_names_to_remove = env_names_to_remove - {e.name for e in envs_to_remove}
for remove_env in envs_to_remove:
for env in all_valid_envs:
# don't check if an environment is included in itself
if env.name == remove_env.name:
continue

# check if an environment is included in another
if remove_env.path in env.included_concrete_envs:
msg = f"Environment '{remove_env.name}' is used by environment '{env.name}'"
if force:
tty.warn(msg)
else:
tty.error(msg)
envs_to_remove.remove(remove_env)

# ask the user if they really want to remove the known environments
# force should do the same as yes to all here following the semantics of rm
if not (yes_to_all or force) and (envs_to_remove or bad_env_names_to_remove):
environments = string.plural(len(env_names_to_remove), "environment", show_n=False)
envs = string.comma_and(list(env_names_to_remove))
answer = tty.get_yes_or_no(
f"Really {'remove' if remove else 'untrack'} {environments} {envs}?", default=False
)
if not answer:
tty.die("Will not remove any environments")

# keep track of the environments we remove for later printing the exit code
removed_env_names = []
for env in envs_to_remove:
name = env.name
if not force and env.active:
tty.error(
f"Environment '{name}' can't be "
f"{'removed' if remove else 'untracked'} while activated."
)
continue
# Get path to check if environment is a tracked / symlinked environment
if islink(env.path):
real_env_path = os.path.realpath(env.path)
os.unlink(env.path)
tty.msg(
f"Successfully untracked environment '{name}', "
"but it can still be found at:\n\n"
f" {real_env_path}\n"
)
else:
env.destroy()
tty.msg(f"Successfully removed environment '{name}'")

removed_env_names.append(env.name)

for bad_env_name in bad_env_names_to_remove:
shutil.rmtree(
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
)
tty.msg(f"Successfully removed environment '{bad_env_name}'")
removed_env_names.append(env.name)

# Following the design of linux rm we should exit with a status of 1
# anytime we cannot delete every environment the user asks for.
# However, we should still process all the environments we know about
# and delete them instead of failing on the first unknown environment.
if len(removed_env_names) < len(known_env_names):
sys.exit(1)


#
# env untrack
#
def env_untrack_setup_parser(subparser):
"""untrack an environment from a directory in Spack"""
subparser.add_argument("env", nargs="+", help="tracked environment name")
subparser.add_argument(
"-f", "--force", action="store_true", help="force unlink even when environment is active"
)
arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_untrack(args):
_env_untrack_or_remove(
env_names=args.env, force=args.force, yes_to_all=args.yes_to_all, remove=False
)


#
# env remove
#
def env_remove_setup_parser(subparser):
"""remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
"""remove managed environment(s)

remove existing environment(s) managed by Spack

directory environments and manifests embedded in repositories must be
removed manually
"""
subparser.add_argument(
"rm_env", metavar="env", nargs="+", help="name(s) of the environment(s) being removed"
)
arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument(
"-f",
"--force",
action="store_true",
help="remove the environment even if it is included in another environment",
help="force removal even when included in other environment(s)",
)


def env_remove(args):
"""Remove a *named* environment.

This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
valid_envs = []
bad_envs = []

for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)

if env_name in args.rm_env:
remove_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
if env_name in args.rm_env:
bad_envs.append(env_name)

# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# don't check if environment is included in itself
if env.name == remove_env.name:
continue

if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)

if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)
envs = string.comma_and(args.rm_env)
answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
if not answer:
tty.die("Will not remove any environments")

for env in remove_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")
env.destroy()
tty.msg(f"Successfully removed environment '{name}'")

for bad_env_name in bad_envs:
shutil.rmtree(
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
)
tty.msg(f"Successfully removed environment '{bad_env_name}'")
"""remove existing environment(s)"""
_env_untrack_or_remove(
env_names=args.rm_env, remove=True, force=args.force, yes_to_all=args.yes_to_all
)


#
# env rename
#
def env_rename_setup_parser(subparser):
"""rename an existing environment"""
"""rename an existing environment

rename a managed environment or move an independent/directory environment

operation cannot be performed to or from an active environment
"""
subparser.add_argument(
"mv_from", metavar="from", help="name (or path) of existing environment"
)
subparser.add_argument(
"mv_to", metavar="to", help="new name (or path) for existing environment"
"mv_from", metavar="from", help="current name or directory of the environment"
)
subparser.add_argument("mv_to", metavar="to", help="new name or directory for the environment")
subparser.add_argument(
"-d",
"--dir",
action="store_true",
help="the specified arguments correspond to directory paths",
help="positional arguments are environment directory paths",
)
subparser.add_argument(
"-f", "--force", action="store_true", help="allow overwriting of an existing environment"
"-f",
"--force",
action="store_true",
help="force renaming even if overwriting an existing environment",
)


def env_rename(args):
"""Rename an environment.

This renames a managed environment or moves an independent environment.
"""
"""rename or move an existing environment"""

# Directory option has been specified
if args.dir:
@@ -590,7 +744,7 @@ def env_rename(args):
# env list
#
def env_list_setup_parser(subparser):
"""list managed environments"""
"""list all managed environments"""


def env_list(args):
@@ -626,13 +780,14 @@ def actions():
# env view
#
def env_view_setup_parser(subparser):
"""manage a view associated with the environment"""
"""manage the environment's view

provide the path when enabling a view with a non-default path
"""
subparser.add_argument(
"action", choices=ViewAction.actions(), help="action to take for the environment's view"
)
subparser.add_argument(
"view_path", nargs="?", help="when enabling a view, optionally set the path manually"
)
subparser.add_argument("view_path", nargs="?", help="view's non-default path when enabling it")


def env_view(args):
@@ -660,7 +815,7 @@ def env_view(args):
# env status
#
def env_status_setup_parser(subparser):
"""print whether there is an active environment"""
"""print active environment status"""


def env_status(args):
@@ -720,14 +875,22 @@ def env_loads(args):


def env_update_setup_parser(subparser):
"""update environments to the latest format"""
"""update the environment manifest to the latest schema format

update the environment to the latest schema format, which may not be
readable by older versions of spack

a backup copy of the manifest is retained in case there is a need to revert
this operation
"""
subparser.add_argument(
metavar="env", dest="update_env", help="name or directory of the environment to activate"
metavar="env", dest="update_env", help="name or directory of the environment"
)
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_update(args):
"""update the manifest to the latest format"""
manifest_file = ev.manifest_file(args.update_env)
backup_file = manifest_file + ".bkp"

@@ -757,14 +920,22 @@ def env_update(args):


def env_revert_setup_parser(subparser):
"""restore environments to their state before update"""
"""restore the environment manifest to its previous format

revert the environment's manifest to the schema format from its last
'spack env update'

the current manifest will be overwritten by the backup copy and the backup
copy will be removed
"""
subparser.add_argument(
metavar="env", dest="revert_env", help="name or directory of the environment to activate"
metavar="env", dest="revert_env", help="name or directory of the environment"
)
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_revert(args):
"""restore the environment manifest to its previous format"""
manifest_file = ev.manifest_file(args.revert_env)
backup_file = manifest_file + ".bkp"

@@ -796,15 +967,19 @@ def env_revert(args):


def env_depfile_setup_parser(subparser):
"""generate a depfile from the concrete environment specs"""
"""generate a depfile to exploit parallel builds across specs

requires the active environment to be concrete
"""
subparser.add_argument(
"--make-prefix",
"--make-target-prefix",
default=None,
metavar="TARGET",
help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
"the absolute path to the directory makedeps under the environment metadata dir is "
"used. can be set to an empty string --make-prefix ''",
help="prefix Makefile targets/variables with <TARGET>/<name>,\n"
"which can be an empty string (--make-prefix '')\n"
"defaults to the absolute path of makedeps under the\n"
"environment metadata dir\n",
)
subparser.add_argument(
"--make-disable-jobserver",
@@ -819,8 +994,8 @@ def env_depfile_setup_parser(subparser):
type=arguments.use_buildcache,
default="package:auto,dependencies:auto",
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
"this flag is passed on to the generated spack install commands",
help="use `only` to prune redundant build dependencies\n"
"option is also passed to generated spack install commands",
)
subparser.add_argument(
"-o",
@@ -834,14 +1009,14 @@ def env_depfile_setup_parser(subparser):
"--generator",
default="make",
choices=("make",),
help="specify the depfile type\n\ncurrently only make is supported",
help="specify the depfile type (only supports `make`)",
)
subparser.add_argument(
metavar="specs",
dest="specs",
nargs=argparse.REMAINDER,
default=None,
help="generate a depfile only for matching specs in the environment",
help="limit the generated file to matching specs",
)

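The workflow this parser serves is, in hedged outline (flags from the hunks above; job counts illustrative):

# spack -e . env depfile -o Makefile     # generate from the active, concrete env
# make -j16                              # build specs in parallel via the depfile
# spack -e . env depfile --make-prefix ''  # emit targets without the default prefix
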
@@ -910,7 +1085,12 @@ def setup_parser(subparser):
setup_parser_cmd_name = "env_%s_setup_parser" % name
setup_parser_cmd = globals()[setup_parser_cmd_name]

subsubparser = sp.add_parser(name, aliases=aliases, help=setup_parser_cmd.__doc__)
subsubparser = sp.add_parser(
name,
aliases=aliases,
description=setup_parser_cmd.__doc__,
help=spack.cmd.first_line(setup_parser_cmd.__doc__),
)
setup_parser_cmd(subsubparser)


@@ -17,7 +17,8 @@
import spack.spec
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

from ..enums import InstallRecordStatus

description = "list and search installed packages"
section = "basic"
@@ -137,21 +138,22 @@ def setup_parser(subparser):
subparser.add_argument(
"--loaded", action="store_true", help="show only packages loaded in the user environment"
)
subparser.add_argument(
only_missing_or_deprecated = subparser.add_mutually_exclusive_group()
only_missing_or_deprecated.add_argument(
"-M",
"--only-missing",
action="store_true",
dest="only_missing",
help="show only missing dependencies",
)
only_missing_or_deprecated.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--deprecated",
action="store_true",
help="show deprecated packages as well as installed specs",
)
subparser.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--install-tree",
action="store",
@@ -165,26 +167,35 @@ def setup_parser(subparser):


def query_arguments(args):
# Set up query arguments.
installed = []
if not (args.only_missing or args.only_deprecated):
installed.append(InstallStatuses.INSTALLED)
if (args.deprecated or args.only_deprecated) and not args.only_missing:
installed.append(InstallStatuses.DEPRECATED)
if (args.missing or args.only_missing) and not args.only_deprecated:
installed.append(InstallStatuses.MISSING)
if args.only_missing and (args.deprecated or args.missing):
raise RuntimeError("cannot use --only-missing with --deprecated, or --missing")

known = any
if args.only_deprecated and (args.deprecated or args.missing):
raise RuntimeError("cannot use --only-deprecated with --deprecated, or --missing")

installed = InstallRecordStatus.INSTALLED
if args.only_missing:
installed = InstallRecordStatus.MISSING
elif args.only_deprecated:
installed = InstallRecordStatus.DEPRECATED

if args.missing:
installed |= InstallRecordStatus.MISSING

if args.deprecated:
installed |= InstallRecordStatus.DEPRECATED

predicate_fn = None
if args.unknown:
known = False
predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)

explicit = any
explicit = None
if args.explicit:
explicit = True
if args.implicit:
explicit = False

q_args = {"installed": installed, "known": known, "explicit": explicit}
q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}

install_tree = args.install_tree
upstreams = spack.config.get("upstreams", {})
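
A hedged walk-through of how the rewritten logic composes the query for two representative invocations:

# spack find --only-missing          -> replaces the default entirely
installed = InstallRecordStatus.MISSING
# spack find --missing --deprecated  -> extends the default with more statuses
installed = InstallRecordStatus.INSTALLED
installed |= InstallRecordStatus.MISSING
installed |= InstallRecordStatus.DEPRECATED
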
@@ -222,11 +233,9 @@ def decorator(spec, fmt):
def display_env(env, args, decorator, results):
"""Display extra find output when running in an environment.

Find in an environment outputs 2 or 3 sections:

1. Root specs
2. Concretized roots (if asked for with -c)
3. Installed specs
In an environment, `spack find` outputs a preliminary section
showing the root specs of the environment (this is in addition
to the section listing out specs matching the query parameters).

"""
tty.msg("In environment %s" % env.name)
@@ -299,6 +308,56 @@ def root_decorator(spec, string):
print()


def _find_query(args, env):
q_args = query_arguments(args)
concretized_but_not_installed = list()
if env:
all_env_specs = env.all_specs()
if args.constraint:
init_specs = cmd.parse_specs(args.constraint)
env_specs = env.all_matching_specs(*init_specs)
else:
env_specs = all_env_specs

spec_hashes = set(x.dag_hash() for x in env_specs)
specs_meeting_q_args = set(spack.store.STORE.db.query(hashes=spec_hashes, **q_args))

results = list()
with spack.store.STORE.db.read_transaction():
for spec in env_specs:
if not spec.installed:
concretized_but_not_installed.append(spec)
if spec in specs_meeting_q_args:
results.append(spec)
else:
results = args.specs(**q_args)

# use groups by default except with format.
if args.groups is None:
args.groups = not args.format

# Exit early with an error code if no package matches the constraint
if concretized_but_not_installed and args.show_concretized:
pass
elif results:
pass
elif args.constraint:
raise cmd.NoSpecMatches()

# If tags have been specified on the command line, filter by tags
if args.tags:
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
results = [x for x in results if x.name in packages_with_tags]
concretized_but_not_installed = [
x for x in concretized_but_not_installed if x.name in packages_with_tags
]

if args.loaded:
results = cmd.filter_loaded_specs(results)

return results, concretized_but_not_installed


def find(parser, args):
env = ev.active_environment()

@@ -307,34 +366,12 @@ def find(parser, args):
if not env and args.show_concretized:
tty.die("-c / --show-concretized requires an active environment")

if env:
if args.constraint:
init_specs = spack.cmd.parse_specs(args.constraint)
results = env.all_matching_specs(*init_specs)
else:
results = env.all_specs()
else:
q_args = query_arguments(args)
results = args.specs(**q_args)

decorator = make_env_decorator(env) if env else lambda s, f: f

# use groups by default except with format.
if args.groups is None:
args.groups = not args.format

# Exit early with an error code if no package matches the constraint
if not results and args.constraint:
constraint_str = " ".join(str(s) for s in args.constraint_specs)
tty.die(f"No package matches the query: {constraint_str}")

# If tags have been specified on the command line, filter by tags
if args.tags:
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
results = [x for x in results if x.name in packages_with_tags]

if args.loaded:
results = spack.cmd.filter_loaded_specs(results)
try:
results, concretized_but_not_installed = _find_query(args, env)
except cmd.NoSpecMatches:
# Note: this uses args.constraint vs. args.constraint_specs because
# the latter only exists if you call args.specs()
tty.die(f"No package matches the query: {' '.join(args.constraint)}")

if args.install_status or args.show_concretized:
status_fn = spack.spec.Spec.install_status
@@ -345,14 +382,16 @@ def find(parser, args):
if args.json:
cmd.display_specs_as_json(results, deps=args.deps)
else:
decorator = make_env_decorator(env) if env else lambda s, f: f

if not args.format:
if env:
display_env(env, args, decorator, results)

if not args.only_roots:
display_results = results
if not args.show_concretized:
display_results = list(x for x in results if x.installed)
display_results = list(results)
if args.show_concretized:
display_results += concretized_but_not_installed
cmd.display_specs(
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
)
@@ -370,13 +409,9 @@ def find(parser, args):
concretized_suffix += " (show with `spack find -c`)"

pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(
list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
)
cmd.print_how_many_pkgs(results, pkg_type, suffix=installed_suffix)

if env:
spack.cmd.print_how_many_pkgs(
list(x for x in results if not x.installed),
"concretized",
suffix=concretized_suffix,
cmd.print_how_many_pkgs(
concretized_but_not_installed, "concretized", suffix=concretized_suffix
)
@@ -78,8 +78,8 @@
boxlib @B{dim=2} boxlib built for 2 dimensions
libdwarf @g{%intel} ^libelf@g{%gcc}
libdwarf, built with intel compiler, linked to libelf built with gcc
mvapich2 @g{%pgi} @B{fabrics=psm,mrail,sock}
mvapich2, built with pgi compiler, with support for multiple fabrics
mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
mvapich2, built with gcc compiler, with support for multiple fabrics
"""

@@ -11,6 +11,7 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify

import spack.builder
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
@@ -202,11 +203,13 @@ def print_namespace(pkg, args):
def print_phases(pkg, args):
"""output installation phases"""

if hasattr(pkg.builder, "phases") and pkg.builder.phases:
builder = spack.builder.create(pkg)

if hasattr(builder, "phases") and builder.phases:
color.cprint("")
color.cprint(section_title("Installation Phases:"))
phase_str = ""
for phase in pkg.builder.phases:
for phase in builder.phases:
phase_str += " {0}".format(phase)
color.cprint(phase_str)

@@ -10,7 +10,8 @@
import spack.cmd
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

from ..enums import InstallRecordStatus

description = "mark packages as explicitly or implicitly installed"
section = "admin"
@@ -67,8 +68,7 @@ def find_matching_specs(specs, allow_multiple_matches=False):
has_errors = False

for spec in specs:
install_query = [InstallStatuses.INSTALLED]
matching = spack.store.STORE.db.query_local(spec, installed=install_query)
matching = spack.store.STORE.db.query_local(spec, installed=InstallRecordStatus.INSTALLED)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@@ -80,8 +80,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
has_errors = True

# No installed package matches the query
if len(matching) == 0 and spec is not any:
tty.die("{0} does not match any installed packages.".format(spec))
if len(matching) == 0 and spec is not None:
tty.die(f"{spec} does not match any installed packages.")

specs_from_cli.extend(matching)

@@ -98,8 +98,9 @@ def do_mark(specs, explicit):
specs (list): list of specs to be marked
explicit (bool): whether to mark specs as explicitly installed
"""
for spec in specs:
spack.store.STORE.db.update_explicit(spec, explicit)
with spack.store.STORE.db.write_transaction():
for spec in specs:
spack.store.STORE.db.mark(spec, "explicit", explicit)

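The point of the do_mark hunk is batching: one write transaction for the whole list instead of one lock cycle per spec. A hedged sketch of the calling pattern:

# before: update_explicit() took its own lock for every spec
# after: a single outer transaction amortizes the DB lock over the batch
with spack.store.STORE.db.write_transaction():
    for spec in specs:
        spack.store.STORE.db.mark(spec, "explicit", True)
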
def mark_specs(args, specs):
@@ -116,6 +117,6 @@ def mark(parser, args):
" Use `spack mark --all` to mark ALL packages.",
)

# [any] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
# [None] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
mark_specs(args, specs)
@@ -231,31 +231,133 @@ def setup_parser(subparser):
)


def _configure_access_pair(
args, id_tok, id_variable_tok, secret_tok, secret_variable_tok, default=None
):
"""Configure the access_pair options"""

# Check if any of the arguments are set to update this access_pair.
# If none are set, then skip computing the new access pair
args_id = getattr(args, id_tok)
args_id_variable = getattr(args, id_variable_tok)
args_secret = getattr(args, secret_tok)
args_secret_variable = getattr(args, secret_variable_tok)
if not any([args_id, args_id_variable, args_secret, args_secret_variable]):
return None

def _default_value(id_):
if isinstance(default, list):
return default[0] if id_ == "id" else default[1]
elif isinstance(default, dict):
return default.get(id_)
else:
return None

def _default_variable(id_):
if isinstance(default, dict):
return default.get(id_ + "_variable")
else:
return None

id_ = None
id_variable = None
secret = None
secret_variable = None

# Get the value/default value if the inverse argument is not set
if not args_id_variable:
id_ = getattr(args, id_tok) or _default_value("id")
if not args_id:
id_variable = getattr(args, id_variable_tok) or _default_variable("id")
if not args_secret_variable:
secret = getattr(args, secret_tok) or _default_value("secret")
if not args_secret:
secret_variable = getattr(args, secret_variable_tok) or _default_variable("secret")

if (id_ or id_variable) and (secret or secret_variable):
if secret:
if not id_:
raise SpackError("Cannot add mirror with a variable id and text secret")

return [id_, secret]
else:
return dict(
[
(("id", id_) if id_ else ("id_variable", id_variable)),
("secret_variable", secret_variable),
]
)
else:
if id_ or id_variable or secret or secret_variable is not None:
id_arg_tok = id_tok.replace("_", "-")
secret_arg_tok = secret_tok.replace("_", "-")
tty.warn(
"Expected both parts of the access pair to be specified. "
f"(i.e. --{id_arg_tok} and --{secret_arg_tok})"
)

return None

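A hedged summary of the shapes `_configure_access_pair` can hand back (all values illustrative):

access_pair = _configure_access_pair(
    args,
    "s3_access_key_id", "s3_access_key_id_variable",
    "s3_access_key_secret", "s3_access_key_secret_variable",
)
# -> ["AKIA...", "secret"]                                  both given as literal strings (deprecated)
# -> {"id": "AKIA...", "secret_variable": "MY_SECRET_VAR"}  literal id, variable-based secret
# -> {"id_variable": "MY_ID_VAR", "secret_variable": "MY_SECRET_VAR"}  both via variables
# -> None                                                   nothing (or only half the pair) given
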
def mirror_add(args):
"""add a mirror to Spack"""
if (
args.s3_access_key_id
or args.s3_access_key_secret
or args.s3_access_token
or args.s3_access_key_id_variable
or args.s3_access_key_secret_variable
or args.s3_access_token_variable
or args.s3_profile
or args.s3_endpoint_url
or args.type
or args.oci_username
or args.oci_password
or args.oci_username_variable
or args.oci_password_variable
or args.autopush
or args.signed is not None
):
connection = {"url": args.url}
if args.s3_access_key_id and args.s3_access_key_secret:
connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
# S3 Connection
if args.s3_access_key_secret:
tty.warn(
"Configuring mirror secrets as plain text with --s3-access-key-secret is "
"deprecated. Use --s3-access-key-secret-variable instead"
)
if args.oci_password:
tty.warn(
"Configuring mirror secrets as plain text with --oci-password is deprecated. "
"Use --oci-password-variable instead"
)
access_pair = _configure_access_pair(
args,
"s3_access_key_id",
"s3_access_key_id_variable",
"s3_access_key_secret",
"s3_access_key_secret_variable",
)
if access_pair:
connection["access_pair"] = access_pair

if args.s3_access_token:
connection["access_token"] = args.s3_access_token
elif args.s3_access_token_variable:
connection["access_token_variable"] = args.s3_access_token_variable

if args.s3_profile:
connection["profile"] = args.s3_profile

if args.s3_endpoint_url:
connection["endpoint_url"] = args.s3_endpoint_url
if args.oci_username and args.oci_password:
connection["access_pair"] = [args.oci_username, args.oci_password]

# OCI Connection
access_pair = _configure_access_pair(
args, "oci_username", "oci_username_variable", "oci_password", "oci_password_variable"
)
if access_pair:
connection["access_pair"] = access_pair

if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
@@ -285,16 +387,35 @@ def _configure_mirror(args):
changes = {}
if args.url:
changes["url"] = args.url
if args.s3_access_key_id and args.s3_access_key_secret:
changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]

default_access_pair = entry._get_value("access_pair", direction or "fetch")
# TODO: Init access_pair args with the fetch/push/base values in the current mirror state
access_pair = _configure_access_pair(
args,
"s3_access_key_id",
"s3_access_key_id_variable",
"s3_access_key_secret",
"s3_access_key_secret_variable",
default=default_access_pair,
)
if access_pair:
changes["access_pair"] = access_pair
if args.s3_access_token:
changes["access_token"] = args.s3_access_token
if args.s3_profile:
changes["profile"] = args.s3_profile
if args.s3_endpoint_url:
changes["endpoint_url"] = args.s3_endpoint_url
if args.oci_username and args.oci_password:
changes["access_pair"] = [args.oci_username, args.oci_password]
access_pair = _configure_access_pair(
args,
"oci_username",
"oci_username_variable",
"oci_password",
"oci_password_variable",
default=default_access_pair,
)
if access_pair:
changes["access_pair"] = access_pair
if getattr(args, "signed", None) is not None:
changes["signed"] = args.signed
if getattr(args, "autopush", None) is not None:
@@ -19,6 +19,7 @@
import spack.modules
import spack.modules.common
import spack.repo
from spack.cmd import MultipleSpecsMatch, NoSpecMatches
from spack.cmd.common import arguments

description = "manipulate module files"
@@ -91,18 +92,6 @@ def add_loads_arguments(subparser):
arguments.add_common_arguments(subparser, ["recurse_dependencies"])


class MultipleSpecsMatch(Exception):
"""Raised when multiple specs match a constraint, in a context where
this is not allowed.
"""


class NoSpecMatches(Exception):
"""Raised when no spec matches a constraint, in a context where
this is not allowed.
"""


def one_spec_or_raise(specs):
"""Ensures exactly one spec has been selected, or raises the appropriate
exception.
@@ -378,7 +367,10 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
"refresh": {
"installed": True,
"predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
}
}
query_args = constraint_qualifiers.get(args.subparser_name, {})
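
The `known` callable gives way to a `predicate_fn` that receives the whole database record rather than a bare package name; a hedged before/after:

# before: the callable was handed a package name
known = lambda name: not spack.repo.PATH.exists(name)
# after: it is handed the install record, so spec metadata is available too
predicate_fn = lambda record: spack.repo.PATH.exists(record.spec.name)
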
@@ -33,8 +33,9 @@ def patch(parser, args):
spack.config.set("config:checksum", False, scope="command_line")

specs = spack.cmd.parse_specs(args.specs, concretize=False)
specs = spack.cmd.matching_specs_from_env(specs)
for spec in specs:
_patch(spack.cmd.matching_spec_from_env(spec).package)
_patch(spec.package)


def _patch_env(env: ev.Environment):
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import argparse
 import re
 import sys

@@ -12,13 +11,12 @@
 import spack
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.cmd.spec
 import spack.config
 import spack.environment
 import spack.hash_types as ht
 import spack.solver.asp as asp
 import spack.spec
 from spack.cmd.common import arguments

 description = "concretize a specs using an ASP solver"
 section = "developer"

@@ -41,42 +39,6 @@ def setup_parser(subparser):
         " solutions    models found by asp program\n"
         "        all    all of the above",
     )
-
-    # Below are arguments w.r.t. spec display (like spack spec)
-    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])
-
-    install_status_group = subparser.add_mutually_exclusive_group()
-    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
-
-    subparser.add_argument(
-        "-y",
-        "--yaml",
-        action="store_const",
-        dest="format",
-        default=None,
-        const="yaml",
-        help="print concrete spec as yaml",
-    )
-    subparser.add_argument(
-        "-j",
-        "--json",
-        action="store_const",
-        dest="format",
-        default=None,
-        const="json",
-        help="print concrete spec as json",
-    )
-    subparser.add_argument(
-        "-c",
-        "--cover",
-        action="store",
-        default="nodes",
-        choices=["nodes", "edges", "paths"],
-        help="how extensively to traverse the DAG (default: nodes)",
-    )
-    subparser.add_argument(
-        "-t", "--types", action="store_true", default=False, help="show dependency types"
-    )
     subparser.add_argument(
         "--timers",
         action="store_true",

@@ -86,9 +48,8 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--stats", action="store_true", default=False, help="print out statistics from clingo"
     )
-    subparser.add_argument("specs", nargs=argparse.REMAINDER, help="specs of packages")

-    spack.cmd.common.arguments.add_concretizer_args(subparser)
+    spack.cmd.spec.setup_parser(subparser)


 def _process_result(result, show, required_format, kwargs):

@@ -164,11 +125,12 @@ def solve(parser, args):

     # If we have an active environment, pick the specs from there
     env = spack.environment.active_environment()
-    if env and args.specs:
-        msg = "cannot give explicit specs when an environment is active"
-        raise RuntimeError(msg)
-
-    specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)
+    if args.specs:
+        specs = spack.cmd.parse_specs(args.specs)
+    elif env:
+        specs = list(env.user_specs)
+    else:
+        tty.die("spack solve requires at least one spec or an active environment")

     solver = asp.Solver()
     output = sys.stdout if "asp" in show else None
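The rewritten selection logic gives explicit command-line specs precedence over the active environment instead of rejecting the combination outright. A minimal sketch of that precedence under assumed `cli_specs`/`env_specs` stand-ins (not Spack's real objects):

```python
from typing import List, Optional


def select_specs(cli_specs: List[str], env_specs: Optional[List[str]]) -> List[str]:
    """Mirror the new precedence: CLI specs win, then the environment, else error."""
    if cli_specs:
        return cli_specs
    if env_specs is not None:
        return list(env_specs)
    raise SystemExit("spack solve requires at least one spec or an active environment")


# CLI specs are used even when an environment is active:
assert select_specs(["zlib"], ["hdf5"]) == ["zlib"]
# With no CLI specs, the environment's user specs are used:
assert select_specs([], ["hdf5"]) == ["hdf5"]
```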
@@ -82,64 +82,44 @@ def spec(parser, args):
     if args.namespaces:
         fmt = "{namespace}." + fmt

-    tree_kwargs = {
-        "cover": args.cover,
-        "format": fmt,
-        "hashlen": None if args.very_long else 7,
-        "show_types": args.types,
-        "status_fn": install_status_fn if args.install_status else None,
-    }
-
     # use a read transaction if we are getting install status for every
     # spec in the DAG. This avoids repeatedly querying the DB.
     tree_context = lang.nullcontext
     if args.install_status:
         tree_context = spack.store.STORE.db.read_transaction

-    # Use command line specified specs, otherwise try to use environment specs.
+    env = ev.active_environment()
+
     if args.specs:
-        input_specs = spack.cmd.parse_specs(args.specs)
-        concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
-        specs = list(zip(input_specs, concretized_specs))
+        concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
+    elif env:
+        env.concretize()
+        concrete_specs = env.concrete_roots()
     else:
-        env = ev.active_environment()
-        if env:
-            env.concretize()
-            specs = env.concretized_specs()
-
-            # environments are printed together in a combined tree() invocation,
-            # except when using --yaml or --json, which we print spec by spec below.
-            if not args.format:
-                tree_kwargs["key"] = spack.traverse.by_dag_hash
-                tree_kwargs["hashes"] = args.long or args.very_long
-                print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
-                return
-        else:
-            tty.die("spack spec requires at least one spec or an active environment")
+        tty.die("spack spec requires at least one spec or an active environment")

-    for input, output in specs:
-        # With --yaml or --json, just print the raw specs to output
-        if args.format:
+    # With --yaml, --json, or --format, just print the raw specs to output
+    if args.format:
+        for spec in concrete_specs:
             if args.format == "yaml":
                 # use write because to_yaml already has a newline.
-                sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
+                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
             elif args.format == "json":
-                print(output.to_json(hash=ht.dag_hash))
+                print(spec.to_json(hash=ht.dag_hash))
             else:
-                print(output.format(args.format))
-            continue
+                print(spec.format(args.format))
+        return

-        with tree_context():
-            # Only show the headers for input specs that are not concrete to avoid
-            # repeated output. This happens because parse_specs outputs concrete
-            # specs for `/hash` inputs.
-            if not input.concrete:
-                tree_kwargs["hashes"] = False  # Always False for input spec
-                print("Input spec")
-                print("--------------------------------")
-                print(input.tree(**tree_kwargs))
-                print("Concretized")
-                print("--------------------------------")
-
-            tree_kwargs["hashes"] = args.long or args.very_long
-            print(output.tree(**tree_kwargs))
+    with tree_context():
+        print(
+            spack.spec.tree(
+                concrete_specs,
+                cover=args.cover,
+                format=fmt,
+                hashlen=None if args.very_long else 7,
+                show_types=args.types,
+                status_fn=install_status_fn if args.install_status else None,
+                hashes=args.long or args.very_long,
+                key=spack.traverse.by_dag_hash,
+            )
+        )
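The consolidated printing path relies on `tree()` accepting several root specs plus a `key=` function, so dependencies shared between roots are printed once instead of once per root. A generic illustration of that dedup-by-key idea, using a toy `Node`/`walk` rather than Spack's traversal code:

```python
from typing import Callable, Dict, Iterable, List, Optional


class Node:
    def __init__(self, name: str, deps: Optional[List["Node"]] = None) -> None:
        self.name = name
        self.deps = deps or []


def walk(roots: Iterable[Node], key: Callable[[Node], str]) -> List[str]:
    """Depth-first over several roots, emitting each node once per its key."""
    seen: Dict[str, bool] = {}
    order: List[str] = []

    def visit(node: Node) -> None:
        k = key(node)
        if k in seen:
            return
        seen[k] = True
        order.append(node.name)
        for dep in node.deps:
            visit(dep)

    for root in roots:
        visit(root)
    return order


zlib = Node("zlib")
hdf5 = Node("hdf5", [zlib])
cmake = Node("cmake", [zlib])
# zlib appears once even though both roots depend on it:
assert walk([hdf5, cmake], key=lambda n: n.name) == ["hdf5", "zlib", "cmake"]
```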
@@ -47,8 +47,8 @@ def stage(parser, args):
     if len(specs) > 1 and custom_path:
         tty.die("`--path` requires a single spec, but multiple were provided")

+    specs = spack.cmd.matching_specs_from_env(specs)
     for spec in specs:
-        spec = spack.cmd.matching_spec_from_env(spec)
         pkg = spec.package

         if custom_path:
@@ -165,7 +165,7 @@ def test_run(args):
     if args.fail_fast:
         spack.config.set("config:fail_fast", True, scope="command_line")

-    explicit = args.explicit or any
+    explicit = args.explicit or None
     explicit_str = "explicitly " if args.explicit else ""

     # Get specs to test
@@ -24,7 +24,7 @@

 # tutorial configuration parameters
-tutorial_branch = "releases/v0.22"
+tutorial_branch = "releases/v0.23"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
@@ -17,7 +17,8 @@
 import spack.store
 import spack.traverse as traverse
 from spack.cmd.common import arguments
-from spack.database import InstallStatuses
+
+from ..enums import InstallRecordStatus

 description = "remove installed packages"
 section = "build"

@@ -90,6 +91,7 @@ def find_matching_specs(
         env: optional active environment
         specs: list of specs to be matched against installed packages
         allow_multiple_matches: if True multiple matches are admitted
+        origin: origin of the spec

     Return:
         list: list of specs

@@ -98,12 +100,14 @@ def find_matching_specs(
     hashes = env.all_hashes() if env else None

     # List of specs that match expressions given via command line
-    specs_from_cli = []
+    specs_from_cli: List[spack.spec.Spec] = []
     has_errors = False
     for spec in specs:
-        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
         matching = spack.store.STORE.db.query_local(
-            spec, hashes=hashes, installed=install_query, origin=origin
+            spec,
+            hashes=hashes,
+            installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
+            origin=origin,
         )
         # For each spec provided, make sure it refers to only one package.
         # Fail and ask user to be unambiguous if it doesn't

@@ -116,7 +120,7 @@ def find_matching_specs(
             has_errors = True

         # No installed package matches the query
-        if len(matching) == 0 and spec is not any:
+        if len(matching) == 0 and spec is not None:
             if env:
                 pkg_type = "packages in environment '%s'" % env.name
             else:

@@ -213,7 +217,7 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Environment]):

     # Gets the list of installed specs that match the ones given via cli
     # args.all takes care of the case where '-a' is given in the cli
-    matching_specs = find_matching_specs(env, specs, args.all)
+    matching_specs = find_matching_specs(env, specs, args.all, origin=args.origin)
     dependent_specs = installed_dependents(matching_specs)
     all_uninstall_specs = matching_specs + dependent_specs if args.dependents else matching_specs
     other_dependent_envs = dependent_environments(all_uninstall_specs, current_env=env)

@@ -301,6 +305,6 @@ def uninstall(parser, args):
         " Use `spack uninstall --all` to uninstall ALL packages.",
     )

-    # [any] here handles the --all case by forcing all specs to be returned
-    specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
+    # [None] here handles the --all case by forcing all specs to be returned
+    specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
     uninstall_specs(args, specs)
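The query change above replaces a list of `InstallStatuses` values with a single OR-ed `InstallRecordStatus` value, which suggests a flag enum: several record states combine into one queryable value (and, in the same spirit, the `[any]` builtin-as-sentinel trick is replaced with the conventional `None`). A self-contained sketch of the flag-enum pattern; the member names mirror the diff, but the real enum lives in Spack's `enums` module:

```python
import enum


class InstallRecordStatus(enum.Flag):
    """Possible states of an install record; members can be OR-ed together."""

    INSTALLED = enum.auto()
    DEPRECATED = enum.auto()
    MISSING = enum.auto()


def matches(record_status: InstallRecordStatus, query: InstallRecordStatus) -> bool:
    """A record matches when its status is included in the queried flags."""
    return bool(record_status & query)


query = InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED
assert matches(InstallRecordStatus.INSTALLED, query)
assert matches(InstallRecordStatus.DEPRECATED, query)
assert not matches(InstallRecordStatus.MISSING, query)
```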
@@ -33,6 +33,8 @@
     YamlFilesystemView.

 """
+import sys
+
 import llnl.util.tty as tty
 from llnl.util.link_tree import MergeConflictError

@@ -178,7 +180,12 @@ def setup_parser(sp):


 def view(parser, args):
-    "Produce a view of a set of packages."
+    """Produce a view of a set of packages."""
+
+    if sys.platform == "win32" and args.action in ("hardlink", "hard"):
+        # Hard-linked views are not yet allowed on Windows.
+        # See https://github.com/spack/spack/pull/46335#discussion_r1757411915
+        tty.die("Hard linking is not supported on Windows. Please use symlinks or copy methods.")
+
     specs = spack.cmd.parse_specs(args.specs)
     path = args.path[0]
@@ -4,20 +4,23 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import contextlib
+import hashlib
 import itertools
+import json
 import os
 import platform
 import re
 import shutil
 import sys
 import tempfile
-from typing import List, Optional, Sequence
+from typing import Dict, List, Optional, Sequence

 import llnl.path
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs

+import spack.caches
 import spack.error
 import spack.schema.environment
 import spack.spec

@@ -26,6 +29,7 @@
 import spack.util.module_cmd
 import spack.version
 from spack.util.environment import filter_system_paths
+from spack.util.file_cache import FileCache

 __all__ = ["Compiler"]
@@ -34,7 +38,7 @@


 @llnl.util.lang.memoized
-def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
+def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()) -> str:
     """Invokes the compiler at a given path passing a single
     version argument and returns the output.

@@ -57,7 +61,7 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
     return output


-def get_compiler_version_output(compiler_path, *args, **kwargs):
+def get_compiler_version_output(compiler_path, *args, **kwargs) -> str:
     """Wrapper for _get_compiler_version_output()."""
     # This ensures that we memoize compiler output by *absolute path*,
     # not just executable name. If we don't do this, and the path changes
@@ -275,7 +279,7 @@ def __init__(
         operating_system,
         target,
         paths,
-        modules=None,
+        modules: Optional[List[str]] = None,
         alias=None,
         environment=None,
         extra_rpaths=None,

@@ -290,6 +294,7 @@ def __init__(
         self.environment = environment or {}
         self.extra_rpaths = extra_rpaths or []
         self.enable_implicit_rpaths = enable_implicit_rpaths
+        self.cache = COMPILER_CACHE

         self.cc = paths[0]
         self.cxx = paths[1]
@@ -390,15 +395,11 @@ def real_version(self):

         E.g. C++11 flag checks.
         """
-        if not self._real_version:
-            try:
-                real_version = spack.version.Version(self.get_real_version())
-                if real_version == spack.version.Version("unknown"):
-                    return self.version
-                self._real_version = real_version
-            except spack.util.executable.ProcessError:
-                self._real_version = self.version
-        return self._real_version
+        real_version_str = self.cache.get(self).real_version
+        if not real_version_str or real_version_str == "unknown":
+            return self.version
+
+        return spack.version.StandardVersion.from_string(real_version_str)

     def implicit_rpaths(self) -> List[str]:
         if self.enable_implicit_rpaths is False:
@@ -427,6 +428,11 @@ def default_dynamic_linker(self) -> Optional[str]:
     @property
     def default_libc(self) -> Optional["spack.spec.Spec"]:
         """Determine libc targeted by the compiler from link line"""
+        # technically this should be testing the target platform of the compiler, but we don't have
+        # that, so stick to host platform for now.
+        if sys.platform in ("darwin", "win32"):
+            return None
+
         dynamic_linker = self.default_dynamic_linker

         if not dynamic_linker:
@@ -445,19 +451,23 @@ def required_libs(self):
     @property
     def compiler_verbose_output(self) -> Optional[str]:
         """Verbose output from compiling a dummy C source file. Output is cached."""
-        if not hasattr(self, "_compile_c_source_output"):
-            self._compile_c_source_output = self._compile_dummy_c_source()
-        return self._compile_c_source_output
+        return self.cache.get(self).c_compiler_output

     def _compile_dummy_c_source(self) -> Optional[str]:
-        cc = self.cc if self.cc else self.cxx
+        if self.cc:
+            cc = self.cc
+            ext = "c"
+        else:
+            cc = self.cxx
+            ext = "cc"
+
         if not cc or not self.verbose_flag:
             return None

         try:
             tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
             fout = os.path.join(tmpdir, "output")
-            fin = os.path.join(tmpdir, "main.c")
+            fin = os.path.join(tmpdir, f"main.{ext}")

             with open(fin, "w") as csource:
                 csource.write(
@@ -559,7 +569,7 @@ def fc_pic_flag(self):
     # Note: This is not a class method. The class methods are used to detect
     # compilers on PATH based systems, and do not set up the run environment of
     # the compiler. This method can be called on `module` based systems as well
-    def get_real_version(self):
+    def get_real_version(self) -> str:
         """Query the compiler for its version.

         This is the "real" compiler version, regardless of what is in the
@@ -569,14 +579,17 @@ def get_real_version(self):
         modifications) to enable the compiler to run properly on any platform.
         """
         cc = spack.util.executable.Executable(self.cc)
-        with self.compiler_environment():
-            output = cc(
-                self.version_argument,
-                output=str,
-                error=str,
-                ignore_errors=tuple(self.ignore_version_errors),
-            )
-            return self.extract_version_from_output(output)
+        try:
+            with self.compiler_environment():
+                output = cc(
+                    self.version_argument,
+                    output=str,
+                    error=str,
+                    ignore_errors=tuple(self.ignore_version_errors),
+                )
+                return self.extract_version_from_output(output)
+        except spack.util.executable.ProcessError:
+            return "unknown"

     @property
     def prefix(self):
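Wrapping the version query in `try`/`except` changes the method's contract: rather than letting `ProcessError` propagate to every caller, failures collapse into the sentinel string `"unknown"`, which the reworked `real_version` property above already maps back to the declared version. A minimal sketch of that sentinel pattern with a toy probe function (not the Spack executable API):

```python
def run_version_probe(path: str) -> str:
    """Stand-in for invoking `<compiler> --version`; may raise OSError."""
    raise OSError(f"cannot execute {path}")


def get_real_version(path: str) -> str:
    """Return the probed version, or the sentinel "unknown" on any failure."""
    try:
        return run_version_probe(path)
    except OSError:
        return "unknown"


def effective_version(path: str, declared: str) -> str:
    """Callers fall back to the declared version when the probe fails."""
    real = get_real_version(path)
    return declared if real == "unknown" else real


assert effective_version("/usr/bin/does-not-exist", "12.3.0") == "12.3.0"
```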
@@ -603,7 +616,7 @@ def default_version(cls, cc):

     @classmethod
     @llnl.util.lang.memoized
-    def extract_version_from_output(cls, output):
+    def extract_version_from_output(cls, output: str) -> str:
         """Extracts the version from compiler's output."""
         match = re.search(cls.version_regex, output)
         return match.group(1) if match else "unknown"
@@ -732,3 +745,106 @@ def __init__(self, compiler, feature, flag_name, ver_string=None):
             )
             + " implement the {0} property and submit a pull request or issue.".format(flag_name),
         )
+
+
+class CompilerCacheEntry:
+    """Deserialized cache entry for a compiler"""
+
+    __slots__ = ["c_compiler_output", "real_version"]
+
+    def __init__(self, c_compiler_output: Optional[str], real_version: str):
+        self.c_compiler_output = c_compiler_output
+        self.real_version = real_version
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Optional[str]]):
+        if not isinstance(data, dict):
+            raise ValueError(f"Invalid {cls.__name__} data")
+        c_compiler_output = data.get("c_compiler_output")
+        real_version = data.get("real_version")
+        if not isinstance(real_version, str) or not isinstance(
+            c_compiler_output, (str, type(None))
+        ):
+            raise ValueError(f"Invalid {cls.__name__} data")
+        return cls(c_compiler_output, real_version)
+
+
+class CompilerCache:
+    """Base class for compiler output cache. Default implementation does not cache anything."""
+
+    def value(self, compiler: Compiler) -> Dict[str, Optional[str]]:
+        return {
+            "c_compiler_output": compiler._compile_dummy_c_source(),
+            "real_version": compiler.get_real_version(),
+        }
+
+    def get(self, compiler: Compiler) -> CompilerCacheEntry:
+        return CompilerCacheEntry.from_dict(self.value(compiler))
+
+
+class FileCompilerCache(CompilerCache):
+    """Cache for compiler output, which is used to determine implicit link paths, the default libc
+    version, and the compiler version."""
+
+    name = os.path.join("compilers", "compilers.json")
+
+    def __init__(self, cache: "FileCache") -> None:
+        self.cache = cache
+        self.cache.init_entry(self.name)
+        self._data: Dict[str, Dict[str, Optional[str]]] = {}
+
+    def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
+        try:
+            return CompilerCacheEntry.from_dict(self._data[key])
+        except ValueError:
+            del self._data[key]
+        except KeyError:
+            pass
+        return None
+
+    def get(self, compiler: Compiler) -> CompilerCacheEntry:
+        # Cache hit
+        try:
+            with self.cache.read_transaction(self.name) as f:
+                assert f is not None
+                self._data = json.loads(f.read())
+                assert isinstance(self._data, dict)
+        except (json.JSONDecodeError, AssertionError):
+            self._data = {}
+
+        key = self._key(compiler)
+        value = self._get_entry(key)
+        if value is not None:
+            return value
+
+        # Cache miss
+        with self.cache.write_transaction(self.name) as (old, new):
+            try:
+                assert old is not None
+                self._data = json.loads(old.read())
+                assert isinstance(self._data, dict)
+            except (json.JSONDecodeError, AssertionError):
+                self._data = {}
+
+            # Use cache entry that may have been created by another process in the meantime.
+            entry = self._get_entry(key)
+
+            # Finally compute the cache entry
+            if entry is None:
+                self._data[key] = self.value(compiler)
+                entry = CompilerCacheEntry.from_dict(self._data[key])
+
+            new.write(json.dumps(self._data, separators=(",", ":")))
+
+        return entry
+
+    def _key(self, compiler: Compiler) -> str:
+        as_bytes = json.dumps(compiler.to_dict(), separators=(",", ":")).encode("utf-8")
+        return hashlib.sha256(as_bytes).hexdigest()
+
+
+def _make_compiler_cache():
+    return FileCompilerCache(spack.caches.MISC_CACHE)
+
+
+COMPILER_CACHE: CompilerCache = llnl.util.lang.Singleton(_make_compiler_cache)  # type: ignore
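`FileCompilerCache.get` above is a double-checked locking pattern over a JSON file: read under a read lock, and on a miss re-check under the write lock before computing, so an entry produced by a concurrent process in the window between the two locks is reused rather than recomputed and clobbered. A file-free sketch of the same shape, with a plain `threading.Lock` standing in for Spack's file-lock transactions:

```python
import threading
from typing import Callable, Dict

_lock = threading.Lock()
_data: Dict[str, str] = {}


def cached_get(key: str, compute: Callable[[], str]) -> str:
    # Optimistic read: a plain dict snapshot is enough for this sketch.
    value = _data.get(key)
    if value is not None:
        return value  # cache hit

    with _lock:  # stands in for cache.write_transaction(...)
        # Re-check: another thread may have filled the entry while we waited.
        value = _data.get(key)
        if value is None:
            value = compute()  # compute at most once per key
            _data[key] = value
        return value


print(cached_get("gcc@12", lambda: "expensive probe result"))
```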
@@ -116,5 +116,5 @@ def fflags(self):
     def _handle_default_flag_addtions(self):
         # This is a known issue for AOCC 3.0 see:
         # https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
-        if self.real_version.satisfies(ver("3.0.0")):
+        if self.version.satisfies(ver("3.0.0")):
             return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false"
@@ -16,7 +16,6 @@
     ("gfortran", os.path.join("clang", "gfortran")),
     ("xlf_r", os.path.join("xl_r", "xlf_r")),
     ("xlf", os.path.join("xl", "xlf")),
     ("pgfortran", os.path.join("pgi", "pgfortran")),
     ("ifort", os.path.join("intel", "ifort")),
 ]

@@ -25,7 +24,6 @@
     ("gfortran", os.path.join("clang", "gfortran")),
     ("xlf90_r", os.path.join("xl_r", "xlf90_r")),
     ("xlf90", os.path.join("xl", "xlf90")),
     ("pgfortran", os.path.join("pgi", "pgfortran")),
     ("ifort", os.path.join("intel", "ifort")),
 ]
@@ -92,6 +92,14 @@ def c11_flag(self):
         else:
             return "-std=c1x"

+    @property
+    def c18_flag(self):
+        # c18 supported since oneapi 2022, which is classic version 2021.5.0
+        if self.real_version < Version("21.5.0"):
+            raise UnsupportedCompilerFlag(self, "the C18 standard", "c18_flag", "< 21.5.0")
+        else:
+            return "-std=c18"
+
     @property
     def cc_pic_flag(self):
         return "-fPIC"

@@ -116,9 +124,8 @@ def setup_custom_environment(self, pkg, env):
         # Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
         # Always pass flags to disable deprecation warnings, since these warnings can
         # confuse tools that parse the output of compiler commands (e.g. version checks).
-        if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
+        if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
             env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
-        if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
             env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
-        if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
+        if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
             env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")
Some files were not shown because too many files have changed in this diff.